diff --git a/.gitignore b/.gitignore index 9e12afb775..ae18c58e08 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,8 @@ nosetests.xml coverage.xml *,cover experiments/examples/output/ +experiments/IMC/output/ +experiments/IMC/yaml test.py # Translations @@ -67,6 +69,9 @@ target/ # Visual Studio Code /.vscode/ +# Qt Creator +CMakeLists.txt.user + # Virtual env .venv*/ .python-version diff --git a/cpprevolve/CMakeLists.txt b/cpprevolve/CMakeLists.txt index d39c8565d7..bcfc924d30 100644 --- a/cpprevolve/CMakeLists.txt +++ b/cpprevolve/CMakeLists.txt @@ -3,7 +3,8 @@ cmake_minimum_required (VERSION 3.7.0) # Project name project (Revolve) -set (CMAKE_CXX_STANDARD 11) +set (CMAKE_CXX_STANDARD 14) +find_package(Torch REQUIRED) # Include cmake subdirectories add_subdirectory(revolve/brains) diff --git a/cpprevolve/revolve/brains/CMakeLists.txt b/cpprevolve/revolve/brains/CMakeLists.txt index 07ad58b8ed..cd4a8b107b 100644 --- a/cpprevolve/revolve/brains/CMakeLists.txt +++ b/cpprevolve/revolve/brains/CMakeLists.txt @@ -1,13 +1,20 @@ -file(GLOB_RECURSE - CONTROLLER_SRCS - controller/*.cpp - controller/actuators/*.cpp - controller/sensors/*.cpp -) -file(GLOB_RECURSE - LEARNER_SRCS - learner/*.cpp +set (CMAKE_CXX_STANDARD 14) + +set(CONTROLLER_SRCS + controller/DifferentialCPG.cpp + controller/IMC/IMC.cpp + controller/IMC/FeedForwardNetwork.cpp + controller/IMC/InverseNetwork.cpp ) +set(LEARNER_SRCS + learner/Learner.cpp + learner/BayesianOptimizer.cpp + learner/HyperNEAT.cpp + learner/EA.cpp + learner/NIPES.cpp + learner/DifferentialEvo.cpp + learner/EA_misc/RandNum.cpp + learner/EA_misc/Novelty.cpp) # PKG-CONFIG find_package(PkgConfig REQUIRED) @@ -17,8 +24,12 @@ find_package(Boost REQUIRED COMPONENTS system) # Find Eigen3 - A lightweight C++ template library for vector and matrix math find_package(Eigen3 REQUIRED) - find_package(MultiNEAT REQUIRED) +find_package(libcmaes REQUIRED) + +# These dependencies are required for the AngleToTargetDetector Sensor +find_package(OpenCV REQUIRED) +#find_package(raspicam REQUIRED) #only on the raspberry side # Find NLOpt - Non Linear Optimization pkg_check_modules(NLOpt REQUIRED nlopt>=2.4) @@ -32,19 +43,30 @@ add_library(revolve-learners SHARED ${LEARNER_SRCS}) target_include_directories(revolve-controllers PUBLIC ${EIGEN3_INCLUDE_DIR} - PUBLIC ${Boost_INCLUDE_DIRS}) + PUBLIC ${Boost_INCLUDE_DIRS} + PUBLIC ${OpenCV_INCLUDE_DIRS}) target_include_directories(revolve-learners + PUBLIC ${EIGEN3_INCLUDE_DIR} PUBLIC ${Boost_INCLUDE_DIRS} PUBLIC ${LIMBO_DIR}/src PUBLIC ${NLOpt_INCLUDE_DIRS}) -target_include_directories(revolve-learners - PUBLIC ${NLOpt_LIBRARY_DIRS}) +target_compile_definitions(revolve-learners + PUBLIC USE_NLOPT=1 + PUBLIC CMAES) target_link_libraries(revolve-controllers + PUBLIC MultiNEAT::MultiNEAT + ${OpenCV_LIBS} + PUBLIC torch + PUBLIC libcmaes::cmaes + ) + +target_link_libraries(revolve-learners + revolve-controllers MultiNEAT::MultiNEAT) install(TARGETS revolve-controllers revolve-learners RUNTIME DESTINATION bin - LIBRARY DESTINATION lib) \ No newline at end of file + LIBRARY DESTINATION lib) diff --git a/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.cpp b/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.cpp new file mode 100644 index 0000000000..80fa89f158 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.cpp @@ -0,0 +1,477 @@ +/* + * Copyright (C) 2015-2018 Vrije Universiteit Amsterdam + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Description: TODO: + * Author: Milan Jelisavcic & Maarten van Hooft + * Date: December 29, 2018 + * + */ + +#include "BrokenDifferentialCPG.h" + +// STL macros +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Project headers +#include "actuators/Actuator.h" + +#include "sensors/Sensor.h" + +// TODO: Resolve odd behaviour at the end of the validation procedure +// This behaviour is not present if you directly load a trained controller + +// Define namespaces +using namespace revolve; + +/** + * Constructor for BrokenDifferentialCPG class. + * + * @param _model + * @param robot_config + */ +BrokenDifferentialCPG::BrokenDifferentialCPG( + const BrokenDifferentialCPG::ControllerParams ¶ms, + const std::vector< std::shared_ptr< Actuator > > &actuators, + std::shared_ptr angle_to_target_sensor) + : Controller(ControllerType::DIFFERENTIAL_CPG) + , next_state(nullptr) + , n_motors(actuators.size()) + , output(new double[actuators.size()]) + , angle_to_target_sensor(std::move(angle_to_target_sensor)) +{ + // Controller parameters + this->reset_neuron_random = params.reset_neuron_random; + this->init_neuron_state = params.init_neuron_state; + this->range_lb = -params.range_ub; + this->range_ub = params.range_ub; + this->use_frame_of_reference = params.use_frame_of_reference; + this->signal_factor_all_ = params.signal_factor_all; + this->signal_factor_mid = params.signal_factor_mid; + this->signal_factor_left_right = params.signal_factor_left_right; + this->abs_output_bound = params.abs_output_bound; + + if (use_frame_of_reference and not this->angle_to_target_sensor) { + std::clog << "WARNING!: use_frame_of_reference is activated but no angle_to_target_sensor camera is configured. " + "Disabling the use of the frame of reference" << std::endl; + use_frame_of_reference = false; + } + + size_t j=0; + for (const std::shared_ptr &actuator: actuators) + { + // Pass coordinates + auto coord_x = actuator->coordinate_x(); + auto coord_y = actuator->coordinate_y(); + this->motor_coordinates[{coord_x, coord_y}] = j; + + // Set frame of reference + int frame_of_reference = 0; + // We are a left neuron + if (coord_y < 0) + { + frame_of_reference = -1; + } + // We are a right neuron + else if (coord_y > 0) + { + frame_of_reference = 1; + } + + // Save neurons: bias/gain/state. Make sure initial states are of different sign. + this->neurons[{coord_x, coord_y, 1}] = {0.f, 0.f, this->init_neuron_state, frame_of_reference}; //Neuron A + this->neurons[{coord_x, coord_y, -1}] = {0.f, 0.f, -this->init_neuron_state, frame_of_reference}; // Neuron B + j++; + } + + // Add connections between neighbouring neurons + int i = 0; + for (const std::shared_ptr &actuator: actuators) + { + // Get name and x,y-coordinates of all neurons. + double x = actuator->coordinate_x(); + double y = actuator->coordinate_y(); + + // Continue to next iteration in case there is already a connection between the 1 and -1 neuron. + // These checks feel a bit redundant. + // if A->B connection exists. 
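+      // (connection keys are 6-tuples (x1, y1, z1, x2, y2, z2); z = +1 is the module's A-neuron and z = -1 its B-neuron, matching the neuron keys created above)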
+ if (this->connections.count({x, y, 1, x, y, -1}) > 0) + { + continue; + } + // if B->A connection exists: + if (this->connections.count({x, y, -1, x, y, 1}) > 0) + { + continue; + } + + // Loop over all positions. We call it neighbours, but we still need to check if they are a neighbour. + for (const std::shared_ptr &neighbour: actuators) + { + // Get information of this neuron (that we call neighbour). + double near_x = neighbour->coordinate_x(); + double near_y = neighbour->coordinate_y(); + + // If there is a node that is a Moore neighbour, we set it to be a neighbour for their A-nodes. + // Thus the connections list only contains connections to the A-neighbourhood, and not the + // A->B and B->A for some node (which makes sense). + double dist_x = std::fabs(x - near_x); + double dist_y = std::fabs(y - near_y); + + // TODO: Verify for non-spiders + if (std::fabs(dist_x + dist_y - 2) < 0.01) + { + if(std::get<0>(this->connections[{x, y, 1, near_x, near_y, 1}]) != 1 or + std::get<0>(this->connections[{near_x, near_y, 1, x, y, 1}]) != 1) + { + this->connections[{x, y, 1, near_x, near_y, 1}] = std::make_tuple(1, i); + this->connections[{near_x, near_y, 1, x, y, 1}] = std::make_tuple(1, i); + i++; + } + } + } + } + + // Initialise array of neuron states for Update() method + this->next_state = new double[this->neurons.size()]; + this->n_weights = (int)(this->connections.size()/2) + this->n_motors; + + // Loading Brain + + // Save weights for brain + assert(params.weights.size() == this->n_weights); + this->sample.resize(this->n_weights); + for(size_t j = 0; j < this->n_weights; j++) + { + this->sample(j) = params.weights.at(j); + } + + // Set ODE matrix at initialization + this->set_ode_matrix(); + + std::cout << "Brain has been loaded." << std::endl; +} + +/** + * Destructor + */ +BrokenDifferentialCPG::~BrokenDifferentialCPG() +{ + delete[] this->next_state; + delete[] this->output; +} + +/** + * Callback function that defines the movement of the robot + * + * @param _motors + * @param _sensors + * @param _time + * @param _step + */ +void BrokenDifferentialCPG::update( + const std::vector< std::shared_ptr < Actuator > > &actuators, + const std::vector< std::shared_ptr < Sensor > > &sensors, + const double time, + const double step) +{ + // Send new signals to the motors + this->step(time, step); + + unsigned int p = 0; + for (const auto &actuator: actuators) + { + actuator->write(this->output + p, step); + p += actuator->n_outputs(); + } +} + +/** + * Make matrix of weights A as defined in dx/dt = Ax. 
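 * Each weight w is stored antisymmetrically (+w and -w), so an isolated A/B neuron pair behaves as a harmonic oscillator with angular frequency |w|.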
+ * Element (i,j) specifies weight from neuron i to neuron j in the system of ODEs + */ +void BrokenDifferentialCPG::set_ode_matrix(){ + // Initiate new matrix + std::vector> matrix; + + // Fill with zeroes + for(size_t i =0; i neurons.size(); i++) + { + // Initialize row in matrix with zeros + std::vector< double > row; + for (size_t j = 0; j < this->neurons.size(); j++) + { + row.push_back(0); + } + matrix.push_back(row); + } + + // Process A<->B connections + int index = 0; + for(size_t i =0; i neurons.size(); i++) + { + // Get correct index + int c = 0; + if (i%2 == 0){ + c = i + 1; + } + else{ + c = i - 1; + } + + // Add a/b connection weight + index = (int)(i/2); + auto w = this->sample(index) * + (this->range_ub - this->range_lb) + this->range_lb; + matrix[i][c] = w; + matrix[c][i] = -w; + } + + // A<->A connections + index++; + int k = 0; + std::vector connections_seen; + + for (auto const &connection : this->connections) + { + // Get connection information + int x1, y1, z1, x2, y2, z2; + std::tie(x1, y1, z1, x2, y2, z2) = connection.first; + + // Find location of the two neurons in this->neurons list + int l1 = -1; + int l2 = -1; + int c = 0; + for(auto const &neuron : this->neurons) + { + int x, y, z; + std::tie(x, y, z) = neuron.first; + if (x == x1 and y == y1 and z == z1) + { + l1 = c; + } + else if (x == x2 and y == y2 and z == z2) + { + l2 = c; + } + // Update counter + c++; + } + + // Add connection to seen connections + if(l1 > l2) + { + int l1_old = l1; + l1 = l2; + l2 = l1_old; + } + std::string connection_string = std::to_string(l1) + "-" + std::to_string(l2); + + // if not in list, add to list + auto connections_list = std::find(connections_seen.begin(), connections_seen.end(), connection_string); + if(connections_list == connections_seen.end()) + { + connections_seen.push_back(connection_string); + } + // else continue to next iteration + else{ + continue; + } + + // Get weight + auto w = this->sample(index + k) * + (this->range_ub - this->range_lb) + this->range_lb; + + // Set connection in weight matrix + matrix[l1][l2] = w; + matrix[l2][l1] = -w; + k++; + } + + // Update matrix + this->ode_matrix = matrix; + + // Reset neuron state + this->reset_neuron_state(); +} + + +/** + * Set states back to original value (that is on the unit circle) + */ +void BrokenDifferentialCPG::reset_neuron_state(){ + int c = 0; + for(auto const &neuron : this->neurons) + { + // Get neuron properties + int x, y, z, frame_of_reference; + double bias ,gain ,state; + std::tie(x, y, z) = neuron.first; + std::tie(bias, gain, state, frame_of_reference) = neuron.second; + + if (z == -1) + { + // Neuron B + if (this->reset_neuron_random) + { + this->neurons[{x, y, z}] = {0.f, + 0.f, + ((double) rand() / (RAND_MAX))*2*this->init_neuron_state - this->init_neuron_state, + frame_of_reference}; + } + else + { + this->neurons[{x, y, z}] = {0.f, 0.f, -this->init_neuron_state, frame_of_reference}; + } + } + else + { + // Neuron A + if (this->reset_neuron_random) + { + this->neurons[{x, y, z}] = {0.f, + 0.f, + ((double) rand() / (RAND_MAX))*2*this->init_neuron_state - this->init_neuron_state, + frame_of_reference}; + } + else + { + this->neurons[{x, y, z}] = {0.f, 0.f, +this->init_neuron_state, frame_of_reference}; + } + } + c++; + } +} + +/** + * Step function that is called from within Update() + * + * @param _time + * @param _output + */ +void BrokenDifferentialCPG::step( + const double time, + const double dt) +{ + int neuron_count = 0; + for (const auto &neuron : this->neurons) + { + // 
Neuron.second accesses the second 3-tuple of a neuron, containing the bias/gain/state. + double recipient_bias, recipient_gain, recipient_state; + int frame_of_reference; + std::tie(recipient_bias, recipient_gain, recipient_state, frame_of_reference) = neuron.second; + + // Save for ODE + this->next_state[neuron_count] = recipient_state; + neuron_count++; + } + + // Copy values from next_state into x for ODEINT + state_type x(this->neurons.size()); + for (size_t i = 0; i < this->neurons.size(); i++) + { + x[i] = this->next_state[i]; + } + + // Perform one step + stepper.do_step( + [this](const state_type &x, state_type &dxdt, double t) + { + for(size_t i = 0; i < this->neurons.size(); i++) + { + dxdt[i] = 0; + for(size_t j = 0; j < this->neurons.size(); j++) + { + dxdt[i] += x[j]*this->ode_matrix[j][i]; + } + } + }, + x, + time, + dt); + + // Copy values into nextstate + for (size_t i = 0; i < this->neurons.size(); i++) + { + this->next_state[i] = x[i]; + } + + double angle_difference = 0.0; + double slow_down_factor = 1.0; + if (use_frame_of_reference) { + angle_difference = angle_to_target_sensor->detect_angle(); + const double frame_of_reference_slower_power = 7.0; + slow_down_factor = std::pow( + (180.0 - std::abs(angle_difference))/180.0, frame_of_reference_slower_power); + } + + // Loop over all neurons to actually update their states. Note that this is a new outer for loop + auto i = 0; auto j = 0; + for (auto &neuron : this->neurons) + { + // Get bias gain and state for this neuron. Note that we don't take the coordinates. + // However, they are implicit as their order did not change. + double bias, gain, state; + int frame_of_reference; + std::tie(bias, gain, state, frame_of_reference) = neuron.second; + double x, y, z; + std::tie(x, y, z) = neuron.first; + neuron.second = {bias, gain, this->next_state[i], frame_of_reference}; + j = this->motor_coordinates[{x,y}]; + // Should be one, as output should be based on +1 neurons, which are the A neurons + if (i % 2 == 1) + { + // TODO: Add Milan's function here as soon as things are working a bit + // f(a) = (w_ao*a - bias)*gain + + // Apply saturation formula + auto x_input = this->next_state[i]; + + double output_j = this->output_function(x_input); + + // Use frame of reference + if(use_frame_of_reference and frame_of_reference != 0) + { + if ((frame_of_reference == 1 and angle_difference < 0) or + (frame_of_reference == -1 and angle_difference > 0)) //TODO >= / <= ? + { + output_j *= slow_down_factor; + //std::cout << "Slow down " << x <<','<< y <<','<< z << " with factor " << slow_down_factor << std::endl; + //output_j = 0; + } + } + + this->output[j] = output_j; + } + i++; + } +} + +double BrokenDifferentialCPG::output_function(double input) const +{ + return this->signal_factor_all_ + * this->abs_output_bound + * ( + (2.0) / (1.0 + std::pow(2.718, -2.0 * input / this->abs_output_bound)) + - 1 + ); +} diff --git a/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.h b/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.h new file mode 100644 index 0000000000..799dd2573e --- /dev/null +++ b/cpprevolve/revolve/brains/controller/BrokenDifferentialCPG.h @@ -0,0 +1,147 @@ +// +// Created by matteo on 14/06/19. 
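+// Construction sketch (illustrative only: the parameter values below are assumptions, and
+// `actuators` / `n_weights` stand for whatever the owning robot plugin already provides):
+//
+//   revolve::BrokenDifferentialCPG::ControllerParams params;
+//   params.reset_neuron_random      = false;
+//   params.use_frame_of_reference   = false;
+//   params.init_neuron_state        = 0.707;   // assumed starting point on the unit circle
+//   params.range_ub                 = 1.0;
+//   params.signal_factor_all        = 4.0;     // hypothetical output gains
+//   params.signal_factor_mid        = 2.5;
+//   params.signal_factor_left_right = 2.5;
+//   params.abs_output_bound         = 1.0;
+//   params.weights.assign(n_weights, 0.5);     // one entry per A<->B and per A<->A connection
+//   auto brain = std::make_unique<revolve::BrokenDifferentialCPG>(params, actuators);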
+// + +#pragma once + +#include "Controller.h" +#include "actuators/Actuator.h" +#include "sensors/Sensor.h" +#include "sensors/AngleToTargetDetector.h" +#include +#include +#include + +typedef std::vector< double > state_type; + +namespace revolve +{ +class BrokenDifferentialCPG + : public Controller +{ +public: + struct ControllerParams { + bool reset_neuron_random; + bool use_frame_of_reference; + double init_neuron_state; + double range_ub; + double signal_factor_all; + double signal_factor_mid; + double signal_factor_left_right; + double abs_output_bound; + std::vector< double > weights; + }; + + /// \brief Constructor + /// \param[in] _modelName Name of the robot + /// \param[in] _node The brain node + /// \param[in] _motors Reference to a motor list, it be reordered + /// \param[in] _sensors Reference to a sensor list, it might be reordered + BrokenDifferentialCPG( + const ControllerParams ¶ms, + const std::vector< std::shared_ptr < Actuator > > &_actuators, + std::shared_ptr angle_to_target_sensor = nullptr); + + /// \brief Destructor + virtual ~BrokenDifferentialCPG(); + + /// \brief The default update method for the controller + /// \param[in] _motors Motor list + /// \param[in] _sensors Sensor list + /// \param[in] _time Current world time + /// \param[in] _step Current time step + virtual void update( + const std::vector< std::shared_ptr < Actuator > > &actuators, + const std::vector< std::shared_ptr < Sensor > > &sensors, + const double _time, + const double _step) override; + +protected: + + void step( + const double time, + const double step); + + void set_ode_matrix(); + +private: + /// \brief Function that resets neuron state + void reset_neuron_state(); + + /// \brief function that transforms the value of the CPG A-neurons and returns the correct output for the actuators + double output_function(double input) const; + +public: + std::map< std::tuple< double, double >, size_t > motor_coordinates; + +protected: + /// \brief Register of motor IDs and their x,y-coordinates +// std::map< std::string, std::tuple< int, int > > positions; + + /// \brief Register of individual neurons in x,y,z-coordinates + /// \details x,y-coordinates define position of a robot's module and + // z-coordinate define A or B neuron (z=1 or -1 respectively). Stored + // values are a bias, gain, state and frame of reference of each neuron. + std::map< std::tuple< int, int, int >, std::tuple< double, double, double, int > > + neurons; + + /// \brief Register of connections between neighnouring neurons + /// \details Coordinate set of two neurons (x1, y1, z1) and (x2, y2, z2) + // define a connection. The second tuple contains 1: the connection value and + // 2: the weight index corresponding to this connection. 
+ std::map< std::tuple< int, int, int, int, int, int >, std::tuple > + connections; + + /// \brief Runge-Kutta 45 stepper + boost::numeric::odeint::runge_kutta4< state_type > stepper; + + /// \brief Used for ODE-int + std::vector> ode_matrix; + +private: + /// \brief Used to determine the next state array + double *next_state; + + /// \brief Used to determine the output to the motors array + double *output; + + /// \brief Limbo optimizes in [0,1] + double range_lb; + + /// \brief Limbo optimizes in [0,1] + double range_ub; + + /// \brief Loaded sample + Eigen::VectorXd sample; + + /// \brief The number of weights to optimize + size_t n_weights; + + /// \brief Factor to multiply output signal with + double signal_factor_all_; + + /// \brief Factor to multiply output signal with + double signal_factor_mid; + + /// \brief Factor to multiply output signal with + double signal_factor_left_right; + + /// \brief When reset a neuron state,do it randomly: + bool reset_neuron_random; + + /// \brief Holds the number of motors in the robot + size_t n_motors; + + /// \brief Initial neuron state + double init_neuron_state; + + /// \brief Use frame of reference {-1,0,1} version or not + bool use_frame_of_reference; + + double abs_output_bound; + +// targeted locomotion stuff + std::shared_ptr angle_to_target_sensor; +}; + +} diff --git a/cpprevolve/revolve/brains/controller/Controller.h b/cpprevolve/revolve/brains/controller/Controller.h index 4be01b3020..d0d0f5bb62 100644 --- a/cpprevolve/revolve/brains/controller/Controller.h +++ b/cpprevolve/revolve/brains/controller/Controller.h @@ -12,12 +12,23 @@ namespace revolve { +class DifferentialCPG; class Controller { public: + enum ControllerType { + NONE = 0, + NEURAL_NETWORK, + SPLINES, + DIFFERENTIAL_CPG, + // add new controller types here + } const controller_type; + /// \brief Constructor - explicit Controller() {} + explicit Controller(ControllerType controller_type) + : controller_type(controller_type) + {} /// \brief Deconstructor virtual ~Controller() {} @@ -28,6 +39,8 @@ class Controller const double _time, const double _step ) = 0; + + virtual DifferentialCPG* into_DifferentialCPG() { return nullptr; } }; } diff --git a/cpprevolve/revolve/brains/controller/DifferentialCPG.cpp b/cpprevolve/revolve/brains/controller/DifferentialCPG.cpp index 46a21cc525..c109694c69 100644 --- a/cpprevolve/revolve/brains/controller/DifferentialCPG.cpp +++ b/cpprevolve/revolve/brains/controller/DifferentialCPG.cpp @@ -26,12 +26,9 @@ #include #include #include -#include -#include #include #include #include -#include #include #include @@ -52,20 +49,23 @@ using namespace revolve; * @param robot_config */ DifferentialCPG::DifferentialCPG( - const DifferentialCPG::ControllerParams params, - const std::vector> &actuators) - : next_state(nullptr) + const DifferentialCPG::ControllerParams ¶ms, + const std::vector> &actuators, + std::shared_ptr angle_to_target_sensor) + : Controller(ControllerType::DIFFERENTIAL_CPG) + , next_state(nullptr) , n_motors(actuators.size()) , output(new double[actuators.size()]) - , sample(actuators.size(), 0) + , angle_to_target_sensor(std::move(angle_to_target_sensor)) + , connection_weights(actuators.size(), 0) { this->init_params_and_connections(params, actuators); // Save weights for brain assert(params.weights.size() == n_weights); - sample.resize(n_weights, 0); + connection_weights.resize(n_weights, 0); for(size_t j = 0; j < n_weights; j++) { - sample.at(j) = params.weights.at(j); + connection_weights.at(j) = params.weights.at(j); } // Set 
ODE matrix at initialization @@ -82,67 +82,20 @@ DifferentialCPG::DifferentialCPG( * @param config_cppn_genome */ DifferentialCPG::DifferentialCPG( - DifferentialCPG::ControllerParams params, + const DifferentialCPG::ControllerParams ¶ms, const std::vector> &actuators, - const NEAT::Genome &gen) - : next_state(nullptr) + const NEAT::Genome &gen, + std::shared_ptr angle_to_target_sensor) + : Controller(ControllerType::DIFFERENTIAL_CPG) + , next_state(nullptr) , n_motors(actuators.size()) , output(new double[actuators.size()]) - , sample(actuators.size(), 0) + , angle_to_target_sensor(std::move(angle_to_target_sensor)) + , connection_weights(actuators.size(), 0) { this->init_params_and_connections(params, actuators); - // build the NN according to the genome - NEAT::NeuralNetwork net; - gen.BuildPhenotype(net); - - // get weights for each connection - // assuming that connections are distinct for each direction - sample.resize(n_weights, 0); - std::vector inputs(8); - - for(const std::pair< const std::tuple< int, int, int>, size_t > &motor: motor_coordinates) - { - size_t k = motor.second; - - // convert tuple to vector - std::tie(inputs[0], inputs[1], inputs[2]) = motor.first; - inputs[3] = 1; - std::tie(inputs[4], inputs[5], inputs[6]) = motor.first; - inputs[7] = -1; - - net.Input(inputs); - net.Activate(); - double weight = net.Output()[0]; -#ifdef DifferentialCPG_PRINT_INFO - std::cout << "Creating weight [" - << inputs[0] << ';' << inputs[1] << ';' << inputs[2] << ';' << inputs[3] << '-' - << inputs[4] << ';' << inputs[5] << ';' << inputs[6] << ';' << inputs[7] - << "] to sample[" << k << "]\t-> " << weight << std::endl; -#endif - sample.at(k) = weight; // order of weights corresponds to order of connections. - } - - for(const std::pair, int > &con : connections) - { - int k = con.second; - // convert tuple to vector - std::tie(inputs[0], inputs[1], inputs[2], inputs[3], inputs[4], inputs[5], inputs[6], inputs[7]) = con.first; - net.Input(inputs); - net.Activate(); - double weight = net.Output()[0]; -#ifdef DifferentialCPG_PRINT_INFO - std::cout << "Creating weight [" - << inputs[0] << ';' << inputs[1] << ';' << inputs[2] << ';' << inputs[3] << '-' - << inputs[4] << ';' << inputs[5] << ';' << inputs[6] << ';' << inputs[7] - << "] to sample[" << k << "]\t-> " << weight << std::endl; -#endif - sample.at(k) = weight; // order of weights corresponds to order of connections. - } - - // Set ODE matrix at initialization - set_ode_matrix(); - + this->load_genome_to_controller(gen); std::cout << "DifferentialCPG brain with CPPN configuration has been loaded." << std::endl; } @@ -154,10 +107,15 @@ void DifferentialCPG::init_params_and_connections(const ControllerParams ¶ms this->range_lb = -params.range_ub; this->range_ub = params.range_ub; this->use_frame_of_reference = params.use_frame_of_reference; - this->signal_factor_all_ = params.signal_factor_all; - this->signal_factor_mid = params.signal_factor_mid; - this->signal_factor_left_right = params.signal_factor_left_right; + this->output_signal_factor = params.output_signal_factor; this->abs_output_bound = params.abs_output_bound; + this->connection_weights = params.weights; + + if (use_frame_of_reference and not angle_to_target_sensor) { + std::clog << "WARNING!: use_frame_of_reference is activated but no angle_to_target_sensor camera is configured. 
" + "Disabling the use of the frame of reference" << std::endl; + use_frame_of_reference = false; + } size_t j=0; for (const std::shared_ptr &actuator: actuators) @@ -175,7 +133,7 @@ void DifferentialCPG::init_params_and_connections(const ControllerParams ¶ms { frame_of_reference = -1; } - // We are a right neuron + // We are a right neuron else if (coord_x > 0) { frame_of_reference = 1; @@ -237,7 +195,7 @@ void DifferentialCPG::init_params_and_connections(const ControllerParams ¶ms std::cout << "Creating connnection [" << x << ';' << y << ';' << z << ';' << 1 << '-' << near_x << ';' << near_y << ';' << near_z << ';' << 1 - << "] to sample[" << i << ']' << std::endl; + << "] to connection_weights[" << i << ']' << std::endl; #endif this->connections[{x, y, z, 1, near_x, near_y, near_z, 1}] = i; //this->connections[{near_x, near_y, near_z, 1, x, y, z, 1}] = i; @@ -263,6 +221,78 @@ DifferentialCPG::~DifferentialCPG() delete[] this->output; } +void DifferentialCPG::set_connection_weights(std::vector weights){ + this->connection_weights = weights; + this->set_ode_matrix(); +} + +void DifferentialCPG::load_genome_to_controller(const NEAT::Genome &genome) +{ + // build the NN according to the genome + NEAT::NeuralNetwork net; + genome.BuildPhenotype(net); + unsigned int net_depth =99999;// net.CalculateNetworkDepth(); + + // get weights for each connection + // assuming that connections are distinct for each direction + connection_weights.resize(n_weights, 0); + std::vector inputs(8); + + for(const std::pair< const std::tuple< int, int, int>, size_t > &motor: motor_coordinates) + { + size_t k = motor.second; + + // convert tuple to vector + std::tie(inputs[0], inputs[1], inputs[2]) = motor.first; + inputs[3] = 1; + std::tie(inputs[4], inputs[5], inputs[6]) = motor.first; + inputs[7] = -1; + inputs[8] = 1; + + net.Flush(); + net.Input(inputs); + for (int i=0; i " << weight << std::endl; +#endif + this->connection_weights.at(k) = weight; // order of weights corresponds to order of connections. + } + + + for(const std::pair, int > &con : connections) + { + int k = con.second; + // convert tuple to vector + std::tie(inputs[0], inputs[1], inputs[2], inputs[3], inputs[4], inputs[5], inputs[6], inputs[7]) = con.first; + inputs[8] = 1; + + net.Flush(); + net.Input(inputs); + for (int i=0; i " << weight << std::endl; +#endif + this->connection_weights.at(k) = weight; // order of weights corresponds to order of connections. 
+ } + + // Set ODE matrix at initialization + this->set_ode_matrix(); +} + +std::vector DifferentialCPG::get_connection_weights(){ + return this->connection_weights; +} + /** * Callback function that defines the movement of the robot * @@ -306,7 +336,7 @@ void DifferentialCPG::set_ode_matrix() matrix.emplace_back(row); } - // Process A<->A connections + // Process A<->B connections int index = 0; for (const Neuron &neuron: neurons) { @@ -321,10 +351,10 @@ void DifferentialCPG::set_ode_matrix() std::cout << "Setting connection [" << x << ';' << y << ';' << z << ';' << 1 << '-' << x << ';' << y << ';' << z << ';' << -1 - << "] to sample[" << k << "]\t-> " << this->sample.at(k) << std::endl; + << "] to connection_weights[" << k << "]\t-> " << this->connection_weights.at(k) << std::endl; #endif - auto weight = this->sample.at(k) * - (this->range_ub - this->range_lb) + this->range_lb; + auto weight = this->connection_weights.at(k) * + (this->range_ub - this->range_lb) + this->range_lb; size_t i = index; size_t c = index + 1; matrix.at(i).at(c) = weight; @@ -332,7 +362,7 @@ void DifferentialCPG::set_ode_matrix() index+=2; } - // A<->B connections + // A<->A connections index++; int k = 0; std::vector connections_seen; @@ -384,7 +414,7 @@ void DifferentialCPG::set_ode_matrix() else // else continue to next iteration { // actually, we should never encounter this, every connection should appear only once - std::cerr << "Should not see the same connection appearing twice" << std::endl; + std::cerr << "Should not see the same connection appearing twice: " << connection_string << std::endl; throw std::runtime_error("Should not see the same connection appearing twice"); continue; } @@ -394,11 +424,11 @@ void DifferentialCPG::set_ode_matrix() std::cout << "Setting connection [" << x1 << ';' << y1 << ';' << z1 << ';' << w1 << '-' << x2 << ';' << y2 << ';' << z2 << ';' << w2 - << "] to sample[" << sample_index << "]\t-> " << this->sample.at(sample_index) << std::endl; + << "] to connection_weights[" << sample_index << "]\t-> " << this->connection_weights.at(sample_index) << std::endl; #endif // Get weight - const auto w = this->sample.at(sample_index) * + const auto w = this->connection_weights.at(sample_index) * (this->range_ub - this->range_lb) + this->range_lb; // Set connection in weight matrix @@ -480,11 +510,11 @@ void DifferentialCPG::step( neuron_count++; } - // Copy values from next_state into x for ODEINT - state_type x(this->neurons.size()); + // Copy values from next_state into x_state for ODEINT + state_type x_state(this->neurons.size()); for (size_t i = 0; i < this->neurons.size(); i++) { - x[i] = this->next_state[i]; + x_state[i] = this->next_state[i]; } // Perform one step @@ -500,14 +530,31 @@ void DifferentialCPG::step( } } }, - x, + x_state, time, dt); // Copy values into nextstate for (size_t i = 0; i < this->neurons.size(); i++) { - this->next_state[i] = x[i]; + this->next_state[i] = x_state[i]; + } + +// // Load the angle value from the sensor +// double angle_difference = this->angle_to_goal - move_angle; +// if (angle_difference > 180) +// angle_difference -= 360; +// else if (angle_difference < -180) +// angle_difference += 360; +// this->angle_diff = angle_difference; + double angle_difference = 0.0; + double slow_down_factor = 1.0; + if (use_frame_of_reference) { + angle_difference = angle_to_target_sensor->detect_angle(); + std::cout << "Angle detected " << angle_difference << std::endl; + const double frame_of_reference_slower_power = 7.0; + slow_down_factor = std::pow( + 
(180.0 - std::abs(angle_difference))/180.0, frame_of_reference_slower_power); } // Loop over all neurons to actually update their states. Note that this is a new outer for loop @@ -531,30 +578,30 @@ void DifferentialCPG::step( // Apply saturation formula auto x_input = this->next_state[i]; + double output_j = this->output_function(x_input); + // Use frame of reference - if(use_frame_of_reference) + if(use_frame_of_reference and frame_of_reference != 0) { - - if (std::abs(frame_of_reference) == 1) - { - this->output[j] = this->signal_factor_left_right*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0 * x_input / this->abs_output_bound)) - 1); - } - else if (frame_of_reference == 0) + if ((frame_of_reference == 1 and angle_difference < 0) or + (frame_of_reference == -1 and angle_difference > 0)) //TODO >= / <= ? { - this->output[j] = this->signal_factor_mid*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0 * x_input / this->abs_output_bound)) - 1); + output_j *= slow_down_factor; } - else - { - std::clog << "WARNING: frame_of_reference not in {-1,0,1}." << std::endl; - } - - } - // Don't use frame of reference - else - { - this->output[j] = this->signal_factor_all_*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0 * x_input / this->abs_output_bound)) - 1); } + + this->output[j] = output_j; } i++; } } + +double DifferentialCPG::output_function(double input) const +{ + return this->output_signal_factor + * this->abs_output_bound + * ( + (2.0) / (1.0 + std::pow(2.718, -2.0 * input / this->abs_output_bound)) + - 1 + ); +} diff --git a/cpprevolve/revolve/brains/controller/DifferentialCPG.h b/cpprevolve/revolve/brains/controller/DifferentialCPG.h index f21c993b34..c539f25859 100644 --- a/cpprevolve/revolve/brains/controller/DifferentialCPG.h +++ b/cpprevolve/revolve/brains/controller/DifferentialCPG.h @@ -8,6 +8,7 @@ #include "Controller.h" #include "actuators/Actuator.h" #include "sensors/Sensor.h" +#include "sensors/AngleToTargetDetector.h" #include #include @@ -27,9 +28,7 @@ class DifferentialCPG bool use_frame_of_reference; double init_neuron_state; double range_ub; - double signal_factor_all; - double signal_factor_mid; - double signal_factor_left_right; + double output_signal_factor; double abs_output_bound; std::vector< double > weights; /// can be null, indicating that there is no map @@ -40,17 +39,19 @@ class DifferentialCPG /// \param[in] params Parameters for the controller /// \param[in] _actuators Reference to a actuator list DifferentialCPG( - DifferentialCPG::ControllerParams params, - const std::vector> &_actuators); + const DifferentialCPG::ControllerParams ¶ms, + const std::vector> &_actuators, + std::shared_ptr angle_to_target_sensor = nullptr); /// \brief Constructor for Controller with config CPPN /// \param[in] params Parameters for the controller /// \param[in] _actuators Reference to a actuator list /// \param[in] config_cppn_genome Reference to the genome for configuring the weights in CPG DifferentialCPG( - DifferentialCPG::ControllerParams params, + const DifferentialCPG::ControllerParams ¶ms, const std::vector> &_actuators, - const NEAT::Genome &config_cppn_genome); + const NEAT::Genome &config_cppn_genome, + std::shared_ptr angle_to_target_sensor = nullptr); /// \brief Destructor virtual ~DifferentialCPG(); @@ -63,14 +64,23 @@ class DifferentialCPG virtual void update( const std::vector> &actuators, const std::vector> &sensors, - const double _time, - const double _step) override; + double _time, + double _step) override; + + /// \brief Set the 
connection weights of the Controller and make sure the matrix is set appropriately + /// \param[in] The weights to be set + void set_connection_weights(std::vector weights); + + void load_genome_to_controller(const NEAT::Genome &genome); + + DifferentialCPG* into_DifferentialCPG() override { return this; }; + + /// \brief Return the weights of the connections + std::vector get_connection_weights(); protected: - void step( - const double time, - const double step); + void step(double time, double step); void init_params_and_connections(const ControllerParams ¶ms, const std::vector> &actuators); @@ -80,6 +90,9 @@ class DifferentialCPG /// \brief Function that resets neuron state void reset_neuron_state(); + /// \brief function that transforms the value of the CPG A-neurons and returns the correct output for the actuators + double output_function(double input) const; + public: std::map< std::tuple< int, int, int >, size_t > motor_coordinates; @@ -111,6 +124,9 @@ class DifferentialCPG /// \brief Used for ODE-int std::vector> ode_matrix; + /// \brief Angle sensor holder + std::shared_ptr<::revolve::AngleToTargetDetector> angle_to_target_sensor; + private: /// \brief Used to determine the next state array double *next_state; @@ -124,20 +140,14 @@ class DifferentialCPG /// \brief Limbo optimizes in [0,1] double range_ub; - /// \brief Loaded sample - std::vector sample; + /// \brief Loaded weights + std::vector connection_weights; /// \brief The number of weights to optimize size_t n_weights; /// \brief Factor to multiply output signal with - double signal_factor_all_; - - /// \brief Factor to multiply output signal with - double signal_factor_mid; - - /// \brief Factor to multiply output signal with - double signal_factor_left_right; + double output_signal_factor; /// \brief When reset a neuron state,do it randomly: bool reset_neuron_random; @@ -152,7 +162,8 @@ class DifferentialCPG bool use_frame_of_reference; double abs_output_bound; - }; + +}; } diff --git a/cpprevolve/revolve/brains/controller/IMC/CMakeLists.txt b/cpprevolve/revolve/brains/controller/IMC/CMakeLists.txt new file mode 100644 index 0000000000..4fda9addc5 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/CMakeLists.txt @@ -0,0 +1,183 @@ +#cmake_minimum_required(VERSION 3.0 FATAL_ERROR) +#project(IMC) +# +#if (${CUDA}) +# set(Torch_DIR "/home/fuda/Projects/pytorch/libtorch_cuda/share/cmake/Torch") +#else () +# set(Torch_DIR "/home/fuda/Projects/pytorch/libtorch/share/cmake/Torch") +#endif() +# +## CMake flag to help local projects find the build dir +#set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_SOURCE_DIR}/build/lib") +#set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_SOURCE_DIR}/build/lib") +#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_SOURCE_DIR}/build") +# +#include_directories(${CMAKE_SOURCE_DIR}/cpprevolve) +# +## Pass source dir to preprocessor +#add_definitions(-DSOURCE_DIR=${CMAKE_SOURCE_DIR}) +# +## Compiler options +## TODO This currently assumes GCC, add Windows support in due time +#add_definitions(-pedantic -Wno-long-long -Wall -Wextra -Wformat=2 +# -Wredundant-decls -Wwrite-strings -Wmissing-include-dirs +# -Wswitch-enum -Wuninitialized +# -Wswitch-default -Winit-self -Wfloat-equal -fPIC ) +# +#set (CMAKE_CXX_STANDARD 11) +# +## Debug Flags +#set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 -ggdb3 -DDEBUG") +# +## Release flags +##set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3 -funroll-loops -finline-functions -fomit-frame-pointer -DNDEBUG") +# +# +# +## Finding dependencies +## 
_____________________________________________________________________________ +# +## PKG-CONFIG +#find_package(PkgConfig REQUIRED) +# +## Find Boost +#find_package(Boost REQUIRED COMPONENTS system) +#include_directories(${Boost_INCLUDE_DIRS}) +# +## Find Eigen3 - A lightweight C++ template library for vector and matrix math +#find_package(Eigen3 REQUIRED) +#include_directories(${EIGEN3_INCLUDE_DIR}) +# +## Find NLOpt - Non Linear Optimization +#pkg_check_modules(NLOpt REQUIRED nlopt>=2.4) +#include_directories(${NLOpt_INCLUDE_DIRS}) +#link_directories(${NLOpt_LIBRARY_DIRS}) +# +## Find Limbo - LIbrary for Model-Based Optimization +#set(LIMBO_DIR ${CMAKE_SOURCE_DIR}/thirdparty/limbo) +#set(LIMBO_DEFINES USE_NLOPT) +#include_directories(${LIMBO_DIR}/src) +# +## Find GSL - GNU Scientific Library +#find_package(GSL REQUIRED) +#include_directories(${GSL_INCLUDE_DIRS}) +# +## Find Yaml-cpp +#find_package(yaml-cpp REQUIRED) +#include_directories(${YAML_CPP_INCLUDE_DIR}) +# +## Find Gazebo +#set(LOCAL_GAZEBO_DIR "/home/fuda/gazebo/cmake") +#if (LOCAL_GAZEBO_DIR) +# find_package(gazebo 10 REQUIRED CONFIG +# PATHS "${LOCAL_GAZEBO_DIR}" +# NO_DEFAULT_PATH) +# message(WARNING "Using local Gazebo @ ${gazebo_DIR}") +#else() +# find_package(gazebo 10 REQUIRED) +#endif() +#include_directories(${GAZEBO_INCLUDE_DIRS}) +#link_directories(${GAZEBO_LIBRARY_DIRS}) +# +## Gazebo dependencies +## Find avcodec +#pkg_check_modules(libavcodec libavcodec) +#if (NOT libavcodec_FOUND) +# BUILD_WARNING ("libavcodec not found. Audio-video capabilities of gazebo are probably disabled.") +#else() +# include_directories(${libavcodec_INCLUDE_DIRS}) +# link_directories(${libavcodec_LIBRARY_DIRS}) +#endif () +# +## Find Protobuf +## TODO: This part is currently a mess, and it should be handeled better +#find_package(Protobuf REQUIRED) +# +## Find the Protobuf import directory for Gazebo. Found in this +## tutorial: http://gazebosim.org/tutorials?tut=custom_messages&cat=transport +#set(GAZEBO_PROTOBUF_DIR) +#foreach(ITR ${GAZEBO_INCLUDE_DIRS}) +# if(ITR MATCHES ".*gazebo-[0-9.]+$") +# set(GAZEBO_PROTO_PATH "${ITR}/gazebo/msgs/proto") +# set(GAZEBO_PROTO_INCLUDE "${ITR}/gazebo/msgs") +# endif() +#endforeach() +#include_directories( +# ${CMAKE_SOURCE_DIR} +# ${PROTOBUF_INCLUDE_DIRS} +# ${GAZEBO_PROTO_INCLUDE} +#) +# +## Add Gazebo C++ flags (this includes c++11) +#list(APPEND CMAKE_CXX_FLAGS "${GAZEBO_CXX_FLAGS}") +# +## Directory where the .proto files reside within revolve +#set(SPEC_DIR "msgs") +# +## All protobuf files we need, including the Gazebo ones +#file(GLOB_RECURSE REVOLVE_PROTOS ${SPEC_DIR}/*.proto) +# +## Do the protobuf generation by hand for more flexibility. The files are +## generated in a subdirectory such that it can potentially be added to the +## include path for work-in-progress projects I co-develop with this. 
+## Copied most of this code from +## http://stackoverflow.com/questions/29346488/protobuf-generate-cpp-not-generating-src-and-header-files +## Also see +## https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake +#set(PROTO_SRCS) +#set(PROTO_HDRS) +#set(PROTO_OUTPUT_BASE ${CMAKE_CURRENT_BINARY_DIR}/../) +# +# +## Include the directory where the protobuf files will be placed +#include_directories(${PROTO_OUTPUT_BASE}) +# +#file(MAKE_DIRECTORY ${PROTO_OUTPUT_BASE}/revolve/msgs) +#foreach(RV_PROTO ${REVOLVE_PROTOS}) +# get_filename_component(RV_ABS_PROTO ${RV_PROTO} ABSOLUTE) +# get_filename_component(RV_NAME_PROTO ${RV_PROTO} NAME_WE) +# get_filename_component(RV_DIR_PROTO ${RV_PROTO} DIRECTORY) +# +# list(APPEND PROTO_SRCS +# "${PROTO_OUTPUT_BASE}/revolve/msgs/${RV_NAME_PROTO}.pb.cc") +# list(APPEND PROTO_HDRS +# "${PROTO_OUTPUT_BASE}/revolve/msgs/${RV_NAME_PROTO}.pb.h") +# +# add_custom_command( +# OUTPUT "${PROTO_OUTPUT_BASE}/revolve/msgs/${RV_NAME_PROTO}.pb.cc" +# "${PROTO_OUTPUT_BASE}/revolve/msgs/${RV_NAME_PROTO}.pb.h" +# COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} +# ARGS -I ${RV_DIR_PROTO} +# -I ${GAZEBO_PROTO_PATH} +# --cpp_out ${PROTO_OUTPUT_BASE}/revolve/msgs ${RV_ABS_PROTO} +# DEPENDS ${RV_ABS_PROTO} +# COMMENT "Running C++ protocol buffer compiler on ${RV_PROTO}" +# VERBATIM ) +#endforeach() +# +# +# +## Tell the compiler these files were generated +#set_source_files_properties( +# ${PROTO_SRCS} ${PROTO_HDRS} PROPERTIES GENERATED TRUE) +# +## Source subdirectories +## _____________________________________________________________________________ +# +## Plugin C++ files +#file(GLOB_RECURSE +# REVOLVE_GZ_SRC +# brains/*.cpp +# motors/*.cpp +# sensors/*.cpp +# util/*.cpp +# ) +# +# +# +# +#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}") +# +#add_executable(Test_IMC Test_IMC.cpp FeedForwardNetwork.cpp FeedForwardNetwork.h InverseNetwork.cpp InverseNetwork.h) +#target_link_libraries(Test_IMC "${TORCH_LIBRARIES}") +#set_property(TARGET Test_IMC PROPERTY CXX_STANDARD 14) \ No newline at end of file diff --git a/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.cpp b/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.cpp new file mode 100644 index 0000000000..57cb23a344 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.cpp @@ -0,0 +1,48 @@ +/* +* Copyright (C) 2017 Vrije Universiteit Amsterdam +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* +* Author: Fuda van Diggelen +* +*/ +#include "torch/torch.h" +#include "FeedForwardNetwork.h" + + +///////////////////////////////////////////////// +FeedForwardNetworkImpl::FeedForwardNetworkImpl( + int actuatorsize) + : linear_In(actuatorsize * 3, actuatorsize * 3), + linear_H1(actuatorsize * 3, actuatorsize * 3), + linear_Out(actuatorsize * 3,actuatorsize* 2) +{ + // register_module() is needed if we want to use the parameters() method later on + register_module("linear_In", linear_In); + register_module("linear_H1", linear_H1); + register_module("linear_Out", linear_Out); + + bias_in = register_parameter("bias_in", torch::randn(actuatorsize * 3)); + bias_h1 = register_parameter("bias_h1", torch::randn(actuatorsize * 3)); + bias_out = register_parameter("bias_out", torch::randn(actuatorsize * 2)); +} + + +torch::Tensor FeedForwardNetworkImpl::forward(torch::Tensor x){ + x = torch::relu(linear_In->forward(x)+bias_in); + x = torch::relu(linear_H1->forward(x)+bias_h1); + x = torch::tanh(linear_Out->forward(x)+bias_out); + return x; +} + +FeedForwardNetworkImpl::~FeedForwardNetworkImpl() = default; + diff --git a/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.h b/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.h new file mode 100644 index 0000000000..e40c1e0ed4 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/FeedForwardNetwork.h @@ -0,0 +1,47 @@ +/* +* Copyright (C) 2017 Vrije Universiteit Amsterdam +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* +* Description: Position based (servo) motor +* Author: Elte Hupkes +* +*/ + + +#include +#include "iostream" + +#include "torch/torch.h" + +class FeedForwardNetworkImpl : public torch::nn::Module { + /// \brief Constructor +public: explicit FeedForwardNetworkImpl( + int actuatorsize + ); + + /// \brief Destructor +public: ~FeedForwardNetworkImpl() override; + + /// \brief Forward function +public: torch::Tensor forward(torch::Tensor x); + +private: + /// \brief Layers + torch::nn::Linear linear_In, linear_H1, linear_Out; + + /// \brief Biases + torch::Tensor bias_in, bias_h1, bias_out; + +}; + +TORCH_MODULE(FeedForwardNetwork); \ No newline at end of file diff --git a/cpprevolve/revolve/brains/controller/IMC/IMC.cpp b/cpprevolve/revolve/brains/controller/IMC/IMC.cpp new file mode 100644 index 0000000000..402a9529ef --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/IMC.cpp @@ -0,0 +1,319 @@ +// +// Created by matteo on 14/06/19. 
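+// Wiring sketch (illustrative; `cpg_params`, `actuators` and the model name are assumptions,
+// supplied in practice by the owning brain/plugin code):
+//
+//   revolve::IMC::IMCParams imc_params;
+//   imc_params.model_name = "/spider";   // hypothetical sub-directory under experiments/IMC/output
+//   auto cpg = std::make_unique<revolve::DifferentialCPG>(cpg_params, actuators);
+//   auto controller = std::make_unique<revolve::IMC>(std::move(cpg), actuators, imc_params);
+//
+// Each update() call then runs the wrapped controller against FakeActuator buffers, reads the
+// real actuator positions/velocities, and corrects the reference through the inverse and
+// feed-forward torch networks before writing to the real actuators.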
+// +#include "IMC.h" +#include "../sensors/Sensor.h" +#include "../actuators/Actuator.h" +#include "../Controller.h" +#include "torch/torch.h" + +#include +#include +#include +#include "iostream" +#include + +const std::string project_root = "."; + + +using namespace revolve; + +IMC::IMC(std::unique_ptr<::revolve::Controller> wrapped_controller, + const std::vector> &_actuators, + const IMC::IMCParams ¶ms) + : Controller(wrapped_controller->controller_type) + , _wrapped_controller(std::move(wrapped_controller)) + , InverseNet(int(_actuators.size())) + , Inverse_Optim(nullptr) + , FeedForNet(int(_actuators.size())) + , FeedFor_Optim(nullptr) + +{ + this->Inverse_Optim = std::make_unique( + this->InverseNet->parameters(), + torch::optim::AdamOptions(params.learning_rate*2).betas({params.beta1, params.beta2}).weight_decay(params.weight_decay) + ); + this->FeedFor_Optim = std::make_unique( + this->FeedForNet->parameters(), + torch::optim::AdamOptions(params.learning_rate).betas({params.beta1, params.beta2}).weight_decay(params.weight_decay) + ); + + this->Reference_state_Prev = torch::ones(_actuators.size()*2, torch::kFloat)*0.5; + this->Predicted_state_Prev = this->Reference_state_Prev.requires_grad_(true); + this->Current_State_Prev = this->Predicted_state_Prev.requires_grad_(true); + this->Motor_Input_Prev = torch::ones(_actuators.size(), torch::kFloat)*0.5; + this->Motor_Input_Prev_fb = this->Motor_Input_Prev; + this->FeedForNet->to(torch::kDouble); + this->InverseNet->to(torch::kDouble); + + this->State_Memory = torch::ones({long(_actuators.size()*2), params.window_length}, torch::kFloat64)* 0.5; + this->Reference_Memory = torch::ones({long(_actuators.size()*2), params.window_length}, torch::kFloat64)* 0.4999/params.window_length; + + this->Save_Check = params.save_checkpoint; + this->model_name = params.model_name; + + if (params.restore_checkpoint & false) { + + this->Load_Progress(params.model_name); + bool test_best = false; + + if(test_best){ + this->Save_Check = false; + for (const auto& param : this->InverseNet->named_parameters()) { + param->requires_grad_(false); + } + for (const auto& param : this->FeedForNet->named_parameters()) { + param->requires_grad_(false); + } + for (int i = 0; i < int(_actuators.size()); ++i) { + std::ofstream ofs; + ofs.open(project_root + "/experiments/IMC/output" + this->model_name + "/act_info/A" + + std::to_string(i + 1) + ".log", std::ofstream::out | std::ofstream::trunc); + ofs.close(); + } + } + } + else {//clear log files for new + for (int i = 0; i < int(_actuators.size()); ++i) { + std::ofstream ofs; + ofs.open(project_root + "/experiments/IMC/output" + this->model_name + "/act_info/A" + + std::to_string(i + 1) + ".log", std::ofstream::out | std::ofstream::trunc); + ofs.close(); + } + } +// std::ofstream ofs; +// ofs.open(project_root+"/experiments/IMC/output"+this->model_name+"/IMC_time.txt", std::ofstream::out | std::ofstream::trunc); +// ofs.close(); + +} + + +torch::Tensor IMC::InverseModel( + const torch::Tensor& Current_State, + const torch::Tensor& Reference_State) +{ + torch::Tensor Network_Input_Inverse = torch::cat({Reference_State, Current_State}, 0).to(torch::kDouble); + this->InverseNet->zero_grad(); + return (this->InverseNet->forward(Network_Input_Inverse.detach())+1)/2; +} + +torch::Tensor IMC::FeedForModel( + const torch::Tensor& Current_State, + const torch::Tensor& Motor_Input) +{ + torch::Tensor Model_Input_FeedFor = torch::cat({Motor_Input, Current_State},0).to(torch::kDouble); + this->FeedForNet->zero_grad(); + return 
(this->FeedForNet->forward(Model_Input_FeedFor.detach())+1)/2; +} + +void IMC::Update_Weights( + const torch::Tensor& Current_State, + const torch::Tensor& Motor_Input) +{ + torch::nn::MSELoss MSE_FeedFor_loss; + torch::Tensor FeedFor_loss = MSE_FeedFor_loss(Current_State.clone().detach(),this->Predicted_state_Prev); + FeedFor_loss.backward(); + this->FeedFor_Optim->step(); + + this->State_Memory = torch::cat({this->State_Memory.narrow(1, 1, this->State_Memory.size(1) - 1).detach(), + Current_State.reshape({Motor_Input.size(0) * 2, 1})},1); + this->Reference_Memory = torch::cat({this->Reference_Memory.narrow(1,1,this->State_Memory.size(1)-1), + this->Reference_state_Prev.reshape({Motor_Input.size(0)*2,1})},1); + + torch::nn::MSELoss MSE_Inverse_loss; + torch::Tensor Inverse_loss = MSE_Inverse_loss(this->State_Memory,this->Reference_Memory.detach()); +// torch::Tensor Inverse_loss = MSE_Inverse_loss(Current_State.narrow(0,0,Motor_Input.size(0)), +// this->Reference_state_Prev.narrow(0,0,Motor_Input.size(0)));//.narrow(0,0,Motor_Input.size(0)).detach()) + + Inverse_loss.backward(); + this->Inverse_Optim->step(); + +// /// Logging results +// auto mi_a = this->Motor_Input_Prev.accessor(); +// auto fb_a = this->Motor_Input_Prev_fb.accessor(); +// auto ps_a = this->Predicted_state_Prev.accessor(); +// auto cs_a = this->Current_State_Prev.accessor(); +// auto rs_a = this->Reference_state_Prev.accessor(); +// +// +// for(int i=0 ; imodel_name+"/act_info/A"+std::to_string(i+1)+".log", std::ios::app); +// ofs<()<<","<()<<","<> &_actuators, + double dt) +{ + if(true){ + this->Current_State_Prev = current_state.requires_grad_(true); + this->Update_Weights(this->Current_State_Prev, this->Motor_Input_Prev); + + torch::Tensor feedback_error = (this->Predicted_state_Prev-current_state).detach(); + torch::Tensor motor_input = this->InverseModel(current_state, reference_state + feedback_error).to(torch::kFloat); + + torch::Tensor predicted_state = FeedForModel(current_state, motor_input).to(torch::kFloat); + torch::Tensor FeedFor_error = (reference_state-predicted_state).detach(); + + torch::Tensor feedback = feedback_error+FeedFor_error; + + motor_input = (motor_input-0.5)*dt+0.5 + + (feedback.narrow(0,0,int(_actuators.size()))*5.235988*1.0 + + feedback.narrow(0,int(_actuators.size()),int(_actuators.size()))*1.0)*dt; + + this->Reference_state_Prev = reference_state; + this->Predicted_state_Prev = predicted_state; + this->Current_State_Prev = current_state; + this->Motor_Input_Prev = motor_input.requires_grad_(true); + this->Motor_Input_Prev_fb = (feedback.narrow(0,0,int(_actuators.size()))*5.235988*2.0 + + feedback.narrow(0,int(_actuators.size()),int(_actuators.size()))*2.0)*dt+0.5; + + motor_input = motor_input.to(torch::kDouble); + unsigned int p = 0; + for (const auto &actuator: _actuators) + { + double *output = motor_input[p].data_ptr(); + actuator->write(output, dt); + p += 1; + } + } + else{ + torch::Tensor motor_input = reference_state.narrow(0,0,_actuators.size()); + +// /// Logging results +// auto mi_a = this->Motor_Input_Prev.accessor(); +// auto fb_a = this->Motor_Input_Prev_fb.accessor(); +// auto ps_a = this->Predicted_state_Prev.accessor(); +// auto cs_a = this->Current_State_Prev.accessor(); +// auto rs_a = this->Reference_state_Prev.accessor(); +// +// for(int i=0 ; imodel_name+"/act_info/A"+std::to_string(i+1)+".log", std::ios::app); +// ofs<Reference_state_Prev = reference_state; + this->Predicted_state_Prev = motor_input*0.0; + this->Current_State_Prev = current_state; + 
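+        // pass-through branch bookkeeping: no IMC correction is applied, and the prediction and feedback buffers are simply zeroed so the (commented-out) logging block above stays consistent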
this->Motor_Input_Prev = motor_input; + this->Motor_Input_Prev_fb = motor_input*0.0; + + motor_input = motor_input.to(torch::kDouble); + unsigned int p = 0; + for (const auto &actuator: _actuators) + { + double *output = motor_input[p].data_ptr(); + actuator->write(output, dt); + p += 1; + } + } +} + +void IMC::Save_Progress(std::string model_name) +{ +// this->InverseNet->eval(); + std::cout<<"SAVE IMC NETWORK PROGRESS"<InverseNet, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_Inverse_network.pt"); + torch::save(*this->Inverse_Optim, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_Inverse_optimizer.pt"); + + torch::save(this->FeedForNet, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_FeedForModel_network.pt"); + torch::save(*this->FeedFor_Optim, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_FeedForModel_optimizer.pt"); + } + else { + this->Save_Check = false; + } +} + +void IMC::Load_Progress(std::string model_name) +{ + + std::ifstream net(project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_Inverse_network.pt"); + if(net) { + std::cout<<"load IMC"<InverseNet, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_Inverse_network.pt"); + torch::load(*this->Inverse_Optim, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_Inverse_optimizer.pt"); + + torch::load(this->FeedForNet, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_FeedForModel_network.pt"); + torch::load(*this->FeedFor_Optim, project_root+"/experiments/IMC/output"+model_name+"/"+model_name+"_FeedForModel_optimizer.pt"); + } +} + +void IMC::update( + const std::vector > &actuators, + const std::vector > &sensors, + double time, double dt) +{ + size_t output_size = 0; + for (const auto &actuator: actuators) { + output_size += actuator->n_outputs(); + } + + std::vector actuator_buffer(output_size); + double *actuator_buffer_data_p = actuator_buffer.data(); + + size_t output_cursor = 0; + std::vector > fake_actuators; + for (const auto &actuator: actuators) { + const size_t size = actuator->n_outputs(); + fake_actuators.emplace_back(std::make_shared( + actuator_buffer_data_p+output_cursor, + actuator)); + output_cursor += size; + } + + this->_wrapped_controller->update(fake_actuators, sensors, time, dt); + + // ================= READ STATE ======================= + std::vector current_state_v; + current_state_v.reserve(actuators.size()*2); + for (const auto &actuator: actuators) { + current_state_v.emplace_back(actuator->Current_State(Actuator::StateType::POSITION)/2+.5); + } + for (const auto &actuator: actuators) { + current_state_v.emplace_back(actuator->Current_State(Actuator::StateType::VELOCITY)/2/5.235988+.5); + } + + // Current state of the servo motor + torch::Tensor current_state = torch::tensor(current_state_v); + + // State you want to go to (controller output) + torch::Tensor reference_state = torch::tensor(actuator_buffer)/2+.5; + + torch::Tensor reference_state_dot = ((reference_state-this->Reference_state_Prev.narrow(0,0,actuators.size()))/dt/5.235988+.5).clamp(0,1); + reference_state = torch::cat({reference_state,reference_state_dot},0); + + this->Step(current_state, reference_state, actuators, dt); + + if((std::fmod(time,60.0) == 0) & this->Save_Check){ + this->Save_Progress(this->model_name); + if(time == dt){ + for(const auto& pair : this->InverseNet->named_parameters().pairs()){ + this->InverseNet->named_parameters().find(pair.first)-> + 
set_requires_grad(pair.first.find("ut") == std::string::npos);
+            }
+        }
+    }
+} \ No newline at end of file diff --git a/cpprevolve/revolve/brains/controller/IMC/IMC.h b/cpprevolve/revolve/brains/controller/IMC/IMC.h new file mode 100644 index 0000000000..61aa720855 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/IMC.h @@ -0,0 +1,133 @@ +//
+//
+//
+
+
+#include "../sensors/Sensor.h"
+#include "../actuators/Actuator.h"
+#include "../Controller.h"
+#include "torch/torch.h"
+#include "FeedForwardNetwork.h"
+#include "InverseNetwork.h"
+#include "../DifferentialCPG.h"
+
+
+namespace revolve {
+
+class IMC : public Controller
+{
+public:
+    struct IMCParams {
+        double learning_rate = 5e-2;
+        double beta1 = 0.9;
+        double beta2 = 0.99;
+        double weight_decay = 0.001;
+        int window_length = 60;
+        bool restore_checkpoint = false;
+        bool save_checkpoint = true;
+        std::string model_name;
+    };
+    /// \brief Constructor
+    /// \param[in] wrapped_controller Controller whose output serves as the reference state
+    /// \param[in] _actuators Reference to an actuator list
+    /// \param[in] params Parameters for the IMC: learning rate / beta factors / restore_checkpoint
+    IMC(
+            std::unique_ptr<::revolve::Controller> wrapped_controller,
+            const std::vector<std::shared_ptr<Actuator>> &_actuators,
+            const IMC::IMCParams &params);
+
+    /// \brief Inverse Model function
+    torch::Tensor InverseModel(
+            const torch::Tensor& Current_State,
+            const torch::Tensor& Reference_State);
+
+    /// \brief FeedForward Model function
+    torch::Tensor FeedForModel(
+            const torch::Tensor& Current_State,
+            const torch::Tensor& Motor_Input);
+
+    /// \brief Generic functions
+    void Update_Weights(
+            const torch::Tensor& Current_State,
+            const torch::Tensor& Motor_Input);
+
+    void Step(
+            const torch::Tensor& current_state,
+            const torch::Tensor& reference_state,
+            const std::vector<std::shared_ptr<Actuator>> &_actuators,
+            double dt);
+
+    void update(
+            const std::vector<std::shared_ptr<Actuator>> &actuators,
+            const std::vector<std::shared_ptr<Sensor>> &sensors,
+            double time,
+            double step
+    ) override;
+
+    void Save_Progress(std::string model_name);
+    void Load_Progress(std::string model_name);
+
+    DifferentialCPG* into_DifferentialCPG() override { return this->_wrapped_controller->into_DifferentialCPG(); }
+
+private:
+    class FakeActuator: public Actuator {
+        double *const buffer;
+
+    public:
+        explicit FakeActuator(double *const buffer, const std::shared_ptr<Actuator> &wrapped)
+                : Actuator(wrapped->n_outputs(), wrapped->coordinate_x(), wrapped->coordinate_y(), wrapped->coordinate_z())
+                , buffer(buffer)
+        {};
+
+        double Current_State( StateType /*type*/ ) override { return 0.0; }
+
+        void write(const double *output, double /*step*/) override
+        {
+            for (unsigned int i = 0; i < this->n_outputs(); i++) {
+                this->buffer[i] = output[i];
+            }
+        }
+    };
+
+
+protected:
+    std::unique_ptr<::revolve::Controller> _wrapped_controller;
+
+    /// \brief Inverse Network
+    InverseNetwork InverseNet;
+
+    /// \brief Optimizer holding the Inverse network weights
+    std::unique_ptr<torch::optim::Adam> Inverse_Optim;
+
+    /// \brief Feed Forward Network
+    FeedForwardNetwork FeedForNet;
+
+    /// \brief Optimizer holding the Feed Forward network weights
+    std::unique_ptr<torch::optim::Adam> FeedFor_Optim;
+
+    /// \brief Previous reference state
+    torch::Tensor Reference_state_Prev;
+
+    /// \brief Previous predicted state
+    torch::Tensor Predicted_state_Prev;
+
+    /// \brief Previous motor input and its feedback component
+    torch::Tensor Motor_Input_Prev;
+    torch::Tensor Motor_Input_Prev_fb;
+
+    /// \brief Previous measured state
+    torch::Tensor
Current_State_Prev;
+
+    /// \brief Sliding windows of measured and reference states used for the inverse-model loss
+    torch::Tensor State_Memory;
+    torch::Tensor Reference_Memory;
+
+    /// \brief Whether network checkpoints are written to disk
+    bool Save_Check;
+
+    /// \brief Name used for the checkpoint files
+    std::string model_name;
+
+};
+
+}
+
+ diff --git a/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.cpp b/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.cpp new file mode 100644 index 0000000000..89cfae31be --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.cpp @@ -0,0 +1,49 @@ +/*
+* Copyright (C) 2017 Vrije Universiteit Amsterdam
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*
+* Author: Fuda van Diggelen
+*
+*/
+
+#include "torch/torch.h"
+#include "InverseNetwork.h"
+
+
+/////////////////////////////////////////////////
+InverseNetworkImpl::InverseNetworkImpl(
+        int actuatorsize)
+        : linear_In(actuatorsize * 4, actuatorsize * 4),
+          linear_H1(actuatorsize * 4, actuatorsize * 4),
+          linear_Out(actuatorsize * 4, actuatorsize)
+{
+    // register_module() is needed if we want to use the parameters() method later on
+    register_module("linear_In", linear_In);
+    register_module("linear_H1", linear_H1);
+    register_module("linear_Out", linear_Out);
+
+    bias_in = register_parameter("bias_in", torch::randn(actuatorsize * 4));
+    bias_h1 = register_parameter("bias_h1", torch::randn(actuatorsize * 4));
+    bias_out = register_parameter("bias_out", torch::randn(actuatorsize));
+}
+
+
+torch::Tensor InverseNetworkImpl::forward(torch::Tensor x){
+    x = torch::relu(linear_In->forward(x)+bias_in);
+    x = torch::relu(linear_H1->forward(x)+bias_h1);
+    x = torch::tanh(linear_Out->forward(x)+bias_out);
+    return x;
+}
+
+InverseNetworkImpl::~InverseNetworkImpl() = default;
+
+ diff --git a/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.h b/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.h new file mode 100644 index 0000000000..b63f793fb1 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/InverseNetwork.h @@ -0,0 +1,47 @@ +/*
+* Copyright (C) 2017 Vrije Universiteit Amsterdam
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* +* Description: Position based (servo) motor +* Author: Elte Hupkes +* +*/ + + +#include +#include "iostream" + +#include "torch/torch.h" + +class InverseNetworkImpl : public torch::nn::Module { + /// \brief Constructor +public: explicit InverseNetworkImpl( + int actuatorsize + ); + + /// \brief Destructor +public: ~InverseNetworkImpl() override; + + /// \brief Forward function +public: torch::Tensor forward(torch::Tensor x); + +private: + /// \brief Layers + torch::nn::Linear linear_In, linear_H1, linear_Out; + + /// \brief Biases + torch::Tensor bias_in, bias_h1, bias_out; + +}; + +TORCH_MODULE(InverseNetwork); \ No newline at end of file diff --git a/cpprevolve/revolve/brains/controller/IMC/Test_IMC.cpp b/cpprevolve/revolve/brains/controller/IMC/Test_IMC.cpp new file mode 100644 index 0000000000..6fabef9561 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/Test_IMC.cpp @@ -0,0 +1,160 @@ +// +// Created by fuda on 16-03-20. +// + +// +// Created by fuda on 25-02-20. +// +#include "FeedForwardNetwork.h" +#include "InverseNetwork.h" +#include "torch/torch.h" + +#include + + +const std::string NetworkFor = "/home/fuda/Projects/revolve/experiments/examples/output/testFor.log"; +const std::string NetworkInv = "/home/fuda/Projects/revolve/experiments/examples/output/testInv.log"; +int actuatorsize = 2; +double phase =0.3; +double freq = 2*3.141592654/75; + + +FeedForwardNetwork FeedForNet(actuatorsize); +torch::optim::Adam FeedFor_Optim(FeedForNet.parameters(), + torch::optim::AdamOptions(3e-2).beta1(0.0).weight_decay(0.001)); + + +InverseNetwork InvNet(actuatorsize); +torch::optim::Adam Inv_Optim(InvNet.parameters(), + torch::optim::AdamOptions(3e-2).beta1(0.0).weight_decay(0.001)); + + + +torch::Tensor Reference_state_Prev = torch::ones((0,actuatorsize*2)).to(torch::kFloat64)*.5; +torch::Tensor Predicted_state_Prev =Reference_state_Prev.requires_grad_(true); + + +torch::Tensor InvModel(const torch::Tensor& Current_State, + const torch::Tensor& Reference_State, + const torch::Tensor& error +){ + torch::Tensor Network_Input_Inv = torch::cat({Reference_State, Current_State}, 0); + InvNet.zero_grad(); + return (InvNet.forward(Network_Input_Inv)+1)/2; +} + +void Update_Weights_InvModel(const torch::Tensor& Current_State, + const torch::Tensor& Motor_Input) +{ +///////////////////////////////////////////////////////////////// +////////////// Update the weight of the inverse ///////////////// +///////////////////////////////////////////////////////////////// + auto C_a = Current_State.accessor(); + auto R_a = Reference_state_Prev.accessor(); + auto MI_a =Motor_Input.accessor(); + torch::nn::MSELoss lossInv; + torch::Tensor Inv_loss = lossInv(Current_State, Reference_state_Prev.detach()); + Inv_loss.backward(); + Inv_Optim.step(); + + std::ofstream foutInv; // Create Object of ofstream + foutInv.open (NetworkInv , std::ios::app); + foutInv<()<<"\t"<> &_actuators +) +{ + torch::Tensor Network_Input_FeedFor = torch::cat({Motor_Input, Current_State},0); + + FeedForNet.zero_grad(); + Predicted_state_Prev = (FeedForNet.forward(Network_Input_FeedFor.detach())+1)/2; +} + +void Update_Weights_FeedForModel(const torch::Tensor& Current_State, + const torch::Tensor& Motor_Input) +{ + + torch::nn::MSELoss lossFor; + torch::Tensor FeedFor_loss = lossFor(Predicted_state_Prev, Current_State.clone().detach()); + FeedFor_loss.backward(); + FeedFor_Optim.step(); + + auto P_a = Predicted_state_Prev.accessor(); + auto C_a = Current_State.accessor(); + + std::ofstream fout; // Create Object of ofstream + 
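+    // Append the previous prediction and the realised state to the feed-forward log so the two
+    // trajectories can be compared offline.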
fout.open (NetworkFor , std::ios::app); + fout<()<<"\n"; + fout.close(); // Closing the file + + +} + + + +int main(){ + std::ofstream ofs; + ofs.open(NetworkFor, std::ofstream::out | std::ofstream::trunc); + ofs.close(); + ofs.open(NetworkInv, std::ofstream::out | std::ofstream::trunc); + ofs.close(); + + + torch::Tensor current_state = Predicted_state_Prev+0.0; + auto options = torch::TensorOptions().dtype(torch::kFloat64); + + std::time_t result = std::time(nullptr); + std::cout << current_state << "\n" << Predicted_state_Prev << "\n" << std::asctime(std::localtime(&result)); + + FeedForNet.to(torch::kDouble); + InvNet.to(torch::kDouble); + + + for(double t=0; t < 5000;) { + double data_input[]={ (cos(t*freq+phase)+sin(t*freq*2.5-phase)+2)/4, (-cos(t*freq+phase)-sin(t*freq*2.5-phase)+2)/4, - + ((sin(t*freq+phase)*freq+cos(t*freq*2.5-phase)*freq*2.5)/4+.2)*2.5, ((sin(t*freq+phase)*freq-cos(t*freq*2.5-phase)*freq*2.5)/4+.2)*2.5}; + + + torch::Tensor reference_state = torch::from_blob(data_input,{2*actuatorsize}, options); +// std::cout<< current_state-(Predicted_state_Prev-Reference_state_Prev); + torch::Tensor fb = (Predicted_state_Prev-current_state).detach(); + + + torch::Tensor motor_input = InvModel(current_state, reference_state+fb, fb); + + + FeedForModel( current_state, motor_input); + + torch::Tensor fb2 = (reference_state-Predicted_state_Prev).detach(); + motor_input = motor_input+fb.reshape({2,actuatorsize}).sum(0) + + fb2.reshape({2,actuatorsize}).sum(0); +// + torch::Tensor statedot = (torch::cat({torch::clamp( + current_state.narrow(0,actuatorsize,actuatorsize)*2-1, -1, 1), + (torch::clamp(motor_input,0,1)*2-1)}, + 0))*0.2; +// torch::Tensor statedot({(-sin(t*freq+phase)*freq+1)/2, (-sin(t*freq+phase)*freq+1)/2}); +// torch::Tensor statedot = (current_state-reference_state); +// std::cout << current_state; + current_state = clamp(current_state+statedot, 0 , 1); + + Update_Weights_FeedForModel(current_state,motor_input); + Update_Weights_InvModel(current_state,motor_input); + + Reference_state_Prev = reference_state; + current_state = current_state.detach(); +// current_state.to(options); +// std::cout << "\n\nData_out: \n" << data_output[0] << " " << data_output[1] << "\ncurrent_state: \n"<< current_state; +// std::cout << current_state; + t = t+1; + + } + + result = std::time(nullptr); + std::cout << std::asctime(std::localtime(&result)); +}; \ No newline at end of file diff --git a/cpprevolve/revolve/brains/controller/IMC/Test_IMC2.cpp b/cpprevolve/revolve/brains/controller/IMC/Test_IMC2.cpp new file mode 100644 index 0000000000..47ad51baf6 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/Test_IMC2.cpp @@ -0,0 +1,226 @@ +// +// Created by fuda on 16-03-20. +// + +// +// Created by fuda on 25-02-20. 
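+//
+// Standalone harness: parse a robot SDF, build the selected controller, wrap it in the IMC,
+// and attach a learner. A rough usage sketch of the wrapping step, assuming the IMC constructor
+// declared in IMC.h above and a DifferentialCPG-style controller constructor:
+//
+//     revolve::IMC::IMCParams params;                      // defaults: lr 5e-2, betas 0.9/0.99
+//     auto cpg = std::make_unique<revolve::DifferentialCPG>(brain_sdf, motors_);
+//     auto controller = std::make_unique<revolve::IMC>(std::move(cpg), motors_, params);
+//     controller->update(actuators, sensors, time, dt);    // CPG output + IMC correction each tick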
+// +//#pragma once +#include +#include + +#include +#include +#include +#include +#include + +#include + +//#include "../plugin/RobotController.h" + +#include "IMC.h" +#include "torch/torch.h" + +#include "revolve/brains/learner/EvaluationReporter.h" +#include "revolve/brains/controller/sensors/Sensor.h" +#include "revolve/brains/controller/actuators/Actuator.h" +#include "revolve/brains/learner/Learner.h" +#include "revolve/brains/learner/Evaluator.h" +#include "revolve/brains/learner/NoLearner.h" +#include "revolve/brains/learner/BayesianOptimizer.h" +#include "revolve/brains/learner/HyperNEAT.h" +#include "revolve/brains/controller/Controller.h" + + + +#include +#include +#include +#include +#include + +#include +#include +#include +#include "revolve/gazebo/brains/GazeboReporter.h" +#include "revolve/gazebo/brains/Brains.h" +//#include +#include "revolve/gazebo/Types.h" +#include + + +const std::string sdfPath("/home/fuda/Projects/revolve/experiments/examples/yaml/Single_link.yaml.sdf"); + +// load and check sdf file +sdf::SDFPtr sdfElement(new sdf::SDF()); + + + +std::unique_ptr<::revolve::gazebo::Evaluator> evaluator; +std::unique_ptr<::revolve::EvaluationReporter> reporter; +std::shared_ptr<::revolve::gazebo::GazeboReporter> gazebo_reporter; +/// \brief Networking node +::gazebo::transport::NodePtr node_; + + +/// \brief Motors in this model +//std::vector< std::shared_ptr< revolve::Actuator > > motors_; +std::vector motors_; + +/// \brief Sensors in this model +//std::vector< std::shared_ptr< revolve::Sensor > > sensors_; +std::vector sensors_; + +/// \brief Pointer to the model +::gazebo::physics::ModelPtr model_; + +/// \brief Learner for the brain controlling this model +std::unique_ptr<::revolve::Learner> learner; + + +//namespace revolve + +int main () +{ + sdf::init(sdfElement); + if (!sdf::readFile(sdfPath, sdfElement)) + { + std::cerr << sdfPath << " is not a valid SDF file!" << std::endl; + } + // start parsing model + const sdf::ElementPtr _sdf = sdfElement->Root(); + + if (not _sdf->HasElement("rv:brain")) { + std::cerr << "No robot brain detected, this is probably an error." 
+              << std::endl;
+
+    }
+
+    auto brain_sdf = _sdf->GetElement("rv:brain");
+    auto controller_type = brain_sdf->GetElement("rv:controller")->GetAttribute("type")->GetAsString();
+    // auto IMC_params = brain_sdf->GetElement("rv:IMC")->GetElement("rv:params");
+    auto learner_type = brain_sdf->GetElement("rv:learner")->GetAttribute("type")->GetAsString();
+    std::cout << "Loading controller " << controller_type << " and learner " << learner_type << std::endl;
+
+
+    //TODO parameters from SDF
+    const double evaluation_rate = 15.0;
+    const unsigned int n_learning_evaluations = 50;
+
+    evaluator = std::make_unique< revolve::gazebo::Evaluator >(evaluation_rate, true, model_);
+
+    // aggregated reporter
+    std::unique_ptr<revolve::AggregatedReporter> aggregated_reporter(new revolve::AggregatedReporter(model_->GetName()));
+    // std::cout reporter
+    aggregated_reporter->create<::revolve::PrintReporter>();
+    // gazebo network publisher reporter
+    gazebo_reporter.reset(new ::revolve::gazebo::GazeboReporter(aggregated_reporter->robot_id, node_));
+    aggregated_reporter->append(gazebo_reporter);
+
+    reporter = std::move(aggregated_reporter);
+
+    // SELECT CONTROLLER ------------------------------------------------------
+    std::unique_ptr<::revolve::Controller> controller;
+
+    if ("ann" == controller_type) {
+        controller = std::make_unique(model_, brain_sdf, motors_, sensors_);
+    } else if ("spline" == controller_type) {
+        if (not motors_.empty()) {
+            controller = std::make_unique(model_, brain_sdf, motors_, sensors_);
+        }
+    } else if ("cpg" == controller_type) {
+        controller = std::make_unique(brain_sdf, motors_);
+    } else if ("cppn-cpg" == controller_type) {
+        controller = std::make_unique(brain_sdf, motors_);
+    } else {
+        throw std::runtime_error("Robot brain: Controller \"" + controller_type + "\" is not supported.");
+    }
+    std::cout << "initialized the controller" << std::endl;
+    // ================= INITIALIZE IMC ====================
+    revolve::IMC::IMCParams imc_params = revolve::IMC::IMCParams();
+    std::unique_ptr<::revolve::Controller> controller2;
+    std::unique_ptr<::revolve::Controller> imc;
+    imc = std::make_unique<revolve::IMC>(std::move(controller), motors_, imc_params);
+
+    controller2 = std::move(imc);
+
+
+    // SELECT LEARNER ---------------------------------------------------------
+    if ("offline" == learner_type) {
+        learner = std::make_unique>(std::move(controller));
+    } else if ("rlpower" == learner_type) {
+        //TODO make RLPower generic
+        if ("spline" != controller_type) {
+            throw std::runtime_error("Robot brain: Learner RLPower not supported for \"" + controller_type + "\" controller.");
+        }
+        learner = std::make_unique>(std::move(controller));
+    } else if ("bo" == learner_type) {
+        learner = std::make_unique<revolve::BayesianOptimizer>(
+                std::move(controller),
+                evaluator.get(),
+                reporter.get(),
+                evaluation_rate,
+                n_learning_evaluations);
+    } else if ("hyperneat" == learner_type) {
+        NEAT::Parameters neat_params = NEAT::Parameters();
+
+        const sdf::ElementPtr learner_sdf = brain_sdf->GetElement("rv:learner")->GetElement("rv:params");
+
+
+        #define WRITE_DOUBLE_PARAM(x) std::cout << #x << " is set to: " << learner_sdf->GetAttribute(#x)->GetAsString() << std::endl; neat_params.x = stod(learner_sdf->GetAttribute(#x)->GetAsString());
+        #define CHECK_PARAM(x) {stod(std::to_string(neat_params.x))==stod(learner_sdf->GetAttribute(#x)->GetAsString()) ?
std::cout << std::left <<#x << " is set to: Default" << std::endl : WRITE_DOUBLE_PARAM(x)} + CHECK_PARAM(PopulationSize) + CHECK_PARAM(WeightDiffCoeff) + CHECK_PARAM(CompatTreshold) + CHECK_PARAM(YoungAgeTreshold) + CHECK_PARAM(OldAgeTreshold) + CHECK_PARAM(MinSpecies) + CHECK_PARAM(MaxSpecies) + CHECK_PARAM(RouletteWheelSelection) + CHECK_PARAM(RecurrentProb) + CHECK_PARAM(OverallMutationRate) + CHECK_PARAM(ArchiveEnforcement) + CHECK_PARAM(MutateWeightsProb) + CHECK_PARAM(WeightMutationMaxPower) + CHECK_PARAM(WeightReplacementMaxPower) + CHECK_PARAM(MutateWeightsSevereProb) + CHECK_PARAM(WeightMutationRate) + CHECK_PARAM(WeightReplacementRate) + CHECK_PARAM(MaxWeight) + CHECK_PARAM(MutateAddNeuronProb) + CHECK_PARAM(MutateAddLinkProb) + CHECK_PARAM(MutateRemLinkProb) + CHECK_PARAM(MinActivationA) + CHECK_PARAM(MaxActivationA) + CHECK_PARAM(ActivationFunction_SignedSigmoid_Prob) + CHECK_PARAM(ActivationFunction_UnsignedSigmoid_Prob) + CHECK_PARAM(ActivationFunction_Tanh_Prob) + CHECK_PARAM(ActivationFunction_SignedStep_Prob) + CHECK_PARAM(CrossoverRate) + CHECK_PARAM(MultipointCrossoverRate) + CHECK_PARAM(SurvivalRate) + CHECK_PARAM(MutateNeuronTraitsProb) + CHECK_PARAM(MutateLinkTraitsProb) + #undef CHECK_PARAM + #undef WRITE_DOUBLE_PARAM + + neat_params.DynamicCompatibility = (learner_sdf->GetAttribute("DynamicCompatibility")->GetAsString() == "true"); + neat_params.NormalizeGenomeSize = (learner_sdf->GetAttribute("NormalizeGenomeSize")->GetAsString() == "true"); + neat_params.AllowLoops = (learner_sdf->GetAttribute("AllowLoops")->GetAsString() == "true"); + neat_params.AllowClones = (learner_sdf->GetAttribute("AllowClones")->GetAsString() == "true"); + + int seed = 0; + + learner = std::make_unique( + std::move(controller), + evaluator.get(), + reporter.get(), + neat_params, + seed, + evaluation_rate, + n_learning_evaluations); + } else { + throw std::runtime_error("Robot brain: Learner \"" + learner_type + "\" is not supported."); + } +} diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeCache.txt b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeCache.txt new file mode 100644 index 0000000000..5f4eb92ec2 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeCache.txt @@ -0,0 +1,416 @@ +# This is the CMakeCache file. +# For build in directory: /home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC/cmake-build-debug +# It was generated by CMake: /snap/clion/103/bin/cmake/linux/bin/cmake +# You can edit this file to change values found and used by cmake. +# If you do not want to change any of the values, simply exit the editor. +# If you do want to change a value, simply edit, save, and exit the editor. +# The syntax for the file is as follows: +# KEY:TYPE=VALUE +# KEY is the name of a variable in the cache. +# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. +# VALUE is the current value for the KEY. + +######################## +# EXTERNAL cache entries +######################## + +//No help, variable specified on the command line. +BUILD_ONLY_BRAIN:UNINITIALIZED=0 + +//No help, variable specified on the command line. +BUILD_RASPBERRY:UNINITIALIZED=0 + +//Path to a library. +C10_LIBRARY:FILEPATH=/home/fuda/Projects/pytorch/libtorch/lib/libc10.so + +//Path to a program. +CMAKE_AR:FILEPATH=/usr/bin/ar + +//Choose the type of build, options are: None Debug Release RelWithDebInfo +// MinSizeRel ... +CMAKE_BUILD_TYPE:STRING=Release + +//Id string of the compiler for the CodeBlocks IDE. 
Automatically +// detected when left empty +CMAKE_CODEBLOCKS_COMPILER_ID:STRING= + +//The CodeBlocks executable +CMAKE_CODEBLOCKS_EXECUTABLE:FILEPATH=CMAKE_CODEBLOCKS_EXECUTABLE-NOTFOUND + +//Additional command line arguments when CodeBlocks invokes make. +// Enter e.g. -j to get parallel builds +CMAKE_CODEBLOCKS_MAKE_ARGUMENTS:STRING=-j8 + +//Enable/Disable color output during build. +CMAKE_COLOR_MAKEFILE:BOOL=ON + +//CXX compiler +CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++ + +//A wrapper around 'ar' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_CXX_COMPILER_AR:FILEPATH=/usr/bin/gcc-ar-7 + +//A wrapper around 'ranlib' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_CXX_COMPILER_RANLIB:FILEPATH=/usr/bin/gcc-ranlib-7 + +//Flags used by the CXX compiler during all build types. +CMAKE_CXX_FLAGS:STRING= + +//Flags used by the CXX compiler during DEBUG builds. +CMAKE_CXX_FLAGS_DEBUG:STRING=-g + +//Flags used by the CXX compiler during MINSIZEREL builds. +CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG + +//Flags used by the CXX compiler during RELEASE builds. +CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG + +//Flags used by the CXX compiler during RELWITHDEBINFO builds. +CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG + +//C compiler +CMAKE_C_COMPILER:FILEPATH=/usr/bin/cc + +//A wrapper around 'ar' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_C_COMPILER_AR:FILEPATH=/usr/bin/gcc-ar-7 + +//A wrapper around 'ranlib' adding the appropriate '--plugin' option +// for the GCC compiler +CMAKE_C_COMPILER_RANLIB:FILEPATH=/usr/bin/gcc-ranlib-7 + +//Flags used by the C compiler during all build types. +CMAKE_C_FLAGS:STRING= + +//Flags used by the C compiler during DEBUG builds. +CMAKE_C_FLAGS_DEBUG:STRING=-g + +//Flags used by the C compiler during MINSIZEREL builds. +CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG + +//Flags used by the C compiler during RELEASE builds. +CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG + +//Flags used by the C compiler during RELWITHDEBINFO builds. +CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG + +//Flags used by the linker during all build types. +CMAKE_EXE_LINKER_FLAGS:STRING= + +//Flags used by the linker during DEBUG builds. +CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during MINSIZEREL builds. +CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during RELEASE builds. +CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during RELWITHDEBINFO builds. +CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Enable/Disable output of compile commands during generation. +CMAKE_EXPORT_COMPILE_COMMANDS:BOOL=OFF + +//Install path prefix, prepended onto install directories. +CMAKE_INSTALL_PREFIX:PATH=/usr/local + +//Path to a program. +CMAKE_LINKER:FILEPATH=/usr/bin/ld + +//Path to a program. +CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make + +//Flags used by the linker during the creation of modules during +// all build types. +CMAKE_MODULE_LINKER_FLAGS:STRING= + +//Flags used by the linker during the creation of modules during +// DEBUG builds. +CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during the creation of modules during +// MINSIZEREL builds. +CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during the creation of modules during +// RELEASE builds. +CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during the creation of modules during +// RELWITHDEBINFO builds. 
+CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Path to a program. +CMAKE_NM:FILEPATH=/usr/bin/nm + +//Path to a program. +CMAKE_OBJCOPY:FILEPATH=/usr/bin/objcopy + +//Path to a program. +CMAKE_OBJDUMP:FILEPATH=/usr/bin/objdump + +//Value Computed by CMake +CMAKE_PROJECT_DESCRIPTION:STATIC= + +//Value Computed by CMake +CMAKE_PROJECT_HOMEPAGE_URL:STATIC= + +//Value Computed by CMake +CMAKE_PROJECT_NAME:STATIC=IMC + +//Path to a program. +CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib + +//Flags used by the linker during the creation of shared libraries +// during all build types. +CMAKE_SHARED_LINKER_FLAGS:STRING= + +//Flags used by the linker during the creation of shared libraries +// during DEBUG builds. +CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during the creation of shared libraries +// during MINSIZEREL builds. +CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during the creation of shared libraries +// during RELEASE builds. +CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during the creation of shared libraries +// during RELWITHDEBINFO builds. +CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//If set, runtime paths are not added when installing shared libraries, +// but are added when building. +CMAKE_SKIP_INSTALL_RPATH:BOOL=NO + +//If set, runtime paths are not added when using shared libraries. +CMAKE_SKIP_RPATH:BOOL=NO + +//Flags used by the linker during the creation of static libraries +// during all build types. +CMAKE_STATIC_LINKER_FLAGS:STRING= + +//Flags used by the linker during the creation of static libraries +// during DEBUG builds. +CMAKE_STATIC_LINKER_FLAGS_DEBUG:STRING= + +//Flags used by the linker during the creation of static libraries +// during MINSIZEREL builds. +CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL:STRING= + +//Flags used by the linker during the creation of static libraries +// during RELEASE builds. +CMAKE_STATIC_LINKER_FLAGS_RELEASE:STRING= + +//Flags used by the linker during the creation of static libraries +// during RELWITHDEBINFO builds. +CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO:STRING= + +//Path to a program. +CMAKE_STRIP:FILEPATH=/usr/bin/strip + +//If this value is on, makefiles will be generated without the +// .SILENT directive, and all commands will be echoed to the console +// during the make. This is useful for debugging only. With Visual +// Studio IDE projects all commands are done without /nologo. +CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE + +//The directory containing a CMake configuration file for Caffe2. +Caffe2_DIR:PATH=/home/fuda/Projects/pytorch/libtorch/share/cmake/Caffe2 + +//Value Computed by CMake +IMC_BINARY_DIR:STATIC=/home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC/cmake-build-debug + +//Value Computed by CMake +IMC_SOURCE_DIR:STATIC=/home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC + +//The directory containing a CMake configuration file for MKLDNN. +MKLDNN_DIR:PATH=MKLDNN_DIR-NOTFOUND + +//The directory containing a CMake configuration file for MKL. +MKL_DIR:PATH=MKL_DIR-NOTFOUND + +//Path to a program. +ProcessorCount_cmd_nproc:FILEPATH=/usr/bin/nproc + +//Path to a program. +ProcessorCount_cmd_sysctl:FILEPATH=/sbin/sysctl + +//Path to a library. +TORCH_LIBRARY:FILEPATH=/home/fuda/Projects/pytorch/libtorch/lib/libtorch.so + +//The directory containing a CMake configuration file for Torch. 
+Torch_DIR:PATH=Torch_DIR-NOTFOUND + + +######################## +# INTERNAL cache entries +######################## + +//ADVANCED property for variable: CMAKE_AR +CMAKE_AR-ADVANCED:INTERNAL=1 +//This is the directory where this CMakeCache.txt was created +CMAKE_CACHEFILE_DIR:INTERNAL=/home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC/cmake-build-debug +//Major version of cmake used to create the current loaded cache +CMAKE_CACHE_MAJOR_VERSION:INTERNAL=3 +//Minor version of cmake used to create the current loaded cache +CMAKE_CACHE_MINOR_VERSION:INTERNAL=15 +//Patch version of cmake used to create the current loaded cache +CMAKE_CACHE_PATCH_VERSION:INTERNAL=3 +//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE +CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1 +//Path to CMake executable. +CMAKE_COMMAND:INTERNAL=/snap/clion/103/bin/cmake/linux/bin/cmake +//Path to cpack program executable. +CMAKE_CPACK_COMMAND:INTERNAL=/snap/clion/103/bin/cmake/linux/bin/cpack +//Path to ctest program executable. +CMAKE_CTEST_COMMAND:INTERNAL=/snap/clion/103/bin/cmake/linux/bin/ctest +//ADVANCED property for variable: CMAKE_CXX_COMPILER +CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_COMPILER_AR +CMAKE_CXX_COMPILER_AR-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_COMPILER_RANLIB +CMAKE_CXX_COMPILER_RANLIB-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS +CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG +CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL +CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE +CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO +CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER +CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER_AR +CMAKE_C_COMPILER_AR-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_COMPILER_RANLIB +CMAKE_C_COMPILER_RANLIB-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS +CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG +CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL +CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE +CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO +CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//Executable file format +CMAKE_EXECUTABLE_FORMAT:INTERNAL=ELF +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS +CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG +CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL +CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE +CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_EXPORT_COMPILE_COMMANDS +CMAKE_EXPORT_COMPILE_COMMANDS-ADVANCED:INTERNAL=1 +//Name of external makefile project generator. 
+CMAKE_EXTRA_GENERATOR:INTERNAL=CodeBlocks +//CXX compiler system defined macros +CMAKE_EXTRA_GENERATOR_CXX_SYSTEM_DEFINED_MACROS:INTERNAL=__STDC__;1;__STDC_VERSION__;201112L;__STDC_UTF_16__;1;__STDC_UTF_32__;1;__STDC_HOSTED__;1;__GNUC__;7;__GNUC_MINOR__;5;__GNUC_PATCHLEVEL__;0;__VERSION__;"7.5.0";__ATOMIC_RELAXED;0;__ATOMIC_SEQ_CST;5;__ATOMIC_ACQUIRE;2;__ATOMIC_RELEASE;3;__ATOMIC_ACQ_REL;4;__ATOMIC_CONSUME;1;__pic__;2;__PIC__;2;__pie__;2;__PIE__;2;__FINITE_MATH_ONLY__;0;_LP64;1;__LP64__;1;__SIZEOF_INT__;4;__SIZEOF_LONG__;8;__SIZEOF_LONG_LONG__;8;__SIZEOF_SHORT__;2;__SIZEOF_FLOAT__;4;__SIZEOF_DOUBLE__;8;__SIZEOF_LONG_DOUBLE__;16;__SIZEOF_SIZE_T__;8;__CHAR_BIT__;8;__BIGGEST_ALIGNMENT__;16;__ORDER_LITTLE_ENDIAN__;1234;__ORDER_BIG_ENDIAN__;4321;__ORDER_PDP_ENDIAN__;3412;__BYTE_ORDER__;__ORDER_LITTLE_ENDIAN__;__FLOAT_WORD_ORDER__;__ORDER_LITTLE_ENDIAN__;__SIZEOF_POINTER__;8;__SIZE_TYPE__;long unsigned int;__PTRDIFF_TYPE__;long int;__WCHAR_TYPE__;int;__WINT_TYPE__;unsigned int;__INTMAX_TYPE__;long int;__UINTMAX_TYPE__;long unsigned int;__CHAR16_TYPE__;short unsigned int;__CHAR32_TYPE__;unsigned int;__SIG_ATOMIC_TYPE__;int;__INT8_TYPE__;signed char;__INT16_TYPE__;short int;__INT32_TYPE__;int;__INT64_TYPE__;long int;__UINT8_TYPE__;unsigned char;__UINT16_TYPE__;short unsigned int;__UINT32_TYPE__;unsigned int;__UINT64_TYPE__;long unsigned int;__INT_LEAST8_TYPE__;signed char;__INT_LEAST16_TYPE__;short int;__INT_LEAST32_TYPE__;int;__INT_LEAST64_TYPE__;long int;__UINT_LEAST8_TYPE__;unsigned char;__UINT_LEAST16_TYPE__;short unsigned int;__UINT_LEAST32_TYPE__;unsigned int;__UINT_LEAST64_TYPE__;long unsigned int;__INT_FAST8_TYPE__;signed char;__INT_FAST16_TYPE__;long int;__INT_FAST32_TYPE__;long int;__INT_FAST64_TYPE__;long int;__UINT_FAST8_TYPE__;unsigned char;__UINT_FAST16_TYPE__;long unsigned int;__UINT_FAST32_TYPE__;long unsigned int;__UINT_FAST64_TYPE__;long unsigned int;__INTPTR_TYPE__;long int;__UINTPTR_TYPE__;long unsigned int;__has_include(STR);__has_include__(STR);__has_include_next(STR);__has_include_next__(STR);__GXX_ABI_VERSION;1011;__SCHAR_MAX__;0x7f;__SHRT_MAX__;0x7fff;__INT_MAX__;0x7fffffff;__LONG_MAX__;0x7fffffffffffffffL;__LONG_LONG_MAX__;0x7fffffffffffffffLL;__WCHAR_MAX__;0x7fffffff;__WCHAR_MIN__;(-__WCHAR_MAX__ - 1);__WINT_MAX__;0xffffffffU;__WINT_MIN__;0U;__PTRDIFF_MAX__;0x7fffffffffffffffL;__SIZE_MAX__;0xffffffffffffffffUL;__SCHAR_WIDTH__;8;__SHRT_WIDTH__;16;__INT_WIDTH__;32;__LONG_WIDTH__;64;__LONG_LONG_WIDTH__;64;__WCHAR_WIDTH__;32;__WINT_WIDTH__;32;__PTRDIFF_WIDTH__;64;__SIZE_WIDTH__;64;__INTMAX_MAX__;0x7fffffffffffffffL;__INTMAX_C(c);c ## L;__UINTMAX_MAX__;0xffffffffffffffffUL;__UINTMAX_C(c);c ## UL;__INTMAX_WIDTH__;64;__SIG_ATOMIC_MAX__;0x7fffffff;__SIG_ATOMIC_MIN__;(-__SIG_ATOMIC_MAX__ - 1);__SIG_ATOMIC_WIDTH__;32;__INT8_MAX__;0x7f;__INT16_MAX__;0x7fff;__INT32_MAX__;0x7fffffff;__INT64_MAX__;0x7fffffffffffffffL;__UINT8_MAX__;0xff;__UINT16_MAX__;0xffff;__UINT32_MAX__;0xffffffffU;__UINT64_MAX__;0xffffffffffffffffUL;__INT_LEAST8_MAX__;0x7f;__INT8_C(c);c;__INT_LEAST8_WIDTH__;8;__INT_LEAST16_MAX__;0x7fff;__INT16_C(c);c;__INT_LEAST16_WIDTH__;16;__INT_LEAST32_MAX__;0x7fffffff;__INT32_C(c);c;__INT_LEAST32_WIDTH__;32;__INT_LEAST64_MAX__;0x7fffffffffffffffL;__INT64_C(c);c ## L;__INT_LEAST64_WIDTH__;64;__UINT_LEAST8_MAX__;0xff;__UINT8_C(c);c;__UINT_LEAST16_MAX__;0xffff;__UINT16_C(c);c;__UINT_LEAST32_MAX__;0xffffffffU;__UINT32_C(c);c ## U;__UINT_LEAST64_MAX__;0xffffffffffffffffUL;__UINT64_C(c);c ## 
UL;__INT_FAST8_MAX__;0x7f;__INT_FAST8_WIDTH__;8;__INT_FAST16_MAX__;0x7fffffffffffffffL;__INT_FAST16_WIDTH__;64;__INT_FAST32_MAX__;0x7fffffffffffffffL;__INT_FAST32_WIDTH__;64;__INT_FAST64_MAX__;0x7fffffffffffffffL;__INT_FAST64_WIDTH__;64;__UINT_FAST8_MAX__;0xff;__UINT_FAST16_MAX__;0xffffffffffffffffUL;__UINT_FAST32_MAX__;0xffffffffffffffffUL;__UINT_FAST64_MAX__;0xffffffffffffffffUL;__INTPTR_MAX__;0x7fffffffffffffffL;__INTPTR_WIDTH__;64;__UINTPTR_MAX__;0xffffffffffffffffUL;__GCC_IEC_559;2;__GCC_IEC_559_COMPLEX;2;__FLT_EVAL_METHOD__;0;__FLT_EVAL_METHOD_TS_18661_3__;0;__DEC_EVAL_METHOD__;2;__FLT_RADIX__;2;__FLT_MANT_DIG__;24;__FLT_DIG__;6;__FLT_MIN_EXP__;(-125);__FLT_MIN_10_EXP__;(-37);__FLT_MAX_EXP__;128;__FLT_MAX_10_EXP__;38;__FLT_DECIMAL_DIG__;9;__FLT_MAX__;3.40282346638528859811704183484516925e+38F;__FLT_MIN__;1.17549435082228750796873653722224568e-38F;__FLT_EPSILON__;1.19209289550781250000000000000000000e-7F;__FLT_DENORM_MIN__;1.40129846432481707092372958328991613e-45F;__FLT_HAS_DENORM__;1;__FLT_HAS_INFINITY__;1;__FLT_HAS_QUIET_NAN__;1;__DBL_MANT_DIG__;53;__DBL_DIG__;15;__DBL_MIN_EXP__;(-1021);__DBL_MIN_10_EXP__;(-307);__DBL_MAX_EXP__;1024;__DBL_MAX_10_EXP__;308;__DBL_DECIMAL_DIG__;17;__DBL_MAX__;((double)1.79769313486231570814527423731704357e+308L);__DBL_MIN__;((double)2.22507385850720138309023271733240406e-308L);__DBL_EPSILON__;((double)2.22044604925031308084726333618164062e-16L);__DBL_DENORM_MIN__;((double)4.94065645841246544176568792868221372e-324L);__DBL_HAS_DENORM__;1;__DBL_HAS_INFINITY__;1;__DBL_HAS_QUIET_NAN__;1;__LDBL_MANT_DIG__;64;__LDBL_DIG__;18;__LDBL_MIN_EXP__;(-16381);__LDBL_MIN_10_EXP__;(-4931);__LDBL_MAX_EXP__;16384;__LDBL_MAX_10_EXP__;4932;__DECIMAL_DIG__;21;__LDBL_DECIMAL_DIG__;21;__LDBL_MAX__;1.18973149535723176502126385303097021e+4932L;__LDBL_MIN__;3.36210314311209350626267781732175260e-4932L;__LDBL_EPSILON__;1.08420217248550443400745280086994171e-19L;__LDBL_DENORM_MIN__;3.64519953188247460252840593361941982e-4951L;__LDBL_HAS_DENORM__;1;__LDBL_HAS_INFINITY__;1;__LDBL_HAS_QUIET_NAN__;1;__FLT32_MANT_DIG__;24;__FLT32_DIG__;6;__FLT32_MIN_EXP__;(-125);__FLT32_MIN_10_EXP__;(-37);__FLT32_MAX_EXP__;128;__FLT32_MAX_10_EXP__;38;__FLT32_DECIMAL_DIG__;9;__FLT32_MAX__;3.40282346638528859811704183484516925e+38F32;__FLT32_MIN__;1.17549435082228750796873653722224568e-38F32;__FLT32_EPSILON__;1.19209289550781250000000000000000000e-7F32;__FLT32_DENORM_MIN__;1.40129846432481707092372958328991613e-45F32;__FLT32_HAS_DENORM__;1;__FLT32_HAS_INFINITY__;1;__FLT32_HAS_QUIET_NAN__;1;__FLT64_MANT_DIG__;53;__FLT64_DIG__;15;__FLT64_MIN_EXP__;(-1021);__FLT64_MIN_10_EXP__;(-307);__FLT64_MAX_EXP__;1024;__FLT64_MAX_10_EXP__;308;__FLT64_DECIMAL_DIG__;17;__FLT64_MAX__;1.79769313486231570814527423731704357e+308F64;__FLT64_MIN__;2.22507385850720138309023271733240406e-308F64;__FLT64_EPSILON__;2.22044604925031308084726333618164062e-16F64;__FLT64_DENORM_MIN__;4.94065645841246544176568792868221372e-324F64;__FLT64_HAS_DENORM__;1;__FLT64_HAS_INFINITY__;1;__FLT64_HAS_QUIET_NAN__;1;__FLT128_MANT_DIG__;113;__FLT128_DIG__;33;__FLT128_MIN_EXP__;(-16381);__FLT128_MIN_10_EXP__;(-4931);__FLT128_MAX_EXP__;16384;__FLT128_MAX_10_EXP__;4932;__FLT128_DECIMAL_DIG__;36;__FLT128_MAX__;1.18973149535723176508575932662800702e+4932F128;__FLT128_MIN__;3.36210314311209350626267781732175260e-4932F128;__FLT128_EPSILON__;1.92592994438723585305597794258492732e-34F128;__FLT128_DENORM_MIN__;6.47517511943802511092443895822764655e-4966F128;__FLT128_HAS_DENORM__;1;__FLT128_HAS_INFINITY__;1;__FLT128_HAS_QUIET_NAN__;1;__FLT32X_MANT_DIG__;53;__FLT
32X_DIG__;15;__FLT32X_MIN_EXP__;(-1021);__FLT32X_MIN_10_EXP__;(-307);__FLT32X_MAX_EXP__;1024;__FLT32X_MAX_10_EXP__;308;__FLT32X_DECIMAL_DIG__;17;__FLT32X_MAX__;1.79769313486231570814527423731704357e+308F32x;__FLT32X_MIN__;2.22507385850720138309023271733240406e-308F32x;__FLT32X_EPSILON__;2.22044604925031308084726333618164062e-16F32x;__FLT32X_DENORM_MIN__;4.94065645841246544176568792868221372e-324F32x;__FLT32X_HAS_DENORM__;1;__FLT32X_HAS_INFINITY__;1;__FLT32X_HAS_QUIET_NAN__;1;__FLT64X_MANT_DIG__;64;__FLT64X_DIG__;18;__FLT64X_MIN_EXP__;(-16381);__FLT64X_MIN_10_EXP__;(-4931);__FLT64X_MAX_EXP__;16384;__FLT64X_MAX_10_EXP__;4932;__FLT64X_DECIMAL_DIG__;21;__FLT64X_MAX__;1.18973149535723176502126385303097021e+4932F64x;__FLT64X_MIN__;3.36210314311209350626267781732175260e-4932F64x;__FLT64X_EPSILON__;1.08420217248550443400745280086994171e-19F64x;__FLT64X_DENORM_MIN__;3.64519953188247460252840593361941982e-4951F64x;__FLT64X_HAS_DENORM__;1;__FLT64X_HAS_INFINITY__;1;__FLT64X_HAS_QUIET_NAN__;1;__DEC32_MANT_DIG__;7;__DEC32_MIN_EXP__;(-94);__DEC32_MAX_EXP__;97;__DEC32_MIN__;1E-95DF;__DEC32_MAX__;9.999999E96DF;__DEC32_EPSILON__;1E-6DF;__DEC32_SUBNORMAL_MIN__;0.000001E-95DF;__DEC64_MANT_DIG__;16;__DEC64_MIN_EXP__;(-382);__DEC64_MAX_EXP__;385;__DEC64_MIN__;1E-383DD;__DEC64_MAX__;9.999999999999999E384DD;__DEC64_EPSILON__;1E-15DD;__DEC64_SUBNORMAL_MIN__;0.000000000000001E-383DD;__DEC128_MANT_DIG__;34;__DEC128_MIN_EXP__;(-6142);__DEC128_MAX_EXP__;6145;__DEC128_MIN__;1E-6143DL;__DEC128_MAX__;9.999999999999999999999999999999999E6144DL;__DEC128_EPSILON__;1E-33DL;__DEC128_SUBNORMAL_MIN__;0.000000000000000000000000000000001E-6143DL;__REGISTER_PREFIX__; ;__USER_LABEL_PREFIX__; ;__GNUC_STDC_INLINE__;1;__NO_INLINE__;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8;1;__GCC_ATOMIC_BOOL_LOCK_FREE;2;__GCC_ATOMIC_CHAR_LOCK_FREE;2;__GCC_ATOMIC_CHAR16_T_LOCK_FREE;2;__GCC_ATOMIC_CHAR32_T_LOCK_FREE;2;__GCC_ATOMIC_WCHAR_T_LOCK_FREE;2;__GCC_ATOMIC_SHORT_LOCK_FREE;2;__GCC_ATOMIC_INT_LOCK_FREE;2;__GCC_ATOMIC_LONG_LOCK_FREE;2;__GCC_ATOMIC_LLONG_LOCK_FREE;2;__GCC_ATOMIC_TEST_AND_SET_TRUEVAL;1;__GCC_ATOMIC_POINTER_LOCK_FREE;2;__GCC_HAVE_DWARF2_CFI_ASM;1;__PRAGMA_REDEFINE_EXTNAME;1;__SSP_STRONG__;3;__SIZEOF_INT128__;16;__SIZEOF_WCHAR_T__;4;__SIZEOF_WINT_T__;4;__SIZEOF_PTRDIFF_T__;8;__amd64;1;__amd64__;1;__x86_64;1;__x86_64__;1;__SIZEOF_FLOAT80__;16;__SIZEOF_FLOAT128__;16;__ATOMIC_HLE_ACQUIRE;65536;__ATOMIC_HLE_RELEASE;131072;__GCC_ASM_FLAG_OUTPUTS__;1;__k8;1;__k8__;1;__code_model_small__;1;__MMX__;1;__SSE__;1;__SSE2__;1;__FXSR__;1;__SSE_MATH__;1;__SSE2_MATH__;1;__SEG_FS;1;__SEG_GS;1;__gnu_linux__;1;__linux;1;__linux__;1;linux;1;__unix;1;__unix__;1;unix;1;__ELF__;1;__DECIMAL_BID_FORMAT__;1;_STDC_PREDEF_H;1;__STDC_IEC_559__;1;__STDC_IEC_559_COMPLEX__;1;__STDC_ISO_10646__;201706L;__STDC_NO_THREADS__;1;__STDC__;1;__cplusplus;201402L;__STDC_UTF_16__;1;__STDC_UTF_32__;1;__STDC_HOSTED__;1;__GNUC__;7;__GNUC_MINOR__;5;__GNUC_PATCHLEVEL__;0;__VERSION__;"7.5.0";__ATOMIC_RELAXED;0;__ATOMIC_SEQ_CST;5;__ATOMIC_ACQUIRE;2;__ATOMIC_RELEASE;3;__ATOMIC_ACQ_REL;4;__ATOMIC_CONSUME;1;__pic__;2;__PIC__;2;__pie__;2;__PIE__;2;__FINITE_MATH_ONLY__;0;_LP64;1;__LP64__;1;__SIZEOF_INT__;4;__SIZEOF_LONG__;8;__SIZEOF_LONG_LONG__;8;__SIZEOF_SHORT__;2;__SIZEOF_FLOAT__;4;__SIZEOF_DOUBLE__;8;__SIZEOF_LONG_DOUBLE__;16;__SIZEOF_SIZE_T__;8;__CHAR_BIT__;8;__BIGGEST_ALIGNMENT__;16;__ORDER_LITTLE_ENDIAN__;1234;__ORDER_BIG_ENDIAN__;4321;__ORDER_PDP_ENDIAN__;3412;__BYTE_ORDER__;__OR
DER_LITTLE_ENDIAN__;__FLOAT_WORD_ORDER__;__ORDER_LITTLE_ENDIAN__;__SIZEOF_POINTER__;8;__GNUG__;7;__SIZE_TYPE__;long unsigned int;__PTRDIFF_TYPE__;long int;__WCHAR_TYPE__;int;__WINT_TYPE__;unsigned int;__INTMAX_TYPE__;long int;__UINTMAX_TYPE__;long unsigned int;__CHAR16_TYPE__;short unsigned int;__CHAR32_TYPE__;unsigned int;__SIG_ATOMIC_TYPE__;int;__INT8_TYPE__;signed char;__INT16_TYPE__;short int;__INT32_TYPE__;int;__INT64_TYPE__;long int;__UINT8_TYPE__;unsigned char;__UINT16_TYPE__;short unsigned int;__UINT32_TYPE__;unsigned int;__UINT64_TYPE__;long unsigned int;__INT_LEAST8_TYPE__;signed char;__INT_LEAST16_TYPE__;short int;__INT_LEAST32_TYPE__;int;__INT_LEAST64_TYPE__;long int;__UINT_LEAST8_TYPE__;unsigned char;__UINT_LEAST16_TYPE__;short unsigned int;__UINT_LEAST32_TYPE__;unsigned int;__UINT_LEAST64_TYPE__;long unsigned int;__INT_FAST8_TYPE__;signed char;__INT_FAST16_TYPE__;long int;__INT_FAST32_TYPE__;long int;__INT_FAST64_TYPE__;long int;__UINT_FAST8_TYPE__;unsigned char;__UINT_FAST16_TYPE__;long unsigned int;__UINT_FAST32_TYPE__;long unsigned int;__UINT_FAST64_TYPE__;long unsigned int;__INTPTR_TYPE__;long int;__UINTPTR_TYPE__;long unsigned int;__has_include(STR);__has_include__(STR);__has_include_next(STR);__has_include_next__(STR);__GXX_WEAK__;1;__DEPRECATED;1;__GXX_RTTI;1;__cpp_rtti;199711;__GXX_EXPERIMENTAL_CXX0X__;1;__cpp_binary_literals;201304;__cpp_hex_float;201603;__cpp_runtime_arrays;198712;__cpp_unicode_characters;200704;__cpp_raw_strings;200710;__cpp_unicode_literals;200710;__cpp_user_defined_literals;200809;__cpp_lambdas;200907;__cpp_range_based_for;200907;__cpp_static_assert;200410;__cpp_decltype;200707;__cpp_attributes;200809;__cpp_rvalue_reference;200610;__cpp_rvalue_references;200610;__cpp_variadic_templates;200704;__cpp_initializer_lists;200806;__cpp_delegating_constructors;200604;__cpp_nsdmi;200809;__cpp_inheriting_constructors;201511;__cpp_ref_qualifiers;200710;__cpp_alias_templates;200704;__cpp_return_type_deduction;201304;__cpp_init_captures;201304;__cpp_generic_lambdas;201304;__cpp_constexpr;201304;__cpp_decltype_auto;201304;__cpp_aggregate_nsdmi;201304;__cpp_variable_templates;201304;__cpp_digit_separators;201309;__cpp_sized_deallocation;201309;__cpp_threadsafe_static_init;200806;__EXCEPTIONS;1;__cpp_exceptions;199711;__GXX_ABI_VERSION;1011;__SCHAR_MAX__;0x7f;__SHRT_MAX__;0x7fff;__INT_MAX__;0x7fffffff;__LONG_MAX__;0x7fffffffffffffffL;__LONG_LONG_MAX__;0x7fffffffffffffffLL;__WCHAR_MAX__;0x7fffffff;__WCHAR_MIN__;(-__WCHAR_MAX__ - 1);__WINT_MAX__;0xffffffffU;__WINT_MIN__;0U;__PTRDIFF_MAX__;0x7fffffffffffffffL;__SIZE_MAX__;0xffffffffffffffffUL;__SCHAR_WIDTH__;8;__SHRT_WIDTH__;16;__INT_WIDTH__;32;__LONG_WIDTH__;64;__LONG_LONG_WIDTH__;64;__WCHAR_WIDTH__;32;__WINT_WIDTH__;32;__PTRDIFF_WIDTH__;64;__SIZE_WIDTH__;64;__GLIBCXX_TYPE_INT_N_0;__int128;__GLIBCXX_BITSIZE_INT_N_0;128;__INTMAX_MAX__;0x7fffffffffffffffL;__INTMAX_C(c);c ## L;__UINTMAX_MAX__;0xffffffffffffffffUL;__UINTMAX_C(c);c ## UL;__INTMAX_WIDTH__;64;__SIG_ATOMIC_MAX__;0x7fffffff;__SIG_ATOMIC_MIN__;(-__SIG_ATOMIC_MAX__ - 1);__SIG_ATOMIC_WIDTH__;32;__INT8_MAX__;0x7f;__INT16_MAX__;0x7fff;__INT32_MAX__;0x7fffffff;__INT64_MAX__;0x7fffffffffffffffL;__UINT8_MAX__;0xff;__UINT16_MAX__;0xffff;__UINT32_MAX__;0xffffffffU;__UINT64_MAX__;0xffffffffffffffffUL;__INT_LEAST8_MAX__;0x7f;__INT8_C(c);c;__INT_LEAST8_WIDTH__;8;__INT_LEAST16_MAX__;0x7fff;__INT16_C(c);c;__INT_LEAST16_WIDTH__;16;__INT_LEAST32_MAX__;0x7fffffff;__INT32_C(c);c;__INT_LEAST32_WIDTH__;32;__INT_LEAST64_MAX__;0x7fffffffffffffffL;__INT64_C(c);c ## 
L;__INT_LEAST64_WIDTH__;64;__UINT_LEAST8_MAX__;0xff;__UINT8_C(c);c;__UINT_LEAST16_MAX__;0xffff;__UINT16_C(c);c;__UINT_LEAST32_MAX__;0xffffffffU;__UINT32_C(c);c ## U;__UINT_LEAST64_MAX__;0xffffffffffffffffUL;__UINT64_C(c);c ## UL;__INT_FAST8_MAX__;0x7f;__INT_FAST8_WIDTH__;8;__INT_FAST16_MAX__;0x7fffffffffffffffL;__INT_FAST16_WIDTH__;64;__INT_FAST32_MAX__;0x7fffffffffffffffL;__INT_FAST32_WIDTH__;64;__INT_FAST64_MAX__;0x7fffffffffffffffL;__INT_FAST64_WIDTH__;64;__UINT_FAST8_MAX__;0xff;__UINT_FAST16_MAX__;0xffffffffffffffffUL;__UINT_FAST32_MAX__;0xffffffffffffffffUL;__UINT_FAST64_MAX__;0xffffffffffffffffUL;__INTPTR_MAX__;0x7fffffffffffffffL;__INTPTR_WIDTH__;64;__UINTPTR_MAX__;0xffffffffffffffffUL;__GCC_IEC_559;2;__GCC_IEC_559_COMPLEX;2;__FLT_EVAL_METHOD__;0;__FLT_EVAL_METHOD_TS_18661_3__;0;__DEC_EVAL_METHOD__;2;__FLT_RADIX__;2;__FLT_MANT_DIG__;24;__FLT_DIG__;6;__FLT_MIN_EXP__;(-125);__FLT_MIN_10_EXP__;(-37);__FLT_MAX_EXP__;128;__FLT_MAX_10_EXP__;38;__FLT_DECIMAL_DIG__;9;__FLT_MAX__;3.40282346638528859811704183484516925e+38F;__FLT_MIN__;1.17549435082228750796873653722224568e-38F;__FLT_EPSILON__;1.19209289550781250000000000000000000e-7F;__FLT_DENORM_MIN__;1.40129846432481707092372958328991613e-45F;__FLT_HAS_DENORM__;1;__FLT_HAS_INFINITY__;1;__FLT_HAS_QUIET_NAN__;1;__DBL_MANT_DIG__;53;__DBL_DIG__;15;__DBL_MIN_EXP__;(-1021);__DBL_MIN_10_EXP__;(-307);__DBL_MAX_EXP__;1024;__DBL_MAX_10_EXP__;308;__DBL_DECIMAL_DIG__;17;__DBL_MAX__;double(1.79769313486231570814527423731704357e+308L);__DBL_MIN__;double(2.22507385850720138309023271733240406e-308L);__DBL_EPSILON__;double(2.22044604925031308084726333618164062e-16L);__DBL_DENORM_MIN__;double(4.94065645841246544176568792868221372e-324L);__DBL_HAS_DENORM__;1;__DBL_HAS_INFINITY__;1;__DBL_HAS_QUIET_NAN__;1;__LDBL_MANT_DIG__;64;__LDBL_DIG__;18;__LDBL_MIN_EXP__;(-16381);__LDBL_MIN_10_EXP__;(-4931);__LDBL_MAX_EXP__;16384;__LDBL_MAX_10_EXP__;4932;__DECIMAL_DIG__;21;__LDBL_DECIMAL_DIG__;21;__LDBL_MAX__;1.18973149535723176502126385303097021e+4932L;__LDBL_MIN__;3.36210314311209350626267781732175260e-4932L;__LDBL_EPSILON__;1.08420217248550443400745280086994171e-19L;__LDBL_DENORM_MIN__;3.64519953188247460252840593361941982e-4951L;__LDBL_HAS_DENORM__;1;__LDBL_HAS_INFINITY__;1;__LDBL_HAS_QUIET_NAN__;1;__FLT32_MANT_DIG__;24;__FLT32_DIG__;6;__FLT32_MIN_EXP__;(-125);__FLT32_MIN_10_EXP__;(-37);__FLT32_MAX_EXP__;128;__FLT32_MAX_10_EXP__;38;__FLT32_DECIMAL_DIG__;9;__FLT32_MAX__;3.40282346638528859811704183484516925e+38F32;__FLT32_MIN__;1.17549435082228750796873653722224568e-38F32;__FLT32_EPSILON__;1.19209289550781250000000000000000000e-7F32;__FLT32_DENORM_MIN__;1.40129846432481707092372958328991613e-45F32;__FLT32_HAS_DENORM__;1;__FLT32_HAS_INFINITY__;1;__FLT32_HAS_QUIET_NAN__;1;__FLT64_MANT_DIG__;53;__FLT64_DIG__;15;__FLT64_MIN_EXP__;(-1021);__FLT64_MIN_10_EXP__;(-307);__FLT64_MAX_EXP__;1024;__FLT64_MAX_10_EXP__;308;__FLT64_DECIMAL_DIG__;17;__FLT64_MAX__;1.79769313486231570814527423731704357e+308F64;__FLT64_MIN__;2.22507385850720138309023271733240406e-308F64;__FLT64_EPSILON__;2.22044604925031308084726333618164062e-16F64;__FLT64_DENORM_MIN__;4.94065645841246544176568792868221372e-324F64;__FLT64_HAS_DENORM__;1;__FLT64_HAS_INFINITY__;1;__FLT64_HAS_QUIET_NAN__;1;__FLT128_MANT_DIG__;113;__FLT128_DIG__;33;__FLT128_MIN_EXP__;(-16381);__FLT128_MIN_10_EXP__;(-4931);__FLT128_MAX_EXP__;16384;__FLT128_MAX_10_EXP__;4932;__FLT128_DECIMAL_DIG__;36;__FLT128_MAX__;1.18973149535723176508575932662800702e+4932F128;__FLT128_MIN__;3.36210314311209350626267781732175260e-4932F128;__FLT128_EPSILON__;1.9
2592994438723585305597794258492732e-34F128;__FLT128_DENORM_MIN__;6.47517511943802511092443895822764655e-4966F128;__FLT128_HAS_DENORM__;1;__FLT128_HAS_INFINITY__;1;__FLT128_HAS_QUIET_NAN__;1;__FLT32X_MANT_DIG__;53;__FLT32X_DIG__;15;__FLT32X_MIN_EXP__;(-1021);__FLT32X_MIN_10_EXP__;(-307);__FLT32X_MAX_EXP__;1024;__FLT32X_MAX_10_EXP__;308;__FLT32X_DECIMAL_DIG__;17;__FLT32X_MAX__;1.79769313486231570814527423731704357e+308F32x;__FLT32X_MIN__;2.22507385850720138309023271733240406e-308F32x;__FLT32X_EPSILON__;2.22044604925031308084726333618164062e-16F32x;__FLT32X_DENORM_MIN__;4.94065645841246544176568792868221372e-324F32x;__FLT32X_HAS_DENORM__;1;__FLT32X_HAS_INFINITY__;1;__FLT32X_HAS_QUIET_NAN__;1;__FLT64X_MANT_DIG__;64;__FLT64X_DIG__;18;__FLT64X_MIN_EXP__;(-16381);__FLT64X_MIN_10_EXP__;(-4931);__FLT64X_MAX_EXP__;16384;__FLT64X_MAX_10_EXP__;4932;__FLT64X_DECIMAL_DIG__;21;__FLT64X_MAX__;1.18973149535723176502126385303097021e+4932F64x;__FLT64X_MIN__;3.36210314311209350626267781732175260e-4932F64x;__FLT64X_EPSILON__;1.08420217248550443400745280086994171e-19F64x;__FLT64X_DENORM_MIN__;3.64519953188247460252840593361941982e-4951F64x;__FLT64X_HAS_DENORM__;1;__FLT64X_HAS_INFINITY__;1;__FLT64X_HAS_QUIET_NAN__;1;__DEC32_MANT_DIG__;7;__DEC32_MIN_EXP__;(-94);__DEC32_MAX_EXP__;97;__DEC32_MIN__;1E-95DF;__DEC32_MAX__;9.999999E96DF;__DEC32_EPSILON__;1E-6DF;__DEC32_SUBNORMAL_MIN__;0.000001E-95DF;__DEC64_MANT_DIG__;16;__DEC64_MIN_EXP__;(-382);__DEC64_MAX_EXP__;385;__DEC64_MIN__;1E-383DD;__DEC64_MAX__;9.999999999999999E384DD;__DEC64_EPSILON__;1E-15DD;__DEC64_SUBNORMAL_MIN__;0.000000000000001E-383DD;__DEC128_MANT_DIG__;34;__DEC128_MIN_EXP__;(-6142);__DEC128_MAX_EXP__;6145;__DEC128_MIN__;1E-6143DL;__DEC128_MAX__;9.999999999999999999999999999999999E6144DL;__DEC128_EPSILON__;1E-33DL;__DEC128_SUBNORMAL_MIN__;0.000000000000000000000000000000001E-6143DL;__REGISTER_PREFIX__; ;__USER_LABEL_PREFIX__; ;__GNUC_STDC_INLINE__;1;__NO_INLINE__;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8;1;__GCC_ATOMIC_BOOL_LOCK_FREE;2;__GCC_ATOMIC_CHAR_LOCK_FREE;2;__GCC_ATOMIC_CHAR16_T_LOCK_FREE;2;__GCC_ATOMIC_CHAR32_T_LOCK_FREE;2;__GCC_ATOMIC_WCHAR_T_LOCK_FREE;2;__GCC_ATOMIC_SHORT_LOCK_FREE;2;__GCC_ATOMIC_INT_LOCK_FREE;2;__GCC_ATOMIC_LONG_LOCK_FREE;2;__GCC_ATOMIC_LLONG_LOCK_FREE;2;__GCC_ATOMIC_TEST_AND_SET_TRUEVAL;1;__GCC_ATOMIC_POINTER_LOCK_FREE;2;__GCC_HAVE_DWARF2_CFI_ASM;1;__PRAGMA_REDEFINE_EXTNAME;1;__SSP_STRONG__;3;__SIZEOF_INT128__;16;__SIZEOF_WCHAR_T__;4;__SIZEOF_WINT_T__;4;__SIZEOF_PTRDIFF_T__;8;__amd64;1;__amd64__;1;__x86_64;1;__x86_64__;1;__SIZEOF_FLOAT80__;16;__SIZEOF_FLOAT128__;16;__ATOMIC_HLE_ACQUIRE;65536;__ATOMIC_HLE_RELEASE;131072;__GCC_ASM_FLAG_OUTPUTS__;1;__k8;1;__k8__;1;__code_model_small__;1;__MMX__;1;__SSE__;1;__SSE2__;1;__FXSR__;1;__SSE_MATH__;1;__SSE2_MATH__;1;__SEG_FS;1;__SEG_GS;1;__gnu_linux__;1;__linux;1;__linux__;1;linux;1;__unix;1;__unix__;1;unix;1;__ELF__;1;__DECIMAL_BID_FORMAT__;1;_GNU_SOURCE;1;_STDC_PREDEF_H;1;__STDC_IEC_559__;1;__STDC_IEC_559_COMPLEX__;1;__STDC_ISO_10646__;201706L;__STDC_NO_THREADS__;1 +//CXX compiler system include directories +CMAKE_EXTRA_GENERATOR_CXX_SYSTEM_INCLUDE_DIRS:INTERNAL=/usr/include/c++/7;/usr/include/x86_64-linux-gnu/c++/7;/usr/include/c++/7/backward;/usr/lib/gcc/x86_64-linux-gnu/7/include;/usr/local/include;/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed;/usr/include/x86_64-linux-gnu;/usr/include +//C compiler system defined macros 
+CMAKE_EXTRA_GENERATOR_C_SYSTEM_DEFINED_MACROS:INTERNAL=__STDC__;1;__STDC_VERSION__;201112L;__STDC_UTF_16__;1;__STDC_UTF_32__;1;__STDC_HOSTED__;1;__GNUC__;7;__GNUC_MINOR__;5;__GNUC_PATCHLEVEL__;0;__VERSION__;"7.5.0";__ATOMIC_RELAXED;0;__ATOMIC_SEQ_CST;5;__ATOMIC_ACQUIRE;2;__ATOMIC_RELEASE;3;__ATOMIC_ACQ_REL;4;__ATOMIC_CONSUME;1;__pic__;2;__PIC__;2;__pie__;2;__PIE__;2;__FINITE_MATH_ONLY__;0;_LP64;1;__LP64__;1;__SIZEOF_INT__;4;__SIZEOF_LONG__;8;__SIZEOF_LONG_LONG__;8;__SIZEOF_SHORT__;2;__SIZEOF_FLOAT__;4;__SIZEOF_DOUBLE__;8;__SIZEOF_LONG_DOUBLE__;16;__SIZEOF_SIZE_T__;8;__CHAR_BIT__;8;__BIGGEST_ALIGNMENT__;16;__ORDER_LITTLE_ENDIAN__;1234;__ORDER_BIG_ENDIAN__;4321;__ORDER_PDP_ENDIAN__;3412;__BYTE_ORDER__;__ORDER_LITTLE_ENDIAN__;__FLOAT_WORD_ORDER__;__ORDER_LITTLE_ENDIAN__;__SIZEOF_POINTER__;8;__SIZE_TYPE__;long unsigned int;__PTRDIFF_TYPE__;long int;__WCHAR_TYPE__;int;__WINT_TYPE__;unsigned int;__INTMAX_TYPE__;long int;__UINTMAX_TYPE__;long unsigned int;__CHAR16_TYPE__;short unsigned int;__CHAR32_TYPE__;unsigned int;__SIG_ATOMIC_TYPE__;int;__INT8_TYPE__;signed char;__INT16_TYPE__;short int;__INT32_TYPE__;int;__INT64_TYPE__;long int;__UINT8_TYPE__;unsigned char;__UINT16_TYPE__;short unsigned int;__UINT32_TYPE__;unsigned int;__UINT64_TYPE__;long unsigned int;__INT_LEAST8_TYPE__;signed char;__INT_LEAST16_TYPE__;short int;__INT_LEAST32_TYPE__;int;__INT_LEAST64_TYPE__;long int;__UINT_LEAST8_TYPE__;unsigned char;__UINT_LEAST16_TYPE__;short unsigned int;__UINT_LEAST32_TYPE__;unsigned int;__UINT_LEAST64_TYPE__;long unsigned int;__INT_FAST8_TYPE__;signed char;__INT_FAST16_TYPE__;long int;__INT_FAST32_TYPE__;long int;__INT_FAST64_TYPE__;long int;__UINT_FAST8_TYPE__;unsigned char;__UINT_FAST16_TYPE__;long unsigned int;__UINT_FAST32_TYPE__;long unsigned int;__UINT_FAST64_TYPE__;long unsigned int;__INTPTR_TYPE__;long int;__UINTPTR_TYPE__;long unsigned int;__has_include(STR);__has_include__(STR);__has_include_next(STR);__has_include_next__(STR);__GXX_ABI_VERSION;1011;__SCHAR_MAX__;0x7f;__SHRT_MAX__;0x7fff;__INT_MAX__;0x7fffffff;__LONG_MAX__;0x7fffffffffffffffL;__LONG_LONG_MAX__;0x7fffffffffffffffLL;__WCHAR_MAX__;0x7fffffff;__WCHAR_MIN__;(-__WCHAR_MAX__ - 1);__WINT_MAX__;0xffffffffU;__WINT_MIN__;0U;__PTRDIFF_MAX__;0x7fffffffffffffffL;__SIZE_MAX__;0xffffffffffffffffUL;__SCHAR_WIDTH__;8;__SHRT_WIDTH__;16;__INT_WIDTH__;32;__LONG_WIDTH__;64;__LONG_LONG_WIDTH__;64;__WCHAR_WIDTH__;32;__WINT_WIDTH__;32;__PTRDIFF_WIDTH__;64;__SIZE_WIDTH__;64;__INTMAX_MAX__;0x7fffffffffffffffL;__INTMAX_C(c);c ## L;__UINTMAX_MAX__;0xffffffffffffffffUL;__UINTMAX_C(c);c ## UL;__INTMAX_WIDTH__;64;__SIG_ATOMIC_MAX__;0x7fffffff;__SIG_ATOMIC_MIN__;(-__SIG_ATOMIC_MAX__ - 1);__SIG_ATOMIC_WIDTH__;32;__INT8_MAX__;0x7f;__INT16_MAX__;0x7fff;__INT32_MAX__;0x7fffffff;__INT64_MAX__;0x7fffffffffffffffL;__UINT8_MAX__;0xff;__UINT16_MAX__;0xffff;__UINT32_MAX__;0xffffffffU;__UINT64_MAX__;0xffffffffffffffffUL;__INT_LEAST8_MAX__;0x7f;__INT8_C(c);c;__INT_LEAST8_WIDTH__;8;__INT_LEAST16_MAX__;0x7fff;__INT16_C(c);c;__INT_LEAST16_WIDTH__;16;__INT_LEAST32_MAX__;0x7fffffff;__INT32_C(c);c;__INT_LEAST32_WIDTH__;32;__INT_LEAST64_MAX__;0x7fffffffffffffffL;__INT64_C(c);c ## L;__INT_LEAST64_WIDTH__;64;__UINT_LEAST8_MAX__;0xff;__UINT8_C(c);c;__UINT_LEAST16_MAX__;0xffff;__UINT16_C(c);c;__UINT_LEAST32_MAX__;0xffffffffU;__UINT32_C(c);c ## U;__UINT_LEAST64_MAX__;0xffffffffffffffffUL;__UINT64_C(c);c ## 
UL;__INT_FAST8_MAX__;0x7f;__INT_FAST8_WIDTH__;8;__INT_FAST16_MAX__;0x7fffffffffffffffL;__INT_FAST16_WIDTH__;64;__INT_FAST32_MAX__;0x7fffffffffffffffL;__INT_FAST32_WIDTH__;64;__INT_FAST64_MAX__;0x7fffffffffffffffL;__INT_FAST64_WIDTH__;64;__UINT_FAST8_MAX__;0xff;__UINT_FAST16_MAX__;0xffffffffffffffffUL;__UINT_FAST32_MAX__;0xffffffffffffffffUL;__UINT_FAST64_MAX__;0xffffffffffffffffUL;__INTPTR_MAX__;0x7fffffffffffffffL;__INTPTR_WIDTH__;64;__UINTPTR_MAX__;0xffffffffffffffffUL;__GCC_IEC_559;2;__GCC_IEC_559_COMPLEX;2;__FLT_EVAL_METHOD__;0;__FLT_EVAL_METHOD_TS_18661_3__;0;__DEC_EVAL_METHOD__;2;__FLT_RADIX__;2;__FLT_MANT_DIG__;24;__FLT_DIG__;6;__FLT_MIN_EXP__;(-125);__FLT_MIN_10_EXP__;(-37);__FLT_MAX_EXP__;128;__FLT_MAX_10_EXP__;38;__FLT_DECIMAL_DIG__;9;__FLT_MAX__;3.40282346638528859811704183484516925e+38F;__FLT_MIN__;1.17549435082228750796873653722224568e-38F;__FLT_EPSILON__;1.19209289550781250000000000000000000e-7F;__FLT_DENORM_MIN__;1.40129846432481707092372958328991613e-45F;__FLT_HAS_DENORM__;1;__FLT_HAS_INFINITY__;1;__FLT_HAS_QUIET_NAN__;1;__DBL_MANT_DIG__;53;__DBL_DIG__;15;__DBL_MIN_EXP__;(-1021);__DBL_MIN_10_EXP__;(-307);__DBL_MAX_EXP__;1024;__DBL_MAX_10_EXP__;308;__DBL_DECIMAL_DIG__;17;__DBL_MAX__;((double)1.79769313486231570814527423731704357e+308L);__DBL_MIN__;((double)2.22507385850720138309023271733240406e-308L);__DBL_EPSILON__;((double)2.22044604925031308084726333618164062e-16L);__DBL_DENORM_MIN__;((double)4.94065645841246544176568792868221372e-324L);__DBL_HAS_DENORM__;1;__DBL_HAS_INFINITY__;1;__DBL_HAS_QUIET_NAN__;1;__LDBL_MANT_DIG__;64;__LDBL_DIG__;18;__LDBL_MIN_EXP__;(-16381);__LDBL_MIN_10_EXP__;(-4931);__LDBL_MAX_EXP__;16384;__LDBL_MAX_10_EXP__;4932;__DECIMAL_DIG__;21;__LDBL_DECIMAL_DIG__;21;__LDBL_MAX__;1.18973149535723176502126385303097021e+4932L;__LDBL_MIN__;3.36210314311209350626267781732175260e-4932L;__LDBL_EPSILON__;1.08420217248550443400745280086994171e-19L;__LDBL_DENORM_MIN__;3.64519953188247460252840593361941982e-4951L;__LDBL_HAS_DENORM__;1;__LDBL_HAS_INFINITY__;1;__LDBL_HAS_QUIET_NAN__;1;__FLT32_MANT_DIG__;24;__FLT32_DIG__;6;__FLT32_MIN_EXP__;(-125);__FLT32_MIN_10_EXP__;(-37);__FLT32_MAX_EXP__;128;__FLT32_MAX_10_EXP__;38;__FLT32_DECIMAL_DIG__;9;__FLT32_MAX__;3.40282346638528859811704183484516925e+38F32;__FLT32_MIN__;1.17549435082228750796873653722224568e-38F32;__FLT32_EPSILON__;1.19209289550781250000000000000000000e-7F32;__FLT32_DENORM_MIN__;1.40129846432481707092372958328991613e-45F32;__FLT32_HAS_DENORM__;1;__FLT32_HAS_INFINITY__;1;__FLT32_HAS_QUIET_NAN__;1;__FLT64_MANT_DIG__;53;__FLT64_DIG__;15;__FLT64_MIN_EXP__;(-1021);__FLT64_MIN_10_EXP__;(-307);__FLT64_MAX_EXP__;1024;__FLT64_MAX_10_EXP__;308;__FLT64_DECIMAL_DIG__;17;__FLT64_MAX__;1.79769313486231570814527423731704357e+308F64;__FLT64_MIN__;2.22507385850720138309023271733240406e-308F64;__FLT64_EPSILON__;2.22044604925031308084726333618164062e-16F64;__FLT64_DENORM_MIN__;4.94065645841246544176568792868221372e-324F64;__FLT64_HAS_DENORM__;1;__FLT64_HAS_INFINITY__;1;__FLT64_HAS_QUIET_NAN__;1;__FLT128_MANT_DIG__;113;__FLT128_DIG__;33;__FLT128_MIN_EXP__;(-16381);__FLT128_MIN_10_EXP__;(-4931);__FLT128_MAX_EXP__;16384;__FLT128_MAX_10_EXP__;4932;__FLT128_DECIMAL_DIG__;36;__FLT128_MAX__;1.18973149535723176508575932662800702e+4932F128;__FLT128_MIN__;3.36210314311209350626267781732175260e-4932F128;__FLT128_EPSILON__;1.92592994438723585305597794258492732e-34F128;__FLT128_DENORM_MIN__;6.47517511943802511092443895822764655e-4966F128;__FLT128_HAS_DENORM__;1;__FLT128_HAS_INFINITY__;1;__FLT128_HAS_QUIET_NAN__;1;__FLT32X_MANT_DIG__;53;__FLT
32X_DIG__;15;__FLT32X_MIN_EXP__;(-1021);__FLT32X_MIN_10_EXP__;(-307);__FLT32X_MAX_EXP__;1024;__FLT32X_MAX_10_EXP__;308;__FLT32X_DECIMAL_DIG__;17;__FLT32X_MAX__;1.79769313486231570814527423731704357e+308F32x;__FLT32X_MIN__;2.22507385850720138309023271733240406e-308F32x;__FLT32X_EPSILON__;2.22044604925031308084726333618164062e-16F32x;__FLT32X_DENORM_MIN__;4.94065645841246544176568792868221372e-324F32x;__FLT32X_HAS_DENORM__;1;__FLT32X_HAS_INFINITY__;1;__FLT32X_HAS_QUIET_NAN__;1;__FLT64X_MANT_DIG__;64;__FLT64X_DIG__;18;__FLT64X_MIN_EXP__;(-16381);__FLT64X_MIN_10_EXP__;(-4931);__FLT64X_MAX_EXP__;16384;__FLT64X_MAX_10_EXP__;4932;__FLT64X_DECIMAL_DIG__;21;__FLT64X_MAX__;1.18973149535723176502126385303097021e+4932F64x;__FLT64X_MIN__;3.36210314311209350626267781732175260e-4932F64x;__FLT64X_EPSILON__;1.08420217248550443400745280086994171e-19F64x;__FLT64X_DENORM_MIN__;3.64519953188247460252840593361941982e-4951F64x;__FLT64X_HAS_DENORM__;1;__FLT64X_HAS_INFINITY__;1;__FLT64X_HAS_QUIET_NAN__;1;__DEC32_MANT_DIG__;7;__DEC32_MIN_EXP__;(-94);__DEC32_MAX_EXP__;97;__DEC32_MIN__;1E-95DF;__DEC32_MAX__;9.999999E96DF;__DEC32_EPSILON__;1E-6DF;__DEC32_SUBNORMAL_MIN__;0.000001E-95DF;__DEC64_MANT_DIG__;16;__DEC64_MIN_EXP__;(-382);__DEC64_MAX_EXP__;385;__DEC64_MIN__;1E-383DD;__DEC64_MAX__;9.999999999999999E384DD;__DEC64_EPSILON__;1E-15DD;__DEC64_SUBNORMAL_MIN__;0.000000000000001E-383DD;__DEC128_MANT_DIG__;34;__DEC128_MIN_EXP__;(-6142);__DEC128_MAX_EXP__;6145;__DEC128_MIN__;1E-6143DL;__DEC128_MAX__;9.999999999999999999999999999999999E6144DL;__DEC128_EPSILON__;1E-33DL;__DEC128_SUBNORMAL_MIN__;0.000000000000000000000000000000001E-6143DL;__REGISTER_PREFIX__; ;__USER_LABEL_PREFIX__; ;__GNUC_STDC_INLINE__;1;__NO_INLINE__;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4;1;__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8;1;__GCC_ATOMIC_BOOL_LOCK_FREE;2;__GCC_ATOMIC_CHAR_LOCK_FREE;2;__GCC_ATOMIC_CHAR16_T_LOCK_FREE;2;__GCC_ATOMIC_CHAR32_T_LOCK_FREE;2;__GCC_ATOMIC_WCHAR_T_LOCK_FREE;2;__GCC_ATOMIC_SHORT_LOCK_FREE;2;__GCC_ATOMIC_INT_LOCK_FREE;2;__GCC_ATOMIC_LONG_LOCK_FREE;2;__GCC_ATOMIC_LLONG_LOCK_FREE;2;__GCC_ATOMIC_TEST_AND_SET_TRUEVAL;1;__GCC_ATOMIC_POINTER_LOCK_FREE;2;__GCC_HAVE_DWARF2_CFI_ASM;1;__PRAGMA_REDEFINE_EXTNAME;1;__SSP_STRONG__;3;__SIZEOF_INT128__;16;__SIZEOF_WCHAR_T__;4;__SIZEOF_WINT_T__;4;__SIZEOF_PTRDIFF_T__;8;__amd64;1;__amd64__;1;__x86_64;1;__x86_64__;1;__SIZEOF_FLOAT80__;16;__SIZEOF_FLOAT128__;16;__ATOMIC_HLE_ACQUIRE;65536;__ATOMIC_HLE_RELEASE;131072;__GCC_ASM_FLAG_OUTPUTS__;1;__k8;1;__k8__;1;__code_model_small__;1;__MMX__;1;__SSE__;1;__SSE2__;1;__FXSR__;1;__SSE_MATH__;1;__SSE2_MATH__;1;__SEG_FS;1;__SEG_GS;1;__gnu_linux__;1;__linux;1;__linux__;1;linux;1;__unix;1;__unix__;1;unix;1;__ELF__;1;__DECIMAL_BID_FORMAT__;1;_STDC_PREDEF_H;1;__STDC_IEC_559__;1;__STDC_IEC_559_COMPLEX__;1;__STDC_ISO_10646__;201706L;__STDC_NO_THREADS__;1 +//C compiler system include directories +CMAKE_EXTRA_GENERATOR_C_SYSTEM_INCLUDE_DIRS:INTERNAL=/usr/lib/gcc/x86_64-linux-gnu/7/include;/usr/local/include;/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed;/usr/include/x86_64-linux-gnu;/usr/include +//Name of generator. +CMAKE_GENERATOR:INTERNAL=Unix Makefiles +//Generator instance identifier. +CMAKE_GENERATOR_INSTANCE:INTERNAL= +//Name of generator platform. +CMAKE_GENERATOR_PLATFORM:INTERNAL= +//Name of generator toolset. 
+CMAKE_GENERATOR_TOOLSET:INTERNAL= +//Test CMAKE_HAVE_LIBC_PTHREAD +CMAKE_HAVE_LIBC_PTHREAD:INTERNAL= +//Have library pthreads +CMAKE_HAVE_PTHREADS_CREATE:INTERNAL= +//Have library pthread +CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1 +//Have include pthread.h +CMAKE_HAVE_PTHREAD_H:INTERNAL=1 +//Source directory with the top level CMakeLists.txt file for this +// project +CMAKE_HOME_DIRECTORY:INTERNAL=/home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC +//Install .so files without execute permission. +CMAKE_INSTALL_SO_NO_EXE:INTERNAL=1 +//ADVANCED property for variable: CMAKE_LINKER +CMAKE_LINKER-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MAKE_PROGRAM +CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS +CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG +CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL +CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE +CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_NM +CMAKE_NM-ADVANCED:INTERNAL=1 +//number of local generators +CMAKE_NUMBER_OF_MAKEFILES:INTERNAL=1 +//ADVANCED property for variable: CMAKE_OBJCOPY +CMAKE_OBJCOPY-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_OBJDUMP +CMAKE_OBJDUMP-ADVANCED:INTERNAL=1 +//Platform information initialized +CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_RANLIB +CMAKE_RANLIB-ADVANCED:INTERNAL=1 +//Path to CMake installation. 
+CMAKE_ROOT:INTERNAL=/snap/clion/103/bin/cmake/linux/share/cmake-3.15 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS +CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG +CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL +CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE +CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SKIP_INSTALL_RPATH +CMAKE_SKIP_INSTALL_RPATH-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_SKIP_RPATH +CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS +CMAKE_STATIC_LINKER_FLAGS-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_DEBUG +CMAKE_STATIC_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL +CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELEASE +CMAKE_STATIC_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO +CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: CMAKE_STRIP +CMAKE_STRIP-ADVANCED:INTERNAL=1 +//uname command +CMAKE_UNAME:INTERNAL=/bin/uname +//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE +CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 +//Details about finding Threads +FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE][v()] +//Details about finding torch +FIND_PACKAGE_MESSAGE_DETAILS_torch:INTERNAL=[/home/fuda/Projects/pytorch/libtorch/lib/libtorch.so][/home/fuda/Projects/pytorch/libtorch/include;/home/fuda/Projects/pytorch/libtorch/include/torch/csrc/api/include][v()] +//ADVANCED property for variable: ProcessorCount_cmd_nproc +ProcessorCount_cmd_nproc-ADVANCED:INTERNAL=1 +//ADVANCED property for variable: ProcessorCount_cmd_sysctl +ProcessorCount_cmd_sysctl-ADVANCED:INTERNAL=1 + diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCCompiler.cmake b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCCompiler.cmake new file mode 100644 index 0000000000..5e07ef4f57 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCCompiler.cmake @@ -0,0 +1,76 @@ +set(CMAKE_C_COMPILER "/usr/bin/cc") +set(CMAKE_C_COMPILER_ARG1 "") +set(CMAKE_C_COMPILER_ID "GNU") +set(CMAKE_C_COMPILER_VERSION "7.5.0") +set(CMAKE_C_COMPILER_VERSION_INTERNAL "") +set(CMAKE_C_COMPILER_WRAPPER "") +set(CMAKE_C_STANDARD_COMPUTED_DEFAULT "11") +set(CMAKE_C_COMPILE_FEATURES "c_std_90;c_function_prototypes;c_std_99;c_restrict;c_variadic_macros;c_std_11;c_static_assert") +set(CMAKE_C90_COMPILE_FEATURES "c_std_90;c_function_prototypes") +set(CMAKE_C99_COMPILE_FEATURES "c_std_99;c_restrict;c_variadic_macros") +set(CMAKE_C11_COMPILE_FEATURES "c_std_11;c_static_assert") + +set(CMAKE_C_PLATFORM_ID "Linux") +set(CMAKE_C_SIMULATE_ID "") +set(CMAKE_C_COMPILER_FRONTEND_VARIANT "") +set(CMAKE_C_SIMULATE_VERSION "") + + + +set(CMAKE_AR "/usr/bin/ar") +set(CMAKE_C_COMPILER_AR "/usr/bin/gcc-ar-7") +set(CMAKE_RANLIB "/usr/bin/ranlib") 
+set(CMAKE_C_COMPILER_RANLIB "/usr/bin/gcc-ranlib-7") +set(CMAKE_LINKER "/usr/bin/ld") +set(CMAKE_MT "") +set(CMAKE_COMPILER_IS_GNUCC 1) +set(CMAKE_C_COMPILER_LOADED 1) +set(CMAKE_C_COMPILER_WORKS TRUE) +set(CMAKE_C_ABI_COMPILED TRUE) +set(CMAKE_COMPILER_IS_MINGW ) +set(CMAKE_COMPILER_IS_CYGWIN ) +if(CMAKE_COMPILER_IS_CYGWIN) + set(CYGWIN 1) + set(UNIX 1) +endif() + +set(CMAKE_C_COMPILER_ENV_VAR "CC") + +if(CMAKE_COMPILER_IS_MINGW) + set(MINGW 1) +endif() +set(CMAKE_C_COMPILER_ID_RUN 1) +set(CMAKE_C_SOURCE_FILE_EXTENSIONS c;m) +set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) +set(CMAKE_C_LINKER_PREFERENCE 10) + +# Save compiler ABI information. +set(CMAKE_C_SIZEOF_DATA_PTR "8") +set(CMAKE_C_COMPILER_ABI "ELF") +set(CMAKE_C_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") + +if(CMAKE_C_SIZEOF_DATA_PTR) + set(CMAKE_SIZEOF_VOID_P "${CMAKE_C_SIZEOF_DATA_PTR}") +endif() + +if(CMAKE_C_COMPILER_ABI) + set(CMAKE_INTERNAL_PLATFORM_ABI "${CMAKE_C_COMPILER_ABI}") +endif() + +if(CMAKE_C_LIBRARY_ARCHITECTURE) + set(CMAKE_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") +endif() + +set(CMAKE_C_CL_SHOWINCLUDES_PREFIX "") +if(CMAKE_C_CL_SHOWINCLUDES_PREFIX) + set(CMAKE_CL_SHOWINCLUDES_PREFIX "${CMAKE_C_CL_SHOWINCLUDES_PREFIX}") +endif() + + + + + +set(CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES "/usr/lib/gcc/x86_64-linux-gnu/7/include;/usr/local/include;/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed;/usr/include/x86_64-linux-gnu;/usr/include") +set(CMAKE_C_IMPLICIT_LINK_LIBRARIES "gcc;gcc_s;c;gcc;gcc_s") +set(CMAKE_C_IMPLICIT_LINK_DIRECTORIES "/usr/lib/gcc/x86_64-linux-gnu/7;/usr/lib/x86_64-linux-gnu;/usr/lib;/lib/x86_64-linux-gnu;/lib") +set(CMAKE_C_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES "") diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCXXCompiler.cmake b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCXXCompiler.cmake new file mode 100644 index 0000000000..2e456f2ba6 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeCXXCompiler.cmake @@ -0,0 +1,79 @@ +set(CMAKE_CXX_COMPILER "/usr/bin/c++") +set(CMAKE_CXX_COMPILER_ARG1 "") +set(CMAKE_CXX_COMPILER_ID "GNU") +set(CMAKE_CXX_COMPILER_VERSION "7.5.0") +set(CMAKE_CXX_COMPILER_VERSION_INTERNAL "") +set(CMAKE_CXX_COMPILER_WRAPPER "") +set(CMAKE_CXX_STANDARD_COMPUTED_DEFAULT "14") +set(CMAKE_CXX_COMPILE_FEATURES 
"cxx_std_98;cxx_template_template_parameters;cxx_std_11;cxx_alias_templates;cxx_alignas;cxx_alignof;cxx_attributes;cxx_auto_type;cxx_constexpr;cxx_decltype;cxx_decltype_incomplete_return_types;cxx_default_function_template_args;cxx_defaulted_functions;cxx_defaulted_move_initializers;cxx_delegating_constructors;cxx_deleted_functions;cxx_enum_forward_declarations;cxx_explicit_conversions;cxx_extended_friend_declarations;cxx_extern_templates;cxx_final;cxx_func_identifier;cxx_generalized_initializers;cxx_inheriting_constructors;cxx_inline_namespaces;cxx_lambdas;cxx_local_type_template_args;cxx_long_long_type;cxx_noexcept;cxx_nonstatic_member_init;cxx_nullptr;cxx_override;cxx_range_for;cxx_raw_string_literals;cxx_reference_qualified_functions;cxx_right_angle_brackets;cxx_rvalue_references;cxx_sizeof_member;cxx_static_assert;cxx_strong_enums;cxx_thread_local;cxx_trailing_return_types;cxx_unicode_literals;cxx_uniform_initialization;cxx_unrestricted_unions;cxx_user_literals;cxx_variadic_macros;cxx_variadic_templates;cxx_std_14;cxx_aggregate_default_initializers;cxx_attribute_deprecated;cxx_binary_literals;cxx_contextual_conversions;cxx_decltype_auto;cxx_digit_separators;cxx_generic_lambdas;cxx_lambda_init_captures;cxx_relaxed_constexpr;cxx_return_type_deduction;cxx_variable_templates;cxx_std_17") +set(CMAKE_CXX98_COMPILE_FEATURES "cxx_std_98;cxx_template_template_parameters") +set(CMAKE_CXX11_COMPILE_FEATURES "cxx_std_11;cxx_alias_templates;cxx_alignas;cxx_alignof;cxx_attributes;cxx_auto_type;cxx_constexpr;cxx_decltype;cxx_decltype_incomplete_return_types;cxx_default_function_template_args;cxx_defaulted_functions;cxx_defaulted_move_initializers;cxx_delegating_constructors;cxx_deleted_functions;cxx_enum_forward_declarations;cxx_explicit_conversions;cxx_extended_friend_declarations;cxx_extern_templates;cxx_final;cxx_func_identifier;cxx_generalized_initializers;cxx_inheriting_constructors;cxx_inline_namespaces;cxx_lambdas;cxx_local_type_template_args;cxx_long_long_type;cxx_noexcept;cxx_nonstatic_member_init;cxx_nullptr;cxx_override;cxx_range_for;cxx_raw_string_literals;cxx_reference_qualified_functions;cxx_right_angle_brackets;cxx_rvalue_references;cxx_sizeof_member;cxx_static_assert;cxx_strong_enums;cxx_thread_local;cxx_trailing_return_types;cxx_unicode_literals;cxx_uniform_initialization;cxx_unrestricted_unions;cxx_user_literals;cxx_variadic_macros;cxx_variadic_templates") +set(CMAKE_CXX14_COMPILE_FEATURES "cxx_std_14;cxx_aggregate_default_initializers;cxx_attribute_deprecated;cxx_binary_literals;cxx_contextual_conversions;cxx_decltype_auto;cxx_digit_separators;cxx_generic_lambdas;cxx_lambda_init_captures;cxx_relaxed_constexpr;cxx_return_type_deduction;cxx_variable_templates") +set(CMAKE_CXX17_COMPILE_FEATURES "cxx_std_17") +set(CMAKE_CXX20_COMPILE_FEATURES "") + +set(CMAKE_CXX_PLATFORM_ID "Linux") +set(CMAKE_CXX_SIMULATE_ID "") +set(CMAKE_CXX_COMPILER_FRONTEND_VARIANT "") +set(CMAKE_CXX_SIMULATE_VERSION "") + + + +set(CMAKE_AR "/usr/bin/ar") +set(CMAKE_CXX_COMPILER_AR "/usr/bin/gcc-ar-7") +set(CMAKE_RANLIB "/usr/bin/ranlib") +set(CMAKE_CXX_COMPILER_RANLIB "/usr/bin/gcc-ranlib-7") +set(CMAKE_LINKER "/usr/bin/ld") +set(CMAKE_MT "") +set(CMAKE_COMPILER_IS_GNUCXX 1) +set(CMAKE_CXX_COMPILER_LOADED 1) +set(CMAKE_CXX_COMPILER_WORKS TRUE) +set(CMAKE_CXX_ABI_COMPILED TRUE) +set(CMAKE_COMPILER_IS_MINGW ) +set(CMAKE_COMPILER_IS_CYGWIN ) +if(CMAKE_COMPILER_IS_CYGWIN) + set(CYGWIN 1) + set(UNIX 1) +endif() + +set(CMAKE_CXX_COMPILER_ENV_VAR "CXX") + +if(CMAKE_COMPILER_IS_MINGW) + set(MINGW 1) +endif() 
+set(CMAKE_CXX_COMPILER_ID_RUN 1) +set(CMAKE_CXX_IGNORE_EXTENSIONS inl;h;hpp;HPP;H;o;O;obj;OBJ;def;DEF;rc;RC) +set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP) +set(CMAKE_CXX_LINKER_PREFERENCE 30) +set(CMAKE_CXX_LINKER_PREFERENCE_PROPAGATES 1) + +# Save compiler ABI information. +set(CMAKE_CXX_SIZEOF_DATA_PTR "8") +set(CMAKE_CXX_COMPILER_ABI "ELF") +set(CMAKE_CXX_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") + +if(CMAKE_CXX_SIZEOF_DATA_PTR) + set(CMAKE_SIZEOF_VOID_P "${CMAKE_CXX_SIZEOF_DATA_PTR}") +endif() + +if(CMAKE_CXX_COMPILER_ABI) + set(CMAKE_INTERNAL_PLATFORM_ABI "${CMAKE_CXX_COMPILER_ABI}") +endif() + +if(CMAKE_CXX_LIBRARY_ARCHITECTURE) + set(CMAKE_LIBRARY_ARCHITECTURE "x86_64-linux-gnu") +endif() + +set(CMAKE_CXX_CL_SHOWINCLUDES_PREFIX "") +if(CMAKE_CXX_CL_SHOWINCLUDES_PREFIX) + set(CMAKE_CL_SHOWINCLUDES_PREFIX "${CMAKE_CXX_CL_SHOWINCLUDES_PREFIX}") +endif() + + + + + +set(CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES "/usr/include/c++/7;/usr/include/x86_64-linux-gnu/c++/7;/usr/include/c++/7/backward;/usr/lib/gcc/x86_64-linux-gnu/7/include;/usr/local/include;/usr/lib/gcc/x86_64-linux-gnu/7/include-fixed;/usr/include/x86_64-linux-gnu;/usr/include") +set(CMAKE_CXX_IMPLICIT_LINK_LIBRARIES "stdc++;m;gcc_s;gcc;c;gcc_s;gcc") +set(CMAKE_CXX_IMPLICIT_LINK_DIRECTORIES "/usr/lib/gcc/x86_64-linux-gnu/7;/usr/lib/x86_64-linux-gnu;/usr/lib;/lib/x86_64-linux-gnu;/lib") +set(CMAKE_CXX_IMPLICIT_LINK_FRAMEWORK_DIRECTORIES "") diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_C.bin b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_C.bin new file mode 100755 index 0000000000..b1860a3dd4 Binary files /dev/null and b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_C.bin differ diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_CXX.bin b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_CXX.bin new file mode 100755 index 0000000000..19a9ccae89 Binary files /dev/null and b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeDetermineCompilerABI_CXX.bin differ diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeSystem.cmake b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeSystem.cmake new file mode 100644 index 0000000000..9e33299353 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CMakeSystem.cmake @@ -0,0 +1,15 @@ +set(CMAKE_HOST_SYSTEM "Linux-4.15.0-88-generic") +set(CMAKE_HOST_SYSTEM_NAME "Linux") +set(CMAKE_HOST_SYSTEM_VERSION "4.15.0-88-generic") +set(CMAKE_HOST_SYSTEM_PROCESSOR "x86_64") + + + +set(CMAKE_SYSTEM "Linux-4.15.0-88-generic") +set(CMAKE_SYSTEM_NAME "Linux") +set(CMAKE_SYSTEM_VERSION "4.15.0-88-generic") +set(CMAKE_SYSTEM_PROCESSOR "x86_64") + +set(CMAKE_CROSSCOMPILING "FALSE") + +set(CMAKE_SYSTEM_LOADED 1) diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/CMakeCCompilerId.c b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/CMakeCCompilerId.c new file mode 100644 index 0000000000..917e8b9870 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/CMakeCCompilerId.c @@ -0,0 +1,665 @@ +#ifdef 
__cplusplus +# error "A C++ compiler has been selected for C." +#endif + +#if defined(__18CXX) +# define ID_VOID_MAIN +#endif +#if defined(__CLASSIC_C__) +/* cv-qualifiers did not exist in K&R C */ +# define const +# define volatile +#endif + + +/* Version number components: V=Version, R=Revision, P=Patch + Version date components: YYYY=Year, MM=Month, DD=Day */ + +#if defined(__INTEL_COMPILER) || defined(__ICC) +# define COMPILER_ID "Intel" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# if defined(__GNUC__) +# define SIMULATE_ID "GNU" +# endif + /* __INTEL_COMPILER = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100) +# define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10) +# if defined(__INTEL_COMPILER_UPDATE) +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER_UPDATE) +# else +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10) +# endif +# if defined(__INTEL_COMPILER_BUILD_DATE) + /* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */ +# define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE) +# endif +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# if defined(__GNUC__) +# define SIMULATE_VERSION_MAJOR DEC(__GNUC__) +# elif defined(__GNUG__) +# define SIMULATE_VERSION_MAJOR DEC(__GNUG__) +# endif +# if defined(__GNUC_MINOR__) +# define SIMULATE_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define SIMULATE_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(__PATHCC__) +# define COMPILER_ID "PathScale" +# define COMPILER_VERSION_MAJOR DEC(__PATHCC__) +# define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__) +# if defined(__PATHCC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__) +# endif + +#elif defined(__BORLANDC__) && defined(__CODEGEARC_VERSION__) +# define COMPILER_ID "Embarcadero" +# define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF) +# define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF) +# define COMPILER_VERSION_PATCH DEC(__CODEGEARC_VERSION__ & 0xFFFF) + +#elif defined(__BORLANDC__) +# define COMPILER_ID "Borland" + /* __BORLANDC__ = 0xVRR */ +# define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8) +# define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF) + +#elif defined(__WATCOMC__) && __WATCOMC__ < 1200 +# define COMPILER_ID "Watcom" + /* __WATCOMC__ = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__WATCOMC__) +# define COMPILER_ID "OpenWatcom" + /* __WATCOMC__ = VVRP + 1100 */ +# define COMPILER_VERSION_MAJOR DEC((__WATCOMC__ - 1100) / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__SUNPRO_C) +# define COMPILER_ID "SunPro" +# if __SUNPRO_C >= 0x5100 + /* __SUNPRO_C = 0xVRRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_C>>12) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_C>>4 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_C & 0xF) +# else + /* __SUNPRO_CC = 0xVRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_C>>8) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_C>>4 & 0xF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_C & 0xF) +# endif + +#elif defined(__HP_cc) +# define COMPILER_ID "HP" + 
/* __HP_cc = VVRRPP */ +# define COMPILER_VERSION_MAJOR DEC(__HP_cc/10000) +# define COMPILER_VERSION_MINOR DEC(__HP_cc/100 % 100) +# define COMPILER_VERSION_PATCH DEC(__HP_cc % 100) + +#elif defined(__DECC) +# define COMPILER_ID "Compaq" + /* __DECC_VER = VVRRTPPPP */ +# define COMPILER_VERSION_MAJOR DEC(__DECC_VER/10000000) +# define COMPILER_VERSION_MINOR DEC(__DECC_VER/100000 % 100) +# define COMPILER_VERSION_PATCH DEC(__DECC_VER % 10000) + +#elif defined(__IBMC__) && defined(__COMPILER_VER__) +# define COMPILER_ID "zOS" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__ibmxl__) && defined(__clang__) +# define COMPILER_ID "XLClang" +# define COMPILER_VERSION_MAJOR DEC(__ibmxl_version__) +# define COMPILER_VERSION_MINOR DEC(__ibmxl_release__) +# define COMPILER_VERSION_PATCH DEC(__ibmxl_modification__) +# define COMPILER_VERSION_TWEAK DEC(__ibmxl_ptf_fix_level__) + + +#elif defined(__IBMC__) && !defined(__COMPILER_VER__) && __IBMC__ >= 800 +# define COMPILER_ID "XL" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__IBMC__) && !defined(__COMPILER_VER__) && __IBMC__ < 800 +# define COMPILER_ID "VisualAge" + /* __IBMC__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMC__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMC__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMC__ % 10) + +#elif defined(__PGI) +# define COMPILER_ID "PGI" +# define COMPILER_VERSION_MAJOR DEC(__PGIC__) +# define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__) +# if defined(__PGIC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__) +# endif + +#elif defined(_CRAYC) +# define COMPILER_ID "Cray" +# define COMPILER_VERSION_MAJOR DEC(_RELEASE_MAJOR) +# define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR) + +#elif defined(__TI_COMPILER_VERSION__) +# define COMPILER_ID "TI" + /* __TI_COMPILER_VERSION__ = VVVRRRPPP */ +# define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000) +# define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000) +# define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000) + +#elif defined(__FUJITSU) || defined(__FCC_VERSION) || defined(__fcc_version) +# define COMPILER_ID "Fujitsu" + +#elif defined(__ghs__) +# define COMPILER_ID "GHS" +/* __GHS_VERSION_NUMBER = VVVVRP */ +# ifdef __GHS_VERSION_NUMBER +# define COMPILER_VERSION_MAJOR DEC(__GHS_VERSION_NUMBER / 100) +# define COMPILER_VERSION_MINOR DEC(__GHS_VERSION_NUMBER / 10 % 10) +# define COMPILER_VERSION_PATCH DEC(__GHS_VERSION_NUMBER % 10) +# endif + +#elif defined(__TINYC__) +# define COMPILER_ID "TinyCC" + +#elif defined(__BCC__) +# define COMPILER_ID "Bruce" + +#elif defined(__SCO_VERSION__) +# define COMPILER_ID "SCO" + +#elif defined(__ARMCC_VERSION) && !defined(__clang__) +# define COMPILER_ID "ARMCC" +#if __ARMCC_VERSION >= 1000000 + /* __ARMCC_VERSION = VRRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#else + /* __ARMCC_VERSION = VRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/100000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 10) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#endif + + 
+#elif defined(__clang__) && defined(__apple_build_version__) +# define COMPILER_ID "AppleClang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# define COMPILER_VERSION_TWEAK DEC(__apple_build_version__) + +#elif defined(__clang__) && defined(__ARMCOMPILER_VERSION) +# define COMPILER_ID "ARMClang" + # define COMPILER_VERSION_MAJOR DEC(__ARMCOMPILER_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCOMPILER_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCOMPILER_VERSION % 10000) +# define COMPILER_VERSION_INTERNAL DEC(__ARMCOMPILER_VERSION) + +#elif defined(__clang__) +# define COMPILER_ID "Clang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__GNUC__) +# define COMPILER_ID "GNU" +# define COMPILER_VERSION_MAJOR DEC(__GNUC__) +# if defined(__GNUC_MINOR__) +# define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(_MSC_VER) +# define COMPILER_ID "MSVC" + /* _MSC_VER = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100) +# define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100) +# if defined(_MSC_FULL_VER) +# if _MSC_VER >= 1400 + /* _MSC_FULL_VER = VVRRPPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000) +# else + /* _MSC_FULL_VER = VVRRPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000) +# endif +# endif +# if defined(_MSC_BUILD) +# define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD) +# endif + +#elif defined(__VISUALDSPVERSION__) || defined(__ADSPBLACKFIN__) || defined(__ADSPTS__) || defined(__ADSP21000__) +# define COMPILER_ID "ADSP" +#if defined(__VISUALDSPVERSION__) + /* __VISUALDSPVERSION__ = 0xVVRRPP00 */ +# define COMPILER_VERSION_MAJOR HEX(__VISUALDSPVERSION__>>24) +# define COMPILER_VERSION_MINOR HEX(__VISUALDSPVERSION__>>16 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__VISUALDSPVERSION__>>8 & 0xFF) +#endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# define COMPILER_ID "IAR" +# if defined(__VER__) && defined(__ICCARM__) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 1000000) +# define COMPILER_VERSION_MINOR DEC(((__VER__) / 1000) % 1000) +# define COMPILER_VERSION_PATCH DEC((__VER__) % 1000) +# define COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# elif defined(__VER__) && (defined(__ICCAVR__) || defined(__ICCRX__) || defined(__ICCRH850__) || defined(__ICCRL78__) || defined(__ICC430__) || defined(__ICCRISCV__)) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 100) +# define COMPILER_VERSION_MINOR DEC((__VER__) - (((__VER__) / 100)*100)) +# define COMPILER_VERSION_PATCH DEC(__SUBVERSION__) +# define COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# endif + +#elif defined(__SDCC_VERSION_MAJOR) || defined(SDCC) +# define COMPILER_ID "SDCC" +# if 
defined(__SDCC_VERSION_MAJOR) +# define COMPILER_VERSION_MAJOR DEC(__SDCC_VERSION_MAJOR) +# define COMPILER_VERSION_MINOR DEC(__SDCC_VERSION_MINOR) +# define COMPILER_VERSION_PATCH DEC(__SDCC_VERSION_PATCH) +# else + /* SDCC = VRP */ +# define COMPILER_VERSION_MAJOR DEC(SDCC/100) +# define COMPILER_VERSION_MINOR DEC(SDCC/10 % 10) +# define COMPILER_VERSION_PATCH DEC(SDCC % 10) +# endif + + +/* These compilers are either not known or too old to define an + identification macro. Try to identify the platform and guess that + it is the native compiler. */ +#elif defined(__hpux) || defined(__hpua) +# define COMPILER_ID "HP" + +#else /* unknown compiler */ +# define COMPILER_ID "" +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_compiler = "INFO" ":" "compiler[" COMPILER_ID "]"; +#ifdef SIMULATE_ID +char const* info_simulate = "INFO" ":" "simulate[" SIMULATE_ID "]"; +#endif + +#ifdef __QNXNTO__ +char const* qnxnto = "INFO" ":" "qnxnto[]"; +#endif + +#if defined(__CRAYXE) || defined(__CRAYXC) +char const *info_cray = "INFO" ":" "compiler_wrapper[CrayPrgEnv]"; +#endif + +#define STRINGIFY_HELPER(X) #X +#define STRINGIFY(X) STRINGIFY_HELPER(X) + +/* Identify known platforms by name. */ +#if defined(__linux) || defined(__linux__) || defined(linux) +# define PLATFORM_ID "Linux" + +#elif defined(__CYGWIN__) +# define PLATFORM_ID "Cygwin" + +#elif defined(__MINGW32__) +# define PLATFORM_ID "MinGW" + +#elif defined(__APPLE__) +# define PLATFORM_ID "Darwin" + +#elif defined(_WIN32) || defined(__WIN32__) || defined(WIN32) +# define PLATFORM_ID "Windows" + +#elif defined(__FreeBSD__) || defined(__FreeBSD) +# define PLATFORM_ID "FreeBSD" + +#elif defined(__NetBSD__) || defined(__NetBSD) +# define PLATFORM_ID "NetBSD" + +#elif defined(__OpenBSD__) || defined(__OPENBSD) +# define PLATFORM_ID "OpenBSD" + +#elif defined(__sun) || defined(sun) +# define PLATFORM_ID "SunOS" + +#elif defined(_AIX) || defined(__AIX) || defined(__AIX__) || defined(__aix) || defined(__aix__) +# define PLATFORM_ID "AIX" + +#elif defined(__hpux) || defined(__hpux__) +# define PLATFORM_ID "HP-UX" + +#elif defined(__HAIKU__) +# define PLATFORM_ID "Haiku" + +#elif defined(__BeOS) || defined(__BEOS__) || defined(_BEOS) +# define PLATFORM_ID "BeOS" + +#elif defined(__QNX__) || defined(__QNXNTO__) +# define PLATFORM_ID "QNX" + +#elif defined(__tru64) || defined(_tru64) || defined(__TRU64__) +# define PLATFORM_ID "Tru64" + +#elif defined(__riscos) || defined(__riscos__) +# define PLATFORM_ID "RISCos" + +#elif defined(__sinix) || defined(__sinix__) || defined(__SINIX__) +# define PLATFORM_ID "SINIX" + +#elif defined(__UNIX_SV__) +# define PLATFORM_ID "UNIX_SV" + +#elif defined(__bsdos__) +# define PLATFORM_ID "BSDOS" + +#elif defined(_MPRAS) || defined(MPRAS) +# define PLATFORM_ID "MP-RAS" + +#elif defined(__osf) || defined(__osf__) +# define PLATFORM_ID "OSF1" + +#elif defined(_SCO_SV) || defined(SCO_SV) || defined(sco_sv) +# define PLATFORM_ID "SCO_SV" + +#elif defined(__ultrix) || defined(__ultrix__) || defined(_ULTRIX) +# define PLATFORM_ID "ULTRIX" + +#elif defined(__XENIX__) || defined(_XENIX) || defined(XENIX) +# define PLATFORM_ID "Xenix" + +#elif defined(__WATCOMC__) +# if defined(__LINUX__) +# define PLATFORM_ID "Linux" + +# elif defined(__DOS__) +# define PLATFORM_ID "DOS" + +# elif 
defined(__OS2__) +# define PLATFORM_ID "OS2" + +# elif defined(__WINDOWS__) +# define PLATFORM_ID "Windows3x" + +# else /* unknown platform */ +# define PLATFORM_ID +# endif + +#elif defined(__INTEGRITY) +# if defined(INT_178B) +# define PLATFORM_ID "Integrity178" + +# else /* regular Integrity */ +# define PLATFORM_ID "Integrity" +# endif + +#else /* unknown platform */ +# define PLATFORM_ID + +#endif + +/* For windows compilers MSVC and Intel we can determine + the architecture of the compiler being used. This is because + the compilers do not have flags that can change the architecture, + but rather depend on which compiler is being used +*/ +#if defined(_WIN32) && defined(_MSC_VER) +# if defined(_M_IA64) +# define ARCHITECTURE_ID "IA64" + +# elif defined(_M_X64) || defined(_M_AMD64) +# define ARCHITECTURE_ID "x64" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# elif defined(_M_ARM64) +# define ARCHITECTURE_ID "ARM64" + +# elif defined(_M_ARM) +# if _M_ARM == 4 +# define ARCHITECTURE_ID "ARMV4I" +# elif _M_ARM == 5 +# define ARCHITECTURE_ID "ARMV5I" +# else +# define ARCHITECTURE_ID "ARMV" STRINGIFY(_M_ARM) +# endif + +# elif defined(_M_MIPS) +# define ARCHITECTURE_ID "MIPS" + +# elif defined(_M_SH) +# define ARCHITECTURE_ID "SHx" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__WATCOMC__) +# if defined(_M_I86) +# define ARCHITECTURE_ID "I86" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# if defined(__ICCARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__ICCRX__) +# define ARCHITECTURE_ID "RX" + +# elif defined(__ICCRH850__) +# define ARCHITECTURE_ID "RH850" + +# elif defined(__ICCRL78__) +# define ARCHITECTURE_ID "RL78" + +# elif defined(__ICCRISCV__) +# define ARCHITECTURE_ID "RISCV" + +# elif defined(__ICCAVR__) +# define ARCHITECTURE_ID "AVR" + +# elif defined(__ICC430__) +# define ARCHITECTURE_ID "MSP430" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__ghs__) +# if defined(__PPC64__) +# define ARCHITECTURE_ID "PPC64" + +# elif defined(__ppc__) +# define ARCHITECTURE_ID "PPC" + +# elif defined(__ARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__x86_64__) +# define ARCHITECTURE_ID "x64" + +# elif defined(__i386__) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif +#else +# define ARCHITECTURE_ID +#endif + +/* Convert integer to decimal digit literals. */ +#define DEC(n) \ + ('0' + (((n) / 10000000)%10)), \ + ('0' + (((n) / 1000000)%10)), \ + ('0' + (((n) / 100000)%10)), \ + ('0' + (((n) / 10000)%10)), \ + ('0' + (((n) / 1000)%10)), \ + ('0' + (((n) / 100)%10)), \ + ('0' + (((n) / 10)%10)), \ + ('0' + ((n) % 10)) + +/* Convert integer to hex digit literals. */ +#define HEX(n) \ + ('0' + ((n)>>28 & 0xF)), \ + ('0' + ((n)>>24 & 0xF)), \ + ('0' + ((n)>>20 & 0xF)), \ + ('0' + ((n)>>16 & 0xF)), \ + ('0' + ((n)>>12 & 0xF)), \ + ('0' + ((n)>>8 & 0xF)), \ + ('0' + ((n)>>4 & 0xF)), \ + ('0' + ((n) & 0xF)) + +/* Construct a string literal encoding the version number components. 
*/ +#ifdef COMPILER_VERSION_MAJOR +char const info_version[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','[', + COMPILER_VERSION_MAJOR, +# ifdef COMPILER_VERSION_MINOR + '.', COMPILER_VERSION_MINOR, +# ifdef COMPILER_VERSION_PATCH + '.', COMPILER_VERSION_PATCH, +# ifdef COMPILER_VERSION_TWEAK + '.', COMPILER_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct a string literal encoding the internal version number. */ +#ifdef COMPILER_VERSION_INTERNAL +char const info_version_internal[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','_', + 'i','n','t','e','r','n','a','l','[', + COMPILER_VERSION_INTERNAL,']','\0'}; +#endif + +/* Construct a string literal encoding the version number components. */ +#ifdef SIMULATE_VERSION_MAJOR +char const info_simulate_version[] = { + 'I', 'N', 'F', 'O', ':', + 's','i','m','u','l','a','t','e','_','v','e','r','s','i','o','n','[', + SIMULATE_VERSION_MAJOR, +# ifdef SIMULATE_VERSION_MINOR + '.', SIMULATE_VERSION_MINOR, +# ifdef SIMULATE_VERSION_PATCH + '.', SIMULATE_VERSION_PATCH, +# ifdef SIMULATE_VERSION_TWEAK + '.', SIMULATE_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_platform = "INFO" ":" "platform[" PLATFORM_ID "]"; +char const* info_arch = "INFO" ":" "arch[" ARCHITECTURE_ID "]"; + + + + +#if !defined(__STDC__) +# if (defined(_MSC_VER) && !defined(__clang__)) \ + || (defined(__ibmxl__) || defined(__IBMC__)) +# define C_DIALECT "90" +# else +# define C_DIALECT +# endif +#elif __STDC_VERSION__ >= 201000L +# define C_DIALECT "11" +#elif __STDC_VERSION__ >= 199901L +# define C_DIALECT "99" +#else +# define C_DIALECT "90" +#endif +const char* info_language_dialect_default = + "INFO" ":" "dialect_default[" C_DIALECT "]"; + +/*--------------------------------------------------------------------------*/ + +#ifdef ID_VOID_MAIN +void main() {} +#else +# if defined(__CLASSIC_C__) +int main(argc, argv) int argc; char *argv[]; +# else +int main(int argc, char* argv[]) +# endif +{ + int require = 0; + require += info_compiler[argc]; + require += info_platform[argc]; + require += info_arch[argc]; +#ifdef COMPILER_VERSION_MAJOR + require += info_version[argc]; +#endif +#ifdef COMPILER_VERSION_INTERNAL + require += info_version_internal[argc]; +#endif +#ifdef SIMULATE_ID + require += info_simulate[argc]; +#endif +#ifdef SIMULATE_VERSION_MAJOR + require += info_simulate_version[argc]; +#endif +#if defined(__CRAYXE) || defined(__CRAYXC) + require += info_cray[argc]; +#endif + require += info_language_dialect_default[argc]; + (void)argv; + return require; +} +#endif diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/a.out b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/a.out new file mode 100755 index 0000000000..11b7df452a Binary files /dev/null and b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdC/a.out differ diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/CMakeCXXCompilerId.cpp 
b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/CMakeCXXCompilerId.cpp new file mode 100644 index 0000000000..4761ea2b8f --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/CMakeCXXCompilerId.cpp @@ -0,0 +1,644 @@ +/* This source file must have a .cpp extension so that all C++ compilers + recognize the extension without flags. Borland does not know .cxx for + example. */ +#ifndef __cplusplus +# error "A C compiler has been selected for C++." +#endif + + +/* Version number components: V=Version, R=Revision, P=Patch + Version date components: YYYY=Year, MM=Month, DD=Day */ + +#if defined(__COMO__) +# define COMPILER_ID "Comeau" + /* __COMO_VERSION__ = VRR */ +# define COMPILER_VERSION_MAJOR DEC(__COMO_VERSION__ / 100) +# define COMPILER_VERSION_MINOR DEC(__COMO_VERSION__ % 100) + +#elif defined(__INTEL_COMPILER) || defined(__ICC) +# define COMPILER_ID "Intel" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# if defined(__GNUC__) +# define SIMULATE_ID "GNU" +# endif + /* __INTEL_COMPILER = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100) +# define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10) +# if defined(__INTEL_COMPILER_UPDATE) +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER_UPDATE) +# else +# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10) +# endif +# if defined(__INTEL_COMPILER_BUILD_DATE) + /* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */ +# define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE) +# endif +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# if defined(__GNUC__) +# define SIMULATE_VERSION_MAJOR DEC(__GNUC__) +# elif defined(__GNUG__) +# define SIMULATE_VERSION_MAJOR DEC(__GNUG__) +# endif +# if defined(__GNUC_MINOR__) +# define SIMULATE_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define SIMULATE_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(__PATHCC__) +# define COMPILER_ID "PathScale" +# define COMPILER_VERSION_MAJOR DEC(__PATHCC__) +# define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__) +# if defined(__PATHCC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__) +# endif + +#elif defined(__BORLANDC__) && defined(__CODEGEARC_VERSION__) +# define COMPILER_ID "Embarcadero" +# define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF) +# define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF) +# define COMPILER_VERSION_PATCH DEC(__CODEGEARC_VERSION__ & 0xFFFF) + +#elif defined(__BORLANDC__) +# define COMPILER_ID "Borland" + /* __BORLANDC__ = 0xVRR */ +# define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8) +# define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF) + +#elif defined(__WATCOMC__) && __WATCOMC__ < 1200 +# define COMPILER_ID "Watcom" + /* __WATCOMC__ = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif defined(__WATCOMC__) +# define COMPILER_ID "OpenWatcom" + /* __WATCOMC__ = VVRP + 1100 */ +# define COMPILER_VERSION_MAJOR DEC((__WATCOMC__ - 1100) / 100) +# define COMPILER_VERSION_MINOR DEC((__WATCOMC__ / 10) % 10) +# if (__WATCOMC__ % 10) > 0 +# define COMPILER_VERSION_PATCH DEC(__WATCOMC__ % 10) +# endif + +#elif 
defined(__SUNPRO_CC) +# define COMPILER_ID "SunPro" +# if __SUNPRO_CC >= 0x5100 + /* __SUNPRO_CC = 0xVRRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>12) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) +# else + /* __SUNPRO_CC = 0xVRP */ +# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>8) +# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xF) +# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) +# endif + +#elif defined(__HP_aCC) +# define COMPILER_ID "HP" + /* __HP_aCC = VVRRPP */ +# define COMPILER_VERSION_MAJOR DEC(__HP_aCC/10000) +# define COMPILER_VERSION_MINOR DEC(__HP_aCC/100 % 100) +# define COMPILER_VERSION_PATCH DEC(__HP_aCC % 100) + +#elif defined(__DECCXX) +# define COMPILER_ID "Compaq" + /* __DECCXX_VER = VVRRTPPPP */ +# define COMPILER_VERSION_MAJOR DEC(__DECCXX_VER/10000000) +# define COMPILER_VERSION_MINOR DEC(__DECCXX_VER/100000 % 100) +# define COMPILER_VERSION_PATCH DEC(__DECCXX_VER % 10000) + +#elif defined(__IBMCPP__) && defined(__COMPILER_VER__) +# define COMPILER_ID "zOS" + /* __IBMCPP__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__ibmxl__) && defined(__clang__) +# define COMPILER_ID "XLClang" +# define COMPILER_VERSION_MAJOR DEC(__ibmxl_version__) +# define COMPILER_VERSION_MINOR DEC(__ibmxl_release__) +# define COMPILER_VERSION_PATCH DEC(__ibmxl_modification__) +# define COMPILER_VERSION_TWEAK DEC(__ibmxl_ptf_fix_level__) + + +#elif defined(__IBMCPP__) && !defined(__COMPILER_VER__) && __IBMCPP__ >= 800 +# define COMPILER_ID "XL" + /* __IBMCPP__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__IBMCPP__) && !defined(__COMPILER_VER__) && __IBMCPP__ < 800 +# define COMPILER_ID "VisualAge" + /* __IBMCPP__ = VRP */ +# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) +# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) +# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) + +#elif defined(__PGI) +# define COMPILER_ID "PGI" +# define COMPILER_VERSION_MAJOR DEC(__PGIC__) +# define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__) +# if defined(__PGIC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__) +# endif + +#elif defined(_CRAYC) +# define COMPILER_ID "Cray" +# define COMPILER_VERSION_MAJOR DEC(_RELEASE_MAJOR) +# define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR) + +#elif defined(__TI_COMPILER_VERSION__) +# define COMPILER_ID "TI" + /* __TI_COMPILER_VERSION__ = VVVRRRPPP */ +# define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000) +# define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000) +# define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000) + +#elif defined(__FUJITSU) || defined(__FCC_VERSION) || defined(__fcc_version) +# define COMPILER_ID "Fujitsu" + +#elif defined(__ghs__) +# define COMPILER_ID "GHS" +/* __GHS_VERSION_NUMBER = VVVVRP */ +# ifdef __GHS_VERSION_NUMBER +# define COMPILER_VERSION_MAJOR DEC(__GHS_VERSION_NUMBER / 100) +# define COMPILER_VERSION_MINOR DEC(__GHS_VERSION_NUMBER / 10 % 10) +# define COMPILER_VERSION_PATCH DEC(__GHS_VERSION_NUMBER % 10) +# endif + +#elif defined(__SCO_VERSION__) +# define COMPILER_ID "SCO" + +#elif defined(__ARMCC_VERSION) && !defined(__clang__) +# define COMPILER_ID "ARMCC" +#if __ARMCC_VERSION >= 
1000000 + /* __ARMCC_VERSION = VRRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#else + /* __ARMCC_VERSION = VRPPPP */ + # define COMPILER_VERSION_MAJOR DEC(__ARMCC_VERSION/100000) + # define COMPILER_VERSION_MINOR DEC(__ARMCC_VERSION/10000 % 10) + # define COMPILER_VERSION_PATCH DEC(__ARMCC_VERSION % 10000) +#endif + + +#elif defined(__clang__) && defined(__apple_build_version__) +# define COMPILER_ID "AppleClang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif +# define COMPILER_VERSION_TWEAK DEC(__apple_build_version__) + +#elif defined(__clang__) && defined(__ARMCOMPILER_VERSION) +# define COMPILER_ID "ARMClang" + # define COMPILER_VERSION_MAJOR DEC(__ARMCOMPILER_VERSION/1000000) + # define COMPILER_VERSION_MINOR DEC(__ARMCOMPILER_VERSION/10000 % 100) + # define COMPILER_VERSION_PATCH DEC(__ARMCOMPILER_VERSION % 10000) +# define COMPILER_VERSION_INTERNAL DEC(__ARMCOMPILER_VERSION) + +#elif defined(__clang__) +# define COMPILER_ID "Clang" +# if defined(_MSC_VER) +# define SIMULATE_ID "MSVC" +# endif +# define COMPILER_VERSION_MAJOR DEC(__clang_major__) +# define COMPILER_VERSION_MINOR DEC(__clang_minor__) +# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) +# if defined(_MSC_VER) + /* _MSC_VER = VVRR */ +# define SIMULATE_VERSION_MAJOR DEC(_MSC_VER / 100) +# define SIMULATE_VERSION_MINOR DEC(_MSC_VER % 100) +# endif + +#elif defined(__GNUC__) || defined(__GNUG__) +# define COMPILER_ID "GNU" +# if defined(__GNUC__) +# define COMPILER_VERSION_MAJOR DEC(__GNUC__) +# else +# define COMPILER_VERSION_MAJOR DEC(__GNUG__) +# endif +# if defined(__GNUC_MINOR__) +# define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__) +# endif +# if defined(__GNUC_PATCHLEVEL__) +# define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) +# endif + +#elif defined(_MSC_VER) +# define COMPILER_ID "MSVC" + /* _MSC_VER = VVRR */ +# define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100) +# define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100) +# if defined(_MSC_FULL_VER) +# if _MSC_VER >= 1400 + /* _MSC_FULL_VER = VVRRPPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000) +# else + /* _MSC_FULL_VER = VVRRPPPP */ +# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000) +# endif +# endif +# if defined(_MSC_BUILD) +# define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD) +# endif + +#elif defined(__VISUALDSPVERSION__) || defined(__ADSPBLACKFIN__) || defined(__ADSPTS__) || defined(__ADSP21000__) +# define COMPILER_ID "ADSP" +#if defined(__VISUALDSPVERSION__) + /* __VISUALDSPVERSION__ = 0xVVRRPP00 */ +# define COMPILER_VERSION_MAJOR HEX(__VISUALDSPVERSION__>>24) +# define COMPILER_VERSION_MINOR HEX(__VISUALDSPVERSION__>>16 & 0xFF) +# define COMPILER_VERSION_PATCH HEX(__VISUALDSPVERSION__>>8 & 0xFF) +#endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# define COMPILER_ID "IAR" +# if defined(__VER__) && defined(__ICCARM__) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 1000000) +# define COMPILER_VERSION_MINOR DEC(((__VER__) / 1000) % 1000) +# define COMPILER_VERSION_PATCH DEC((__VER__) % 1000) +# define 
COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# elif defined(__VER__) && (defined(__ICCAVR__) || defined(__ICCRX__) || defined(__ICCRH850__) || defined(__ICCRL78__) || defined(__ICC430__) || defined(__ICCRISCV__)) +# define COMPILER_VERSION_MAJOR DEC((__VER__) / 100) +# define COMPILER_VERSION_MINOR DEC((__VER__) - (((__VER__) / 100)*100)) +# define COMPILER_VERSION_PATCH DEC(__SUBVERSION__) +# define COMPILER_VERSION_INTERNAL DEC(__IAR_SYSTEMS_ICC__) +# endif + + +/* These compilers are either not known or too old to define an + identification macro. Try to identify the platform and guess that + it is the native compiler. */ +#elif defined(__hpux) || defined(__hpua) +# define COMPILER_ID "HP" + +#else /* unknown compiler */ +# define COMPILER_ID "" +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_compiler = "INFO" ":" "compiler[" COMPILER_ID "]"; +#ifdef SIMULATE_ID +char const* info_simulate = "INFO" ":" "simulate[" SIMULATE_ID "]"; +#endif + +#ifdef __QNXNTO__ +char const* qnxnto = "INFO" ":" "qnxnto[]"; +#endif + +#if defined(__CRAYXE) || defined(__CRAYXC) +char const *info_cray = "INFO" ":" "compiler_wrapper[CrayPrgEnv]"; +#endif + +#define STRINGIFY_HELPER(X) #X +#define STRINGIFY(X) STRINGIFY_HELPER(X) + +/* Identify known platforms by name. */ +#if defined(__linux) || defined(__linux__) || defined(linux) +# define PLATFORM_ID "Linux" + +#elif defined(__CYGWIN__) +# define PLATFORM_ID "Cygwin" + +#elif defined(__MINGW32__) +# define PLATFORM_ID "MinGW" + +#elif defined(__APPLE__) +# define PLATFORM_ID "Darwin" + +#elif defined(_WIN32) || defined(__WIN32__) || defined(WIN32) +# define PLATFORM_ID "Windows" + +#elif defined(__FreeBSD__) || defined(__FreeBSD) +# define PLATFORM_ID "FreeBSD" + +#elif defined(__NetBSD__) || defined(__NetBSD) +# define PLATFORM_ID "NetBSD" + +#elif defined(__OpenBSD__) || defined(__OPENBSD) +# define PLATFORM_ID "OpenBSD" + +#elif defined(__sun) || defined(sun) +# define PLATFORM_ID "SunOS" + +#elif defined(_AIX) || defined(__AIX) || defined(__AIX__) || defined(__aix) || defined(__aix__) +# define PLATFORM_ID "AIX" + +#elif defined(__hpux) || defined(__hpux__) +# define PLATFORM_ID "HP-UX" + +#elif defined(__HAIKU__) +# define PLATFORM_ID "Haiku" + +#elif defined(__BeOS) || defined(__BEOS__) || defined(_BEOS) +# define PLATFORM_ID "BeOS" + +#elif defined(__QNX__) || defined(__QNXNTO__) +# define PLATFORM_ID "QNX" + +#elif defined(__tru64) || defined(_tru64) || defined(__TRU64__) +# define PLATFORM_ID "Tru64" + +#elif defined(__riscos) || defined(__riscos__) +# define PLATFORM_ID "RISCos" + +#elif defined(__sinix) || defined(__sinix__) || defined(__SINIX__) +# define PLATFORM_ID "SINIX" + +#elif defined(__UNIX_SV__) +# define PLATFORM_ID "UNIX_SV" + +#elif defined(__bsdos__) +# define PLATFORM_ID "BSDOS" + +#elif defined(_MPRAS) || defined(MPRAS) +# define PLATFORM_ID "MP-RAS" + +#elif defined(__osf) || defined(__osf__) +# define PLATFORM_ID "OSF1" + +#elif defined(_SCO_SV) || defined(SCO_SV) || defined(sco_sv) +# define PLATFORM_ID "SCO_SV" + +#elif defined(__ultrix) || defined(__ultrix__) || defined(_ULTRIX) +# define PLATFORM_ID "ULTRIX" + +#elif defined(__XENIX__) || defined(_XENIX) || defined(XENIX) +# define PLATFORM_ID "Xenix" + +#elif defined(__WATCOMC__) +# if defined(__LINUX__) +# define PLATFORM_ID 
"Linux" + +# elif defined(__DOS__) +# define PLATFORM_ID "DOS" + +# elif defined(__OS2__) +# define PLATFORM_ID "OS2" + +# elif defined(__WINDOWS__) +# define PLATFORM_ID "Windows3x" + +# else /* unknown platform */ +# define PLATFORM_ID +# endif + +#elif defined(__INTEGRITY) +# if defined(INT_178B) +# define PLATFORM_ID "Integrity178" + +# else /* regular Integrity */ +# define PLATFORM_ID "Integrity" +# endif + +#else /* unknown platform */ +# define PLATFORM_ID + +#endif + +/* For windows compilers MSVC and Intel we can determine + the architecture of the compiler being used. This is because + the compilers do not have flags that can change the architecture, + but rather depend on which compiler is being used +*/ +#if defined(_WIN32) && defined(_MSC_VER) +# if defined(_M_IA64) +# define ARCHITECTURE_ID "IA64" + +# elif defined(_M_X64) || defined(_M_AMD64) +# define ARCHITECTURE_ID "x64" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# elif defined(_M_ARM64) +# define ARCHITECTURE_ID "ARM64" + +# elif defined(_M_ARM) +# if _M_ARM == 4 +# define ARCHITECTURE_ID "ARMV4I" +# elif _M_ARM == 5 +# define ARCHITECTURE_ID "ARMV5I" +# else +# define ARCHITECTURE_ID "ARMV" STRINGIFY(_M_ARM) +# endif + +# elif defined(_M_MIPS) +# define ARCHITECTURE_ID "MIPS" + +# elif defined(_M_SH) +# define ARCHITECTURE_ID "SHx" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__WATCOMC__) +# if defined(_M_I86) +# define ARCHITECTURE_ID "I86" + +# elif defined(_M_IX86) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__IAR_SYSTEMS_ICC__) || defined(__IAR_SYSTEMS_ICC) +# if defined(__ICCARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__ICCRX__) +# define ARCHITECTURE_ID "RX" + +# elif defined(__ICCRH850__) +# define ARCHITECTURE_ID "RH850" + +# elif defined(__ICCRL78__) +# define ARCHITECTURE_ID "RL78" + +# elif defined(__ICCRISCV__) +# define ARCHITECTURE_ID "RISCV" + +# elif defined(__ICCAVR__) +# define ARCHITECTURE_ID "AVR" + +# elif defined(__ICC430__) +# define ARCHITECTURE_ID "MSP430" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif + +#elif defined(__ghs__) +# if defined(__PPC64__) +# define ARCHITECTURE_ID "PPC64" + +# elif defined(__ppc__) +# define ARCHITECTURE_ID "PPC" + +# elif defined(__ARM__) +# define ARCHITECTURE_ID "ARM" + +# elif defined(__x86_64__) +# define ARCHITECTURE_ID "x64" + +# elif defined(__i386__) +# define ARCHITECTURE_ID "X86" + +# else /* unknown architecture */ +# define ARCHITECTURE_ID "" +# endif +#else +# define ARCHITECTURE_ID +#endif + +/* Convert integer to decimal digit literals. */ +#define DEC(n) \ + ('0' + (((n) / 10000000)%10)), \ + ('0' + (((n) / 1000000)%10)), \ + ('0' + (((n) / 100000)%10)), \ + ('0' + (((n) / 10000)%10)), \ + ('0' + (((n) / 1000)%10)), \ + ('0' + (((n) / 100)%10)), \ + ('0' + (((n) / 10)%10)), \ + ('0' + ((n) % 10)) + +/* Convert integer to hex digit literals. */ +#define HEX(n) \ + ('0' + ((n)>>28 & 0xF)), \ + ('0' + ((n)>>24 & 0xF)), \ + ('0' + ((n)>>20 & 0xF)), \ + ('0' + ((n)>>16 & 0xF)), \ + ('0' + ((n)>>12 & 0xF)), \ + ('0' + ((n)>>8 & 0xF)), \ + ('0' + ((n)>>4 & 0xF)), \ + ('0' + ((n) & 0xF)) + +/* Construct a string literal encoding the version number components. 
*/ +#ifdef COMPILER_VERSION_MAJOR +char const info_version[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','[', + COMPILER_VERSION_MAJOR, +# ifdef COMPILER_VERSION_MINOR + '.', COMPILER_VERSION_MINOR, +# ifdef COMPILER_VERSION_PATCH + '.', COMPILER_VERSION_PATCH, +# ifdef COMPILER_VERSION_TWEAK + '.', COMPILER_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct a string literal encoding the internal version number. */ +#ifdef COMPILER_VERSION_INTERNAL +char const info_version_internal[] = { + 'I', 'N', 'F', 'O', ':', + 'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','_', + 'i','n','t','e','r','n','a','l','[', + COMPILER_VERSION_INTERNAL,']','\0'}; +#endif + +/* Construct a string literal encoding the version number components. */ +#ifdef SIMULATE_VERSION_MAJOR +char const info_simulate_version[] = { + 'I', 'N', 'F', 'O', ':', + 's','i','m','u','l','a','t','e','_','v','e','r','s','i','o','n','[', + SIMULATE_VERSION_MAJOR, +# ifdef SIMULATE_VERSION_MINOR + '.', SIMULATE_VERSION_MINOR, +# ifdef SIMULATE_VERSION_PATCH + '.', SIMULATE_VERSION_PATCH, +# ifdef SIMULATE_VERSION_TWEAK + '.', SIMULATE_VERSION_TWEAK, +# endif +# endif +# endif + ']','\0'}; +#endif + +/* Construct the string literal in pieces to prevent the source from + getting matched. Store it in a pointer rather than an array + because some compilers will just produce instructions to fill the + array rather than assigning a pointer to a static array. */ +char const* info_platform = "INFO" ":" "platform[" PLATFORM_ID "]"; +char const* info_arch = "INFO" ":" "arch[" ARCHITECTURE_ID "]"; + + + + +#if defined(_MSC_VER) && defined(_MSVC_LANG) +#define CXX_STD _MSVC_LANG +#else +#define CXX_STD __cplusplus +#endif + +const char* info_language_dialect_default = "INFO" ":" "dialect_default[" +#if CXX_STD > 201703L + "20" +#elif CXX_STD >= 201703L + "17" +#elif CXX_STD >= 201402L + "14" +#elif CXX_STD >= 201103L + "11" +#else + "98" +#endif +"]"; + +/*--------------------------------------------------------------------------*/ + +int main(int argc, char* argv[]) +{ + int require = 0; + require += info_compiler[argc]; + require += info_platform[argc]; +#ifdef COMPILER_VERSION_MAJOR + require += info_version[argc]; +#endif +#ifdef COMPILER_VERSION_INTERNAL + require += info_version_internal[argc]; +#endif +#ifdef SIMULATE_ID + require += info_simulate[argc]; +#endif +#ifdef SIMULATE_VERSION_MAJOR + require += info_simulate_version[argc]; +#endif +#if defined(__CRAYXE) || defined(__CRAYXC) + require += info_cray[argc]; +#endif + require += info_language_dialect_default[argc]; + (void)argv; + return require; +} diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/a.out b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/a.out new file mode 100755 index 0000000000..71c2ceda86 Binary files /dev/null and b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/3.15.3/CompilerIdCXX/a.out differ diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-environment.txt b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-environment.txt new file mode 100644 index 0000000000..12efafd246 Binary files /dev/null and b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-environment.txt differ diff --git 
a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-log.txt b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-log.txt new file mode 100644 index 0000000000..1bd058cd55 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/clion-log.txt @@ -0,0 +1,16 @@ +/snap/clion/103/bin/cmake/linux/bin/cmake -DCMAKE_BUILD_TYPE=Debug -DBUILD_RASPBERRY=0 -DBUILD_ONLY_BRAIN=0 -DCMAKE_BUILD_TYPE=Release && make -G "CodeBlocks - Unix Makefiles" /home/fuda/Projects/revolve/cpprevolve/revolve/gazebo/IMC +-- Configuring done +CMake Error at CMakeLists.txt:16 (add_executable): + Cannot find source file: + + FeedForwardModel.cpp + + Tried extensions .c .C .c++ .cc .cpp .cxx .cu .m .M .mm .h .hh .h++ .hm + .hpp .hxx .in .txx + + +CMake Error at CMakeLists.txt:16 (add_executable): + No SOURCES given to target: Test_IMC + + +CMake Generate step failed. Build files cannot be regenerated correctly. diff --git a/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/cmake.check_cache b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/cmake.check_cache new file mode 100644 index 0000000000..3dccd73172 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/IMC/cmake-build-debug/CMakeFiles/cmake.check_cache @@ -0,0 +1 @@ +# This file is generated by cmake for dependency checking of the CMakeCache.txt file diff --git a/cpprevolve/revolve/brains/controller/actuators/Actuator.h b/cpprevolve/revolve/brains/controller/actuators/Actuator.h index 21510077cf..a961aa4888 100644 --- a/cpprevolve/revolve/brains/controller/actuators/Actuator.h +++ b/cpprevolve/revolve/brains/controller/actuators/Actuator.h @@ -24,6 +24,14 @@ class Actuator inline double coordinate_y() const { return std::get<1>(this->coordinates); } inline double coordinate_z() const { return std::get<2>(this->coordinates); } + enum StateType { + POSITION, + VELOCITY, + TORQUE + }; + + virtual double Current_State( StateType type ) = 0; + virtual void write(const double *output, double step) = 0; inline unsigned int n_outputs() const {return this->_n_outputs;} diff --git a/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.cpp b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.cpp new file mode 100644 index 0000000000..cefac09940 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.cpp @@ -0,0 +1,355 @@ +// +// Created by matteo on 2/28/20. 
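Editor's note: the Actuator.h hunk above extends the actuator interface with a StateType enum and a pure virtual Current_State() accessor next to write(). A minimal usage sketch follows; it assumes the class sits in the revolve namespace like the other brain interfaces, and the helper name and include path are illustrative only, not part of this patch:

    #include <array>
    #include "Actuator.h"   // adjust the include path to your build layout

    // Hypothetical helper: snapshot one joint's state through the new accessor.
    std::array<double, 3> snapshot_state(revolve::Actuator &actuator)
    {
        return {
            actuator.Current_State(revolve::Actuator::POSITION),
            actuator.Current_State(revolve::Actuator::VELOCITY),
            actuator.Current_State(revolve::Actuator::TORQUE),
        };
    }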
+// + +#include "AngleToTargetDetector.h" +#include +#include +#include +#include + +revolve::AngleToTargetDetector::AngleToTargetDetector(const unsigned int shrink_factor, const bool show_image) + : Sensor(1) + , show_image(show_image) + , shrink_factor(shrink_factor) +// , angle(std::atan(img.cols/img.rows) * 180 / M_PI) + , angle(NAN) +{} + +void revolve::AngleToTargetDetector::read(double *input) +{ + input[0] = detect_angle(); +} + +float revolve::AngleToTargetDetector::detect_angle() +{ + get_image(raw_image); + unsigned int image_cols = raw_image.cols/shrink_factor; + unsigned int image_rows = raw_image.rows/shrink_factor; + cv::resize(raw_image, image, cv::Size(image_cols, image_rows)); + + cv::medianBlur(image, image_blur, 5); + cv::cvtColor(image_blur, image_hsv, cv::COLOR_BGR2HSV); + + //green + const int gLowH1=35,gHighH1=40,gLowH2=41,gHighH2=59,gLowS1=140,gLowS2=69,gHighS=255,gLowV=104,gHighV=255; + //blue + const int bLowH=99,bHighH=121,bLowS=120,bHighS=255,bLowV=57,bHighV=211; + + //detecting Blue + cv::inRange(image_hsv, cv::Scalar(bLowH,bLowS, bLowV), cv::Scalar(bHighH,bHighS, bHighV) ,image_blue); + //detecting Green + cv::inRange(image_hsv, cv::Scalar(gLowH1,gLowS1, gLowV), cv::Scalar(gHighH1,gHighS, gHighV),image_green1); + cv::inRange(image_hsv, cv::Scalar(gLowH2,gLowS2, gLowV), cv::Scalar(gHighH2,gHighS, gHighV),image_green2); + cv::add(image_green1, image_green2, image_green); + + std::vector> contours_blue, contours_green;; //contours_red, contours_yellow; + + cv::findContours(image_blue, contours_blue, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE); + cv::findContours(image_green, contours_green, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE); + //cv::findContours(image_red, contours_red, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE); + //cv::findContours(image_yellow, contours_yellow, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_NONE); + + std::vector rect_coord, rect_coord_blue, rect_coord_green; //rect_coord_red, rect_coord_yellow; + + // blue contours + for (const std::vector &contours_blue_line : contours_blue) { + double image_blue_area_buf = cv::contourArea(contours_blue_line); + + if (image_blue_area_buf > 5) { + cv::Rect bounding_rect = cv::boundingRect(contours_blue_line); + rect_coord_blue.emplace_back(bounding_rect); + } + } + + // green contours + for (const std::vector &contours_green_line : contours_green) { + double image_blue_area_buf = cv::contourArea(contours_green_line); + + if (image_blue_area_buf > 5) { + cv::Rect bounding_rect = cv::boundingRect(contours_green_line); + rect_coord_green.emplace_back(bounding_rect); + } + } + + //// red contours + //for (const std::vector &contours_red_line : contours_red) { + // double image_blue_area_buf = cv::contourArea(contours_red_line); + // + // if (image_blue_area_buf > 5) { + // cv::Rect bounding_rect = cv::boundingRect(contours_red_line); + // rect_coord_red.emplace_back(bounding_rect); + // } + //} + // + //// yellow contours + //for (const std::vector &contours_yellow_line : contours_yellow) { + // double image_blue_area_buf = cv::contourArea(contours_yellow_line); + // + // if (image_blue_area_buf > 5) { + // cv::Rect bounding_rect = cv::boundingRect(contours_yellow_line); + // rect_coord_yellow.emplace_back(bounding_rect); + // } + //} + + rect_coord.reserve( rect_coord_blue.size() + rect_coord_green.size() ); // preallocate memory + // + rect_coord_red.size() + rect_coord_yellow.size() + rect_coord.insert( rect_coord.end(), rect_coord_blue.begin(), rect_coord_blue.end() ); + rect_coord.insert( rect_coord.end(), 
rect_coord_green.begin(), rect_coord_green.end() ); + //rect_coord.insert( rect_coord.end(), rect_coord_red.begin(), rect_coord_red.end() ); + //rect_coord.insert( rect_coord.end(), rect_coord_yellow.begin(), rect_coord_yellow.end() ); + +// ----- MAGIC GONGJIN CODE HERE ---------------------------------------------- + unsigned int num = rect_coord.size(); + int distanceBox[num][num], distanceBoxSum[num], numBox[num], minDistanceBox[num], min2DistanceBox[num],rectBoxHeight = 0, rectBoxHeightMax = 0; + for (int i = 0; i < num; i++) //calculating the suitable(medium) value of height + { + if (rect_coord[i].height > rectBoxHeightMax) + { + rectBoxHeight = rectBoxHeightMax; // set this value as the height of box + rectBoxHeightMax = rect_coord[i].height; + } + else if (rect_coord[i].height > rectBoxHeight) + rectBoxHeight = rect_coord[i].height; + } + + for (int j = 0; j < num; j++) //calculating the value of minimum and the second minimum distance for each box + { + minDistanceBox[j] = 800; + min2DistanceBox[j] = 800; + for (int x = 0; x < num; x++) + { + if (j != x) + { + distanceBox[j][x] = std::min( + std::abs(rect_coord[j].tl().x - rect_coord[x].br().x), + std::abs(rect_coord[j].br().x - rect_coord[x].tl().x) + ); + + if (distanceBox[j][x] < minDistanceBox[j]) + { + min2DistanceBox[j] = minDistanceBox[j]; //the second minimum distance + minDistanceBox[j] = distanceBox[j][x]; //the minimun distance + } + else if (distanceBox[j][x] < min2DistanceBox[j]) + { + min2DistanceBox[j] = distanceBox[j][x]; + } + } + } + distanceBoxSum[j] = minDistanceBox[j] + min2DistanceBox[j]; + } + + for (int i =0; i < num; i++) //sequence from minimum distance to maximum distance + { + numBox[i] = 0; + for (int j=0; j < num; j++) + { + if (i != j) // get the Box[i] sequence + { + if (distanceBoxSum[i] > distanceBoxSum[j]) + numBox[i]+=1; //numBox[i] = numBox[i] +1, save the number + if (distanceBoxSum[i] == distanceBoxSum[j]) + { + if (minDistanceBox[i] >= minDistanceBox[j]) //always have the same distance between two points each other + numBox[i]+=1; // + } + } + } + } + //-------------difine the ROIs of robot------------ + int lastnum = num, robNum, minRectCoorX[num], minRectCoorY[num], maxRectCoorX[num], maxRectCoorY[num]; + for (robNum = 0; lastnum >= 2 && robNum < num; robNum++) + { + int minNumBox=100; + for (int k = 0; k 2) //when robot only have 2 boxes at least, just combine the two boxes + numBox[i] = 100; //make it not included in the rest + minRectCoorX[robNum] = rect_coord[i].tl().x; + minRectCoorY[robNum] = rect_coord[i].tl().y; + maxRectCoorX[robNum] = rect_coord[i].br().x; + maxRectCoorY[robNum] = rect_coord[i].br().y; + int bufnum = 0, jBox[50] = {0}; + for (int j = 0; j < num; j++) //calculating the coordination of rectangle incluing boxes belong to the distance area + { + //-------------the first threshold condition------------------- + if (j != i && numBox[j] != 100 && distanceBox[i][j] < 4.3 * rectBoxHeight) //3.4, 3.5, 4.5, 4.3 justify if the box belong to the same robot by distance of boxeswith the center box + { + jBox[bufnum] = j; + lastnum --; + bufnum ++; //the number of boxes that match the threshold of (distanceBox[i][j] < 3.4 * rectBoxHeight) + } + //----calculating the max distance between boxes after the first threshold condition, preparing for next-------- + if (j == num - 1 && bufnum >= 1) //bufnum >= 1 (it have two candidate at least) + { + int maxBoxDisOut[num], max_in_out[num][num],maxBoxDisOutNum[num]; + for (int buf = 0; buf < bufnum; buf++) //calculating the max distance 
between boxes in jBox[bufnum] + { + maxBoxDisOut[jBox[buf]] = 0; + int rectCoor_tl_br, rectCoor_br_tl; + if (bufnum == 1) // one other box and one center box + { + rectCoor_tl_br = std::abs(rect_coord[i].tl().x - rect_coord[jBox[0]].br().x); //calculating the inside or outside distance between the same boxes + rectCoor_br_tl = std::abs(rect_coord[i].br().x - rect_coord[jBox[0]].tl().x); //calculating the inside or outside distance between the same boxes + maxBoxDisOut[jBox[0]] = std::min(rectCoor_tl_br,rectCoor_br_tl); //max, min + } + else + { + for (int buff = 0; buff < bufnum; buff++) + { + rectCoor_tl_br = std::abs(rect_coord[jBox[buf]].tl().x - rect_coord[jBox[buff]].br().x); //calculating the inside or outside distance between the same boxes + rectCoor_br_tl = std::abs(rect_coord[jBox[buf]].br().x - rect_coord[jBox[buff]].tl().x); //calculating the inside or outside distance between the same boxes + max_in_out[jBox[buf]][jBox[buff]] = std::min(rectCoor_tl_br,rectCoor_br_tl); //max,min + if (max_in_out[jBox[buf]][jBox[buff]] > maxBoxDisOut[jBox[buf]]) + { + maxBoxDisOut[jBox[buf]] = max_in_out[jBox[buf]][jBox[buff]]; + maxBoxDisOutNum[buf] = jBox[buff]; + } + } + } + } + //bufnum >1 guarantte the robot have center box and two other box (bufnum=2) at least, or not go to compare center box and another one box + if (bufnum >= 2) + { + int delNum = 0; + for (int bufff = 0; bufff < bufnum; bufff++) //compare the max distance (robot size from left to right) of boxes in jBox[bufnum] + { + if (maxBoxDisOut[jBox[bufff]] < 6.2 * rectBoxHeight) //if > the length of robot, delete far one, get the near one as rectangle + { + minRectCoorX[robNum] = std::min(rect_coord[jBox[bufff]].tl().x, minRectCoorX[robNum]); + minRectCoorY[robNum] = std::min(rect_coord[jBox[bufff]].tl().y, minRectCoorY[robNum]); + maxRectCoorX[robNum] = std::max(rect_coord[jBox[bufff]].br().x, maxRectCoorX[robNum]); + maxRectCoorY[robNum] = std::max(rect_coord[jBox[bufff]].br().y, maxRectCoorY[robNum]); + numBox[jBox[bufff]] = 100; //set a constant not zero and more than all of the numBox + } + //TODO this else if is doing exactly the same code as above, remove it + else if (distanceBox[i][jBox[bufff]] < distanceBox[i][maxBoxDisOutNum[bufff]]) //always have two boxes match this condition at the same time, choice one of them + { + minRectCoorX[robNum] = std::min(rect_coord[jBox[bufff]].tl().x, minRectCoorX[robNum]); + minRectCoorY[robNum] = std::min(rect_coord[jBox[bufff]].tl().y, minRectCoorY[robNum]); + maxRectCoorX[robNum] = std::max(rect_coord[jBox[bufff]].br().x, maxRectCoorX[robNum]); + maxRectCoorY[robNum] = std::max(rect_coord[jBox[bufff]].br().y, maxRectCoorY[robNum]); + numBox[jBox[bufff]] = 100; //set a constant not zero and more than all of the numBox + } + else + { + minRectCoorX[robNum] = std::min(rect_coord[maxBoxDisOutNum[bufff]].tl().x, minRectCoorX[robNum]); + minRectCoorY[robNum] = std::min(rect_coord[maxBoxDisOutNum[bufff]].tl().y, minRectCoorY[robNum]); + maxRectCoorX[robNum] = std::max(rect_coord[maxBoxDisOutNum[bufff]].br().x, maxRectCoorX[robNum]); + maxRectCoorY[robNum] = std::max(rect_coord[maxBoxDisOutNum[bufff]].br().y, maxRectCoorY[robNum]); + numBox[maxBoxDisOutNum[bufff]] = 100; + delNum ++; + } + } + lastnum = lastnum + delNum; //plus for the cancelled more one + bufnum = bufnum - delNum; + } + else //compare center box and another one box, when bufnum = 1 + { + if (maxBoxDisOut[jBox[0]] < 6.2 * rectBoxHeight) //the length of robot 9.4 + { + minRectCoorX[robNum] = 
std::min(rect_coord[jBox[0]].tl().x, minRectCoorX[robNum]); + minRectCoorY[robNum] = std::min(rect_coord[jBox[0]].tl().y, minRectCoorY[robNum]); + maxRectCoorX[robNum] = std::max(rect_coord[jBox[0]].br().x, maxRectCoorX[robNum]); + maxRectCoorY[robNum] = std::max(rect_coord[jBox[0]].br().y, maxRectCoorY[robNum]); + numBox[jBox[0]] = 100; //set a constant not zero and more than all of the numBox + } + else //just one center to rest + { + robNum --; + } + } + } + } + } + } + } + + // calculate the angle + if (std::isnan(angle) and robNum == 0) + { + // init first angle + angle = atan(image.cols / static_cast(image.rows)) * 180.0 / M_PI; + } + else + { + for (int i = 0; i < robNum; i++) + { + const int robCenterCoorX = 2*(minRectCoorX[i] + maxRectCoorX[i]); + const int robCenterCoorY = 2*(minRectCoorY[i] + maxRectCoorY[i]); + char textRobCenterCoor[64], textDistance[64]; + + if (show_image) { + cv::rectangle(raw_image, cv::Point(shrink_factor*minRectCoorX[i],shrink_factor*minRectCoorY[i]), cv::Point(shrink_factor*maxRectCoorX[i],shrink_factor*maxRectCoorY[i]), cv::Scalar(0,255,0),1); + cv::circle(raw_image, cv::Point(robCenterCoorX,robCenterCoorY),3, cv::Scalar(0,255,0),4); + + std::snprintf(textRobCenterCoor, sizeof(textRobCenterCoor), "(%d,%d)", robCenterCoorX, robCenterCoorY); + cv::putText(raw_image, textRobCenterCoor, cv::Point(robCenterCoorX + 10, robCenterCoorY + 3), + cv::FONT_HERSHEY_DUPLEX, 0.4, cv::Scalar(0, 255, 0), 1); + } + + const int leftLine = raw_image.cols / 2; + const int rightLine = raw_image.cols / 2; + if (robCenterCoorX < leftLine) + { + double distance = robCenterCoorX - leftLine; + angle = std::atan(distance/robCenterCoorY) * 180.0 / M_PI; + if (show_image) { + std::snprintf(textDistance, sizeof(textDistance), "L:%f Angle: %f", distance, angle); + cv::putText(raw_image, textDistance, cv::Point(0.0 * raw_image.cols, 15), cv::FONT_HERSHEY_DUPLEX, 0.5, + cv::Scalar(0, 255, 0), 1); + } + } + + if (robCenterCoorX > rightLine) + { + double distance = robCenterCoorX - rightLine; + angle = std::atan(distance/robCenterCoorY) * 180.0 / M_PI; + if (show_image) { + std::snprintf(textDistance, sizeof(textDistance), "R:%f Angle: %f", distance, angle); + cv::putText(raw_image, textDistance, cv::Point(0.5 * raw_image.cols, 15), cv::FONT_HERSHEY_DUPLEX, 0.5, + cv::Scalar(0, 255, 0), 1); + } + } + + if (show_image) { + cv::line(raw_image, cv::Point(shrink_factor * minRectCoorX[i], shrink_factor * minRectCoorY[i]), + cv::Point(shrink_factor * maxRectCoorX[i], shrink_factor * maxRectCoorY[i]), cv::Scalar(0, 255, 0), 1); + cv::line(raw_image, cv::Point(shrink_factor * minRectCoorX[i], shrink_factor * maxRectCoorY[i]), + cv::Point(shrink_factor * maxRectCoorX[i], shrink_factor * minRectCoorY[i]), cv::Scalar(0, 255, 0), 1); + cv::line(raw_image, cv::Point(leftLine, 0), cv::Point(leftLine, raw_image.rows), cv::Scalar(0, 255, 0), 1); + cv::line(raw_image, cv::Point(rightLine, 0), cv::Point(rightLine, raw_image.rows), cv::Scalar(0, 255, 0), 1); + } + } + } + + + if (robNum == 0 and show_image) // no robots in the field of view + { + // show image if no robot is detected + char textDistance[64]; + float text_pos; + if (angle < 0) text_pos = 0.0; + else text_pos = 0.5; + std::snprintf(textDistance, sizeof(textDistance), "Angle: %f", angle); + std::snprintf(textDistance, sizeof(textDistance), "Angle: %f", angle); + cv::putText(raw_image, textDistance, cv::Point(text_pos * raw_image.cols, 15), cv::FONT_HERSHEY_DUPLEX, 0.5, + cv::Scalar(255, 0, 0), 1); + } + + assert(not std::isnan(angle)); + + if 
(show_image) { + cv::imshow("revolve-controller", raw_image); + cv::waitKey(5); + } + return angle; +} diff --git a/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.h b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.h new file mode 100644 index 0000000000..4ac5016db9 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetDetector.h @@ -0,0 +1,35 @@ +// +// Created by matteo on 2/28/20. +// + +#ifndef REVOLVE_ANGLETOTARGETDETECTOR_H +#define REVOLVE_ANGLETOTARGETDETECTOR_H + +#include "Sensor.h" +#include + +namespace revolve { + +class AngleToTargetDetector : public Sensor { +public: + explicit AngleToTargetDetector(unsigned int shrink_factor = 4, bool show_image = false); + virtual ~AngleToTargetDetector() = default; + + void read(double *input) override; + virtual float detect_angle(); + +private: + virtual void get_image(cv::Mat &image) = 0; + +protected: + const bool show_image; + const unsigned int shrink_factor; + double angle; + cv::Mat raw_image, image; + cv::Mat image_blur, image_hsv, image_blue, image_green1, image_green2, image_green; +}; + +} + + +#endif //REVOLVE_ANGLETOTARGETDETECTOR_H diff --git a/cpprevolve/revolve/brains/controller/sensors/AngleToTargetSensor.h b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetSensor.h new file mode 100644 index 0000000000..5059c6eac0 --- /dev/null +++ b/cpprevolve/revolve/brains/controller/sensors/AngleToTargetSensor.h @@ -0,0 +1,29 @@ +// +// Created by Matteo De Carlo on 25/02/2020. +// + +#ifndef REVOLVE_ANGLETOTARGETSENSOR_H +#define REVOLVE_ANGLETOTARGETSENSOR_H + +#include "Sensor.h" + +namespace revolve +{ + +class AngleToTargetSensor : public Sensor { +public: + explicit AngleToTargetSensor() + : Sensor(1) + {} + + virtual double angle_to_target() = 0; + + void read(double *input) override + { + *input = angle_to_target(); + } +}; + +} + +#endif //REVOLVE_ANGLETOTARGETSENSOR_H diff --git a/cpprevolve/revolve/brains/controller/sensors/Sensor.h b/cpprevolve/revolve/brains/controller/sensors/Sensor.h index 3f293da27f..f2678ac3e1 100644 --- a/cpprevolve/revolve/brains/controller/sensors/Sensor.h +++ b/cpprevolve/revolve/brains/controller/sensors/Sensor.h @@ -14,6 +14,8 @@ class Sensor : _n_inputs(n_inputs) {} + virtual ~Sensor() = default; + /// \brief Read the value of the sensor into the /// \param[in] _input: array. /// \brief[in,out] _input Input value to write on diff --git a/cpprevolve/revolve/brains/learner/BayesianOptimizer.cpp b/cpprevolve/revolve/brains/learner/BayesianOptimizer.cpp index a0615ea638..894727b04f 100644 --- a/cpprevolve/revolve/brains/learner/BayesianOptimizer.cpp +++ b/cpprevolve/revolve/brains/learner/BayesianOptimizer.cpp @@ -2,4 +2,439 @@ // Created by matteo on 14/06/19. 
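Editor's note: AngleToTargetDetector keeps get_image() pure virtual so that each platform (Gazebo camera plugin, Raspberry Pi camera) can supply its own frame grabber while the colour-threshold detection in detect_angle() stays shared. A minimal sketch of a webcam-backed subclass, assuming OpenCV's VideoCapture is available; the class name and device index are illustrative, not part of this patch:

    #include <stdexcept>
    #include <opencv2/videoio.hpp>
    #include "AngleToTargetDetector.h"

    // Illustrative subclass: feed frames from a local webcam into the shared detector.
    class WebcamAngleDetector : public revolve::AngleToTargetDetector
    {
    public:
        explicit WebcamAngleDetector(int device = 0)
            : AngleToTargetDetector(/*shrink_factor=*/4, /*show_image=*/true)
            , capture(device)
        {
            if (!capture.isOpened())
                throw std::runtime_error("could not open camera device");
        }

    private:
        // Called by detect_angle() whenever a fresh frame is needed.
        void get_image(cv::Mat &image) override
        {
            capture >> image;
        }

        cv::VideoCapture capture;
    };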
// +#include +#include +#include +#include +#include +#include #include "BayesianOptimizer.h" +#include "BoDefinitions.h" +#include "../controller/DifferentialCPG.h" +#include "../controller/Controller.h" + +using namespace revolve; + +// Copied from the limbo tutorial the BO implementation is based on +using Mean_t = limbo::mean::Data; +using Init_t = limbo::init::FlexibleLHS; +using Kernel_t = limbo::kernel::MaternFiveHalves; +using GP_t = limbo::model::GP; + +const static Eigen::IOFormat CSVFormat(11, Eigen::DontAlignCols, ", ", ","); +BayesianOptimizer::BayesianOptimizer( + std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const double evaluation_time, + const unsigned int n_learning_evaluations, + const std::string& model_name) + : Learner(evaluator, reporter, evaluation_time, n_learning_evaluations) + , _controller(std::move(controller)) + , n_init_samples(50) + //, init_method("LHS") + , kernel_noise(0.001) + , kernel_optimize_noise("false") + , kernel_sigma_sq(1.0) + , kernel_l(0.2) + , kernel_squared_exp_ard_k(3) + , acqui_gpucb_delta(0.1) + , acqui_ucb_alpha(3.0) + , acqui_ei_jitter(0.5) + , acquisition_function("UCB") +{ + assert(this->_controller && "BayesianOptimizer: passed null controller"); + switch (this->_controller->controller_type) + { + case revolve::Controller::DIFFERENTIAL_CPG: + devectorize_controller = [this](Eigen::VectorXd weights) { + // Eigen::vector -> std::vector + std::vector std_weights(weights.size()); + for (size_t j = 0; j < weights.size(); j++) { + std_weights[j] = weights(j); + } + + auto *temp_controller = dynamic_cast<::revolve::DifferentialCPG*>(this->_controller->into_DifferentialCPG()); + temp_controller->set_connection_weights(std_weights); + }; + + vectorize_controller = [this]() { + auto *controller = dynamic_cast<::revolve::DifferentialCPG*>(this->_controller->into_DifferentialCPG()); + const std::vector &weights = controller->get_connection_weights(); + + // std::vector -> Eigen::Vector + Eigen::VectorXd eigen_weights(weights.size()); + for (size_t j = 0; j < weights.size(); j++) { + eigen_weights(j) = weights.at(j); + } + + return eigen_weights; + }; + break; + default: + std::cerr << "[BO] Controller not supported" << std::endl; + throw std::runtime_error("[BO] Controller not supported"); + } + + this->output_dir = "./experiments/IMC/output"+model_name; + + std::ifstream fin(this->output_dir+"/fitnesses.txt"); + std::ifstream gin(this->output_dir+"/genotype.log"); + if(fin){ // Continue Learning/test best + double fitness; + while (fin >> fitness){ + // Limbo requires fitness value to be of type Eigen::VectorXd + Eigen::VectorXd observation = Eigen::VectorXd(1); + observation(0) = fitness; + // Save fitness to std::vector. This fitness corresponds to the solution of the previous iteration + this->observations.push_back(observation); +// std::cout<vectorize_controller().size(); + // Initialize Eigen::VectorXd here. 
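Editor's note: the devectorize_controller / vectorize_controller lambdas set up at the top of this constructor copy CPG weights element by element between Eigen::VectorXd and std::vector<double>. The same conversions can be written without explicit loops via Eigen::Map; a behaviour-equivalent sketch, shown only for illustration:

    #include <vector>
    #include <Eigen/Core>

    // Eigen vector -> std::vector<double> (copies the data).
    std::vector<double> to_std(const Eigen::VectorXd &weights)
    {
        return std::vector<double>(weights.data(), weights.data() + weights.size());
    }

    // std::vector<double> -> Eigen vector (copies the data).
    Eigen::VectorXd to_eigen(const std::vector<double> &weights)
    {
        return Eigen::Map<const Eigen::VectorXd>(weights.data(), weights.size());
    }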
+ Eigen::VectorXd init_sample(n_weights); + std::string genome; + while (std::getline(gin, genome)) + { + std::stringstream ss_weight(genome); + std::string weight; + int j =0; + while (std::getline(ss_weight, weight, ',')) + { + init_sample(j) = stod(weight); + j++; + } + // Save the initialized weights + this->samples.push_back(init_sample); + } + + this->evaluation_counter = this->observations.size()-1; + int best_index = 1; + for (int i=0; iobservations.size(); i++){ + if (this->best_fitnessobservations[i][0]){ + this->best_fitness = this->observations[i][0]; + this->best_sample = this->samples[i]; + best_index = i; + } + } + std::cout<<"[BO] Observations: "<observations.size()<<" | Samples: "<samples.size()<observations.clear(); + this->evaluation_counter = -1; + + auto sec_best = this->samples[best_index - 1]; + this->devectorize_controller(sec_best); + this->samples.clear(); + this->samples.push_back(this->best_sample); +// this->samples.push_back(sec_best); + std::cout<<"Retesting sample fitness: "<< this->best_fitness<output_dir+"/fitness_decom.txt", std::ofstream::out | std::ofstream::trunc); + files.close(); + } + } + else{ + std::cout<<"[BO] Create clean fitness/genotype files"<output_dir+"/fitnesses.txt", std::ofstream::out | std::ofstream::trunc); + files.open(this->output_dir+"/genotype.log", std::ofstream::out | std::ofstream::trunc); + files.open("../ctime.txt", std::ofstream::out | std::ofstream::trunc); + files.close(); + } + + this->output_dir = "./experiments/IMC/output"+model_name; + + std::ifstream fin(this->output_dir+"/fitnesses.txt"); + std::ifstream gin(this->output_dir+"/genotype.log"); + if(gin){ // Continue Learning + double fitness; + while (fin >> fitness){ + // Limbo requires fitness value to be of type Eigen::VectorXd + Eigen::VectorXd observation = Eigen::VectorXd(1); + observation(0) = fitness; + // Save fitness to std::vector. This fitness corresponds to the solution of the previous iteration + this->observations.push_back(observation); +// std::cout<vectorize_controller().size(); + // Initialize Eigen::VectorXd here. + Eigen::VectorXd init_sample(n_weights); + std::string genome; + while (std::getline(gin, genome)) + { + std::stringstream ss_weight(genome); + std::string weight; + int j =0; + while (std::getline(ss_weight, weight, ',')) + { + init_sample(j) = stod(weight); + j++; + } + // Save the initialized weights + this->samples.push_back(init_sample); + } + this->evaluation_counter = this->samples.size()-1; + int best_index = 1; + for (int i=0; iobservations.size(),500); i++){ + if (this->best_fitnessobservations[i][0]){ + this->best_fitness = this->observations[i][0]; + this->best_sample = this->samples[i]; + best_index = i; + } + } + std::cout<<"Observations: "<observations.size()<<" | Samples: "<samples.size()<observations.clear(); + this->evaluation_counter = -1; + + this->devectorize_controller(this->best_sample); + this->samples.clear(); + this->samples.push_back(this->best_sample); + } + } + else{ + std::ofstream files; + files.open(this->output_dir+"/fitnesses.txt", std::ofstream::out | std::ofstream::trunc); + files.open(this->output_dir+"/genotype.log", std::ofstream::out | std::ofstream::trunc); + files.open("../ctime.txt", std::ofstream::out | std::ofstream::trunc); + files.close(); + } + +} + +/** + * Struct that holds the parameters on which BO is called. This is required + * by limbo. 
+ */ +struct BayesianOptimizer::params +{ + + struct bayes_opt_boptimizer : public limbo::defaults::bayes_opt_boptimizer + { + }; + + // depending on which internal optimizer we use, we need to import different parameters +#ifdef USE_NLOPT + struct opt_nloptnograd : public limbo::defaults::opt_nloptnograd { + }; +#elif defined(USE_LIBCMAES) + struct opt_cmaes : public lm::defaults::opt_cmaes { + }; +#endif + + struct kernel : public limbo::defaults::kernel + { + BO_PARAM(double, noise, 0.001); + + BO_PARAM(bool, optimize_noise, false); + }; + + struct bayes_opt_bobase : public limbo::defaults::bayes_opt_bobase + { + // set stats_enabled to prevent creating all the directories + BO_PARAM(bool, stats_enabled, false); + + BO_PARAM(bool, bounded, true); + }; + + // 1 Iteration as we will perform limbo step by steop + struct stop_maxiterations : public limbo::defaults::stop_maxiterations + { + BO_PARAM(int, iterations, 1); + }; + + struct kernel_exp : public limbo::defaults::kernel_exp + { + /// @ingroup kernel_defaults + BO_PARAM(double, sigma_sq, 0.1); + + BO_PARAM(double, l, 0.1); // the width of the kernel. Note that it assumes equally sized ranges over dimensions + }; + + struct kernel_squared_exp_ard : public limbo::defaults::kernel_squared_exp_ard + { + /// @ingroup kernel_defaults + BO_PARAM(int, k, 3); // k number of columns used to compute M + /// @ingroup kernel_defaults + BO_PARAM(double, sigma_sq, 0.1); //brochu2010tutorial p.9 without sigma_sq + }; + + struct kernel_maternfivehalves : public limbo::defaults::kernel_maternfivehalves + { + BO_DYN_PARAM(double, sigma_sq); //brochu2010tutorial p.9 without sigma_sq + BO_DYN_PARAM(double, l); //characteristic length scale + }; + + struct acqui_gpucb : public limbo::defaults::acqui_gpucb + { + //UCB(x) = \mu(x) + \kappa \sigma(x). + BO_PARAM(double, delta, + 0.1);//acqui_gpucb_delta_); // default delta = 0.1, delta in (0,1) convergence guaranteed + }; + + struct acqui_ei : public limbo::defaults::acqui_ei + { + BO_PARAM(double, jitter, 0.5); + }; + + // This is just a placeholder to be able to use limbo with revolve + struct init_lhs : public limbo::defaults::init_lhs + { + BO_PARAM(int, samples, 0); + }; + + struct acqui_ucb : public limbo::defaults::acqui_ucb + { + //constexpr double ra = acqui_ucb_alpha_; + //UCB(x) = \mu(x) + \alpha \sigma(x). high alpha have high exploration + //iterations is high, alpha can be low for high accuracy in enough iterations. + // In contrast, the lsow iterations should have high alpha for high + // searching in limited iterations, which guarantee to optimal. + // BO_PARAM(double, alpha, transform_double(acqui_ucb_alpha_)); // default alpha = 0.5 + BO_DYN_PARAM(double, alpha); // default alpha = 0.5 + + }; +}; + +BO_DECLARE_DYN_PARAM(double, BayesianOptimizer::params::acqui_ucb, alpha); +BO_DECLARE_DYN_PARAM(double, BayesianOptimizer::params::kernel_maternfivehalves, sigma_sq); +BO_DECLARE_DYN_PARAM(double, BayesianOptimizer::params::kernel_maternfivehalves, l); + + +void BayesianOptimizer::init_first_controller() +{ +// assert(n_init_samples == 1 and "INIT SAMPLES > 1 not supported"); + std::cout<<"Intialization BO algorithm"<vectorize_controller().size(); + + // Initialize Eigen::VectorXd here. 
+ Eigen::VectorXd init_sample(n_weights); + + // Working variable + double my_range = 1.f / this->n_init_samples; + + // If we have n dimensions, create n such vectors that we will permute + std::vector> all_dimensions; + + // Fill vectors + for (size_t i = 0; i < n_weights; i++) { + std::vector one_dimension; + + // Prepare for vector permutation + for (size_t j = 0; j < this->n_init_samples; j++) { + one_dimension.push_back(j); + } + + // Vector permutation + std::random_shuffle(one_dimension.begin(), one_dimension.end()); + + // Save permuted vector + all_dimensions.push_back(one_dimension); + } + + // For all samples + for (size_t i = 0; i < this->n_init_samples; i++) { + + // For all dimensions + for (size_t j = 0; j < n_weights; j++) { + // Take a LHS + init_sample(j) = all_dimensions.at(j).at(i) * my_range + ((double) rand() / (RAND_MAX)) * my_range; + } + + // Save the initialized weights + this->samples.push_back(init_sample); + } + + if (!this->samples.empty()){ + this->devectorize_controller(this->samples[0]); + } +} + +void BayesianOptimizer::init_next_controller() +{ + std::cout<<"[BO] start update"<samples.size()>this->observations.size()){ + x = this->samples[this->observations.size()]; + std::cout<<"Initializing BO with LHS | "<observations.size()+1<<"/"<< this->samples.size()<acqui_ucb_alpha); + params::kernel_maternfivehalves::set_l(this->kernel_l); + params::kernel_maternfivehalves::set_sigma_sq(this->kernel_sigma_sq); + + // Specify bayesian optimizer. TODO: Make attribute and initialize at bo_init + limbo::bayes_opt::BOptimizer, + limbo::modelfun, + limbo::acquifun>> boptimizer; + + // Optimize. Pass evaluation function and observations . + boptimizer.optimize(BayesianOptimizer::evaluation_function(this->samples[0].size()), + this->samples, + this->observations); + + x = boptimizer.last_sample(); + this->samples.push_back(x); + } + + // load into controller + this->devectorize_controller(x); + std::cout << "[BO] end update" < this->best_fitness) + { + this->best_fitness = fitness; + this->best_sample = this->samples.back(); + } + + std::cout<<"[BO] Resulting fitness: "<best_fitness<observations.push_back(observation); + + // ->GetAttribute("output_directory")->GetAsString(); + // Write fitness to file + std::ofstream fitness_file; + fitness_file.open(this->output_dir+"/fitnesses.txt", std::ios::app); + fitness_file<< std::setprecision(std::numeric_limits::digits10 +1) + << fitness << std::endl; + fitness_file.close(); + + // Write genotype to file + std::ofstream genolog(this->output_dir+"/genotype.log", std::ios::app); + if (genolog.is_open()) + { + genolog << this->samples.back().format(CSVFormat) << std::endl; + genolog.close(); + } + genolog.close(); +} + +void BayesianOptimizer::load_best_controller() +{ + this->devectorize_controller(this->best_sample); +} diff --git a/cpprevolve/revolve/brains/learner/BayesianOptimizer.h b/cpprevolve/revolve/brains/learner/BayesianOptimizer.h index 15fcaa2621..69efbee204 100644 --- a/cpprevolve/revolve/brains/learner/BayesianOptimizer.h +++ b/cpprevolve/revolve/brains/learner/BayesianOptimizer.h @@ -2,13 +2,104 @@ // Created by matteo on 14/06/19. 
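Editor's note: init_first_controller() above builds its initial samples with a simple Latin hypercube scheme: each of the n_weights dimensions gets its own random permutation of n_init_samples strata, and every sample takes one stratum per dimension plus a uniform offset inside that stratum. A compact standalone sketch of the same idea (using std::shuffle instead of the deprecated std::random_shuffle; all names are illustrative):

    #include <algorithm>
    #include <numeric>
    #include <random>
    #include <vector>

    // Latin hypercube: n_samples points in [0,1]^n_dims, one point per stratum per dimension.
    std::vector<std::vector<double>> latin_hypercube(std::size_t n_samples, std::size_t n_dims, std::mt19937 &gen)
    {
        std::uniform_real_distribution<double> unit(0.0, 1.0);
        const double range = 1.0 / static_cast<double>(n_samples);

        // One shuffled stratum order per dimension.
        std::vector<std::vector<std::size_t>> strata(n_dims, std::vector<std::size_t>(n_samples));
        for (auto &dim : strata) {
            std::iota(dim.begin(), dim.end(), 0);
            std::shuffle(dim.begin(), dim.end(), gen);
        }

        std::vector<std::vector<double>> samples(n_samples, std::vector<double>(n_dims));
        for (std::size_t i = 0; i < n_samples; ++i)
            for (std::size_t j = 0; j < n_dims; ++j)
                samples[i][j] = strata[j][i] * range + unit(gen) * range;  // offset inside the stratum

        return samples;
    }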
// -#ifndef REVOLVE_BAYESIANOPTIMIZER_H -#define REVOLVE_BAYESIANOPTIMIZER_H +#pragma once +#include +#include +#include "Learner.h" +#include "../controller/Controller.h" +#include "../controller/DifferentialCPG.h" -class BayesianOptimizer { +namespace revolve { +class BayesianOptimizer : public Learner +{ +public: + /// \brief Constructor + explicit BayesianOptimizer( + std::unique_ptr <::revolve::Controller> controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string& model_name); -}; + /// \brief Destructor + ~BayesianOptimizer() = default; + + void init_first_controller() override; + void init_next_controller() override; + void finalize_current_controller(double fitness) override; + void load_best_controller() override; + + Controller *controller() override + { return this->_controller.get(); } + +public: + + /// \brief parameters for optimization + struct params; + + /// \brief Dummy function for limbo + class evaluation_function { + public: + explicit evaluation_function(size_t dim_in) + : _dim_in(dim_in) + {} // Number of input dimension (samples.size()) + size_t dim_in() const + { return _dim_in; } // number of dimensions of the fitness + + static size_t dim_out() + { return 1; } + + Eigen::VectorXd operator()(const Eigen::VectorXd &/*x*/) const + { + Eigen::VectorXd res(1); + res(0) = 0; + return res; + }; + + private: + const size_t _dim_in; + }; + +protected: + std::unique_ptr<::revolve::Controller> _controller; + // BO Learner parameters + double kernel_noise; + bool kernel_optimize_noise; + double kernel_sigma_sq; + double kernel_l; + int kernel_squared_exp_ard_k; + double acqui_gpucb_delta; + double acqui_ucb_alpha; + double acqui_ei_jitter; -#endif //REVOLVE_BAYESIANOPTIMIZER_H + /// \brief Specifies the acquisition function used + std::string acquisition_function; + + /// \brief Number of initial samples + size_t n_init_samples; + + /// \brief All samples seen so far. + std::vector samples; + + /// \brief All fitnesses seen so far. Called observations in limbo context + std::vector< Eigen::VectorXd > observations; + + /// \brief function to turn the controller into a sample + std::function vectorize_controller; + + /// \brief function to turn a sample into a controller + std::function devectorize_controller; + + /// \brief Best fitness seen so far + double best_fitness = -std::numeric_limits::infinity(); + + /// \brief Sample corresponding to best fitness + Eigen::VectorXd best_sample; + + /// \brief root output directory + std::string output_dir; +}; +} diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPG_BO.h b/cpprevolve/revolve/brains/learner/BoDefinitions.h similarity index 88% rename from cpprevolve/revolve/gazebo/brains/DifferentialCPG_BO.h rename to cpprevolve/revolve/brains/learner/BoDefinitions.h index c859415630..115b9be24c 100644 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPG_BO.h +++ b/cpprevolve/revolve/brains/learner/BoDefinitions.h @@ -2,8 +2,7 @@ // Created by maarten on 03/02/19. 
// -#ifndef REVOLVE_BOPTIMIZER_CPG_H -#define REVOLVE_BOPTIMIZER_CPG_H +#pragma once // Standard libraries #include @@ -25,16 +24,35 @@ namespace limbo { BO_PARAM(int, hp_period, -1); }; } + namespace init { + template + struct FlexibleLHS + { + template + void operator()(const StateFunction &seval, const AggregatorFunction &, Opt &opt) const + { + assert(Params::bayes_opt_bobase::bounded()); + + Eigen::MatrixXd H = tools::random_lhs(seval.dim_in(), Params::init_lhs::samples()); + + for (int i = 0; i < Params::init_lhs::samples(); i++) { + opt.eval_and_add(seval, H.row(i)); + } + } + }; + } BOOST_PARAMETER_TEMPLATE_KEYWORD(acquiopt) namespace bayes_opt { - using boptimizer_signature = boost::parameter::parameters, + using boptimizer_signature = boost::parameter::parameters< + boost::parameter::optional, boost::parameter::optional, boost::parameter::optional, boost::parameter::optional, boost::parameter::optional, - boost::parameter::optional>; + boost::parameter::optional + >; // clang-format off /** @@ -97,7 +115,7 @@ namespace limbo { } else { std::cout << "OBSERVATION SET IS EMPTY \n"; - _model = model_t(StateFunction::dim_in(), StateFunction::dim_out()); + _model = model_t(sfun.dim_in(), StateFunction::dim_out()); } acqui_optimizer_t acqui_optimizer; @@ -106,13 +124,13 @@ namespace limbo { while (!this->_stop(*this, afun)) { - gettimeofday(&timeStart,NULL); + gettimeofday(&timeStart, nullptr); acquisition_function_t acqui(_model, this->_current_iteration); auto acqui_optimization = [&](const Eigen::VectorXd& x, bool g) { return acqui(x, afun, g); }; - Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in(), Params::bayes_opt_bobase::bounded()); + Eigen::VectorXd starting_point = tools::random_vector(sfun.dim_in(), Params::bayes_opt_bobase::bounded()); // new samples are from the acquisition optimizer Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, Params::bayes_opt_bobase::bounded()); @@ -131,7 +149,7 @@ namespace limbo { this->_current_iteration++; this->_total_iterations++; - gettimeofday(&timeEnd,NULL); + gettimeofday(&timeEnd, nullptr); timeDiff = 1000000 * (timeEnd.tv_sec - timeStart.tv_sec) + timeEnd.tv_usec - timeStart.tv_usec; //tv_sec: value of second, tv_usec: value of microsecond @@ -191,5 +209,3 @@ namespace limbo { using BOptimizerHPOpt = BOptimizer>, acquifun<_default_hp::acqui_t>, A1, A2, A3, A4>; } } - -#endif //REVOLVE_BOPTIMIZER_CPG_H diff --git a/cpprevolve/revolve/brains/learner/DifferentialEvo.cpp b/cpprevolve/revolve/brains/learner/DifferentialEvo.cpp new file mode 100644 index 0000000000..9bb1834f8d --- /dev/null +++ b/cpprevolve/revolve/brains/learner/DifferentialEvo.cpp @@ -0,0 +1,269 @@ +// +// Created by fuda on 18-12-20. 
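Editor's note: the per-iteration timing added to the BOptimizer loop above still goes through gettimeofday(); the same microsecond measurement can be expressed portably with <chrono>. A small sketch (the helper name is illustrative, not part of this patch):

    #include <chrono>

    // Time one optimisation step in microseconds.
    template <typename Step>
    long long time_iteration_us(Step &&one_iteration)
    {
        const auto t0 = std::chrono::steady_clock::now();
        one_iteration();
        const auto t1 = std::chrono::steady_clock::now();
        return std::chrono::duration_cast<std::chrono::microseconds>(t1 - t0).count();
    }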
+// + +#include "DifferentialEvo.h" +#include +#include +#include +const static Eigen::IOFormat CSVFormat(11, Eigen::DontAlignCols, ", ", ","); + +namespace revolve { + DifferentialEvo::DifferentialEvo(std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const DifferentialEvo::DE_Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string &model_name) + : EA(std::move(controller), evaluator, reporter, + params.EA_params, seed, evaluation_time, + n_learning_evaluations, + model_name) { + std::cout << "[DiffEvo] constructor" << std::endl; + this->DE_Param = params; + this->F = DE_Param.F; + this->CR = DE_Param.CR; + this->type = DE_Param.type; + this->n_parents = DE_Param.n_parents; + this->elitism = DE_Param.elitism; + + std::vector de_types{"de", "ade", "revde", "dex3"}; + + assert ((0. <= this->F) && (this->F <= 2.) && "F must be in [0, 2]"); + assert ((0. < this->CR) && (this->CR <= 1.) && "CR must be in (0, 1]"); + assert ((this->elitism>=0.) && (this->elitism < 1.0) && "Elitism must be in [0, 1)"); + assert ((std::find(de_types.begin(), de_types.end(), this->type) != de_types.end()) + && "type must be one in {de, dex3, ade, revde}"); + + + int n_params = this->get_genome().size(); + + std::vector initial_samples = randomNum->randVectd(EA_Params.min_weight, EA_Params.max_weight, + n_params*EA_Params.population_size); + + std::vector genome(n_params); + + for (int u = 0; u < EA_Params.population_size; u++) { + for (int v = 0; v < n_params; v++) + genome[v] = initial_samples[v+u]; + + Individual::indPtr ind(new Individual()); + ind->setGenome(genome); + population.push_back(ind); + } + std::cout << "[DiffEvo] population initialized" << std::endl; + } + + DifferentialEvo::~DifferentialEvo()= default; + + void DifferentialEvo::epoch() { + /** NOVELTY **/ + if (DE_Param.novelty_ratio > 0.) 
{ + if (Novelty::k_value >= population.size()) + Novelty::k_value = int(population.size() / 2); + else Novelty::k_value = DE_Param.novelty_k_value; + + std::vector pop_desc; + for (const auto &ind : population) + pop_desc.push_back(ind->descriptor()); + //compute novelty + for (const auto &ind : population) { + Eigen::VectorXd ind_desc = ind->descriptor(); + double ind_nov = Novelty::sparseness(Novelty::distances(ind_desc, archive, pop_desc)); + ind->setNovelty(ind_nov); + } + + //update archive + for (const auto &ind : population) { + Eigen::VectorXd ind_desc = ind->descriptor(); + double ind_nov = ind->get_ctrl_novelty(); + Novelty::update_archive(ind_desc, ind_nov, archive, randomNum); + } + } + + this->selection(); + } + + void DifferentialEvo::selection() { + this->population.insert(this->population.end(), pop_s.begin(), pop_s.end()); + + std::vector pop_ind(population.size()); + std::size_t n(0); + std::generate(std::begin(pop_ind), std::end(pop_ind), [&]{ return n++; }); + + std::sort( std::begin(pop_ind), + std::end(pop_ind), + [&](int i1, int i2) { + return this->population[i1]->getFitness() > this->population[i2]->getFitness(); } ); + + std::ofstream gen_fitness; + gen_fitness.open(this->output_dir+"/gen_best_fitness.txt", std::ios::app); + gen_fitness<< std::setprecision(std::numeric_limits::digits10 +1) + << this->population[pop_ind[0]]->getFitness() << std::endl; + gen_fitness.close(); + + std::ofstream gen_genome; + gen_genome.open(this->output_dir+"/gen_best_genome.txt", std::ios::app); + gen_genome<< std::setprecision(std::numeric_limits::digits10 +1) + << this->population[pop_ind[0]]->genome.format(CSVFormat) << std::endl; + gen_genome.close(); + + pop_s.clear(); + for (int j = 0; j < EA_Params.population_size; j++) { + pop_s.push_back(this->population[pop_ind[j]]); + } + } + + void DifferentialEvo::init_next_pop() { + + auto [new_samples, parent_ind] = recombination(); + + int n_param = population[0]->get_ctrl_genome().size(); + std::vector genome(n_param); + + population.clear(); + for (int i = 0; i < new_samples.cols(); i++) { + + for (int j = 0; j < n_param; j++) + genome[j] = new_samples(j, i); + + Individual::indPtr ind(new Individual()); + ind->setGenome(genome); + population.push_back(ind); + } + } + + std::tuple> > DifferentialEvo::recombination(){ + std::vector> parent_ind; + + std::vector pop_ind(pop_s.size()); + + Eigen::ArrayXXd genomes(this->get_genome().size(),this->pop_s.size()); + std::vector parent_genomes; + // Prepare for vector permutation + for (size_t j = 0; j < pop_s.size(); j++) { + pop_ind[j] = j; + genomes.col(j) = getIndividual(pop_ind[j])->genome; + } + + for (int i=0; igenome; + } + } + + if (this->type == "de"){ + Eigen::ArrayXXd y_1(this->get_genome().size(),pop_s.size()); + y_1 = (parent_genomes[0] + this->F * (parent_genomes[1] - parent_genomes[2])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + + if (this->CR < 1.) { + int shape[]= {static_cast(y_1.rows()), static_cast(y_1.cols())}; + Eigen::ArrayXXd p_1 = this->randomNum->Bernoulli(this->CR, shape).cast(); + + y_1 = p_1* y_1 + (1. 
- p_1) * parent_genomes[0]; + } + return {y_1, parent_ind}; + } + else if ((this->type == "revde")) { + Eigen::ArrayXXd y_1(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_2(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_3(this->get_genome().size(),pop_s.size()); + + y_1 = (parent_genomes[0] + this->F * (parent_genomes[1] - parent_genomes[2])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_2 = (parent_genomes[1] + this->F * (parent_genomes[2] - y_1)) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_3 = (parent_genomes[2] + this->F * (y_1 - y_2)) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + +// uniform crossover + if (this->CR < 1.) { + int shape[]= {static_cast(y_1.rows()), static_cast(y_1.cols())}; + Eigen::ArrayXXd p_1 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_2 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_3 = this->randomNum->Bernoulli(this->CR, shape).cast(); + y_1 = p_1 * y_1 + (1. - p_1) * parent_genomes[0]; + y_2 = p_2 * y_2 + (1. - p_2) * parent_genomes[1]; + y_3 = p_3 * y_3 + (1. - p_3) * parent_genomes[2]; + } + + Eigen::ArrayXXd population(this->get_genome().size(),pop_s.size()*3); + population << y_1 , y_2 , y_3; + return {population, parent_ind}; + } + else if (this->type == "ade") { + Eigen::ArrayXXd y_1(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_2(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_3(this->get_genome().size(),pop_s.size()); + + y_1 = (parent_genomes[0] + this->F * (parent_genomes[1] - parent_genomes[2])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_2 = (parent_genomes[1] + this->F * (parent_genomes[2] - parent_genomes[0])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_3 = (parent_genomes[2] + this->F * (parent_genomes[0] - parent_genomes[1])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + +// uniform crossover + if (this->CR < 1.) { + int shape[]= {static_cast(y_1.rows()), static_cast(y_1.cols())}; + Eigen::ArrayXXd p_1 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_2 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_3 = this->randomNum->Bernoulli(this->CR, shape).cast(); + y_1 = p_1 * y_1 + (1. - p_1) * parent_genomes[0]; + y_2 = p_2 * y_2 + (1. - p_2) * parent_genomes[1]; + y_3 = p_3 * y_3 + (1. - p_3) * parent_genomes[2]; + } + + Eigen::ArrayXXd population(this->get_genome().size(),pop_s.size()*3); + population << y_1 , y_2 , y_3; + return {population, parent_ind}; + } + if (this->type == "dex3") { + Eigen::ArrayXXd y_1(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_2(this->get_genome().size(),pop_s.size()); + Eigen::ArrayXXd y_3(this->get_genome().size(),pop_s.size()); + + y_1 = (parent_genomes[0] + this->F * (parent_genomes[1] - parent_genomes[2])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_2 = (parent_genomes[0] + this->F * (parent_genomes[3] - parent_genomes[4])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + y_3 = (parent_genomes[0] + this->F * (parent_genomes[5] - parent_genomes[6])) + .cwiseMax(EA_Params.min_weight).cwiseMin(EA_Params.max_weight); + +// uniform crossover + if (this->CR < 1.) 
{ + int shape[]= {static_cast(y_1.rows()), static_cast(y_1.cols())}; + Eigen::ArrayXXd p_1 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_2 = this->randomNum->Bernoulli(this->CR, shape).cast(); + Eigen::ArrayXXd p_3 = this->randomNum->Bernoulli(this->CR, shape).cast(); + y_1 = p_1 * y_1 + (1. - p_1) * parent_genomes[0]; + y_2 = p_2 * y_2 + (1. - p_2) * parent_genomes[1]; + y_3 = p_3 * y_3 + (1. - p_3) * parent_genomes[2]; + } + + Eigen::ArrayXXd population(this->get_genome().size(),pop_s.size()*3); + population << y_1 , y_2 , y_3; + return {population, parent_ind}; + } + else { + throw std::runtime_error("Wrong name of the differential mutation!"); + } + } + + bool DifferentialEvo::is_finish() { + int maxNbrEval = EA_Params.max_eval; + return _is_finish || numberEvaluation >= maxNbrEval; + } + + bool DifferentialEvo::finish_eval() { + return EA::finish_eval(); + } +} \ No newline at end of file diff --git a/cpprevolve/revolve/brains/learner/DifferentialEvo.h b/cpprevolve/revolve/brains/learner/DifferentialEvo.h new file mode 100644 index 0000000000..c136fbbaff --- /dev/null +++ b/cpprevolve/revolve/brains/learner/DifferentialEvo.h @@ -0,0 +1,75 @@ +// +// Created by fuda on 18-12-20. +// + +#pragma once +#include +//#include +#include "Learner.h" +#include "Evaluator.h" +#include "EA.h" + +#ifndef REVOLVE_DIFFERENTIALEVO_H +#define REVOLVE_DIFFERENTIALEVO_H + +namespace revolve { + class DifferentialEvo : public EA { + public: + struct DE_Parameters { + std::string type = "de"; + + double F = 1.0; + double CR = 0.5; + double elitism = 0.; + int n_parents = 3; + + int novelty_k_value = 15; + double novelty_ratio = 1.; +// double novelty_decrement = 0.05; +// double novelty_threshold = 0.9; +// double novelty_archive_probability = 0.4; + + EA::Parameters EA_params; + }; + + explicit DifferentialEvo(std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const DifferentialEvo::DE_Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string &model_name); + + Controller *controller() override { return _controller.get(); } + + ~DifferentialEvo(); + + void epoch() override; + + void init_next_pop() override; + + void selection() override; + + std::tuple>> recombination(); + + bool is_finish() override; + + bool finish_eval() override; + + const std::vector &get_archive() { return archive; } + + protected: + std::vector pop_s; + std::string type; + bool _is_finish = false; + std::vector archive; + DifferentialEvo::DE_Parameters DE_Param; + double F; + double CR; + int n_parents; + double elitism; + }; +}; + +#endif //REVOLVE_DIFFERENTIALEVO_H diff --git a/cpprevolve/revolve/brains/learner/EA.cpp b/cpprevolve/revolve/brains/learner/EA.cpp new file mode 100644 index 0000000000..1c95f75800 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EA.cpp @@ -0,0 +1,177 @@ +// +// Created by fuda on 11/23/20. 
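Editor's note: the "de", "ade", "revde" and "dex3" branches of recombination() above all build on the classic DE/rand/1 mutation (donor = x1 + F * (x2 - x3), clamped to the weight bounds) followed by binomial crossover with rate CR. A minimal sketch of that base update for a single offspring, assuming Eigen and three distinct parent vectors; the function name is illustrative:

    #include <random>
    #include <Eigen/Core>

    // One DE/rand/1/bin offspring from three distinct parents, clamped to [min_w, max_w].
    Eigen::VectorXd de_rand_1_bin(const Eigen::VectorXd &x1, const Eigen::VectorXd &x2,
                                  const Eigen::VectorXd &x3, double F, double CR,
                                  double min_w, double max_w, std::mt19937 &gen)
    {
        // Mutation: donor = x1 + F * (x2 - x3), kept inside the weight bounds.
        Eigen::VectorXd donor = (x1 + F * (x2 - x3)).cwiseMax(min_w).cwiseMin(max_w);

        // Binomial (uniform) crossover: take each donor gene with probability CR, else keep x1's gene.
        std::bernoulli_distribution keep(CR);
        Eigen::VectorXd offspring(x1.size());
        for (Eigen::Index i = 0; i < x1.size(); ++i)
            offspring(i) = keep(gen) ? donor(i) : x1(i);

        return offspring;
    }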
+// + +#include "../controller/DifferentialCPG.h" +#include "EA.h" +#include +using namespace revolve; +const static Eigen::IOFormat CSVFormat(11, Eigen::DontAlignCols, ", ", ","); + +EA::EA(std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const EA::Parameters ¶ms, + int seed, + const double evaluation_time, + unsigned int n_learning_evaluations, + const std::string& model_name) + : Learner(evaluator, reporter, evaluation_time, n_learning_evaluations) + , _controller(std::move(controller)) +// , params(params) +// , population(nullptr) +{ + max_learning_evaluations = int(n_learning_evaluations); + EA_Params = params; + this->output_dir = "./experiments/IMC/output"+model_name; + + revolve::RandNum rn(seed); + this->set_randomNum(std::make_shared(rn)); + + assert(this->_controller && "EA: passed null controller"); + switch (this->_controller->controller_type) + { + case revolve::Controller::DIFFERENTIAL_CPG: + EA_Params.max_weight = 1.0; + EA_Params.min_weight = 0.0; + + load_genome = [this](Eigen::VectorXd weights) { + std::vector std_weights(weights.size()); + for (size_t j = 0; j < weights.size(); j++) { + std_weights[j] = weights(j); + } + + auto *temp_controller = dynamic_cast<::revolve::DifferentialCPG*>(this->_controller->into_DifferentialCPG()); + temp_controller->set_connection_weights(std_weights); + }; + + get_genome = [this]() { + auto *controller = dynamic_cast<::revolve::DifferentialCPG*>(this->_controller->into_DifferentialCPG()); + const std::vector &weights = controller->get_connection_weights(); + Eigen::VectorXd eigen_weights(weights.size()); + for (size_t j = 0; j < weights.size(); j++) { + eigen_weights(j) = weights.at(j); + } + + return eigen_weights; + }; + break; + default: + std::cerr << "Controller not supported" << std::endl; + throw std::runtime_error("Controller not supported"); + } +} + +void EA::init_first_controller() +{ + current_Ind = population.begin(); + this->set_current_Ind_Index(0); + this->set_generation(0); + + //TODO load genome in controller + this->load_genome((*current_Ind)->genome); + + std::cout<<"[EA] initialized first controller"<generation << "\t Pop size: " << this->population.size()<< + "\n###### Best ind:\t"<< this->best_Ind << "\t Fitness: " << this->best_fitness << + "\n###### Genome: \t"; + for (auto g : this->getIndividual(this->best_Ind)->get_ctrl_genome()) { + std::cout << g << ", "; + }; + std::cout<epoch(); + this->init_next_pop(); + this->incr_generation(); + this->set_current_Ind_Index(0); + current_Ind = population.begin(); + } + if(this->is_finish()){ + if(EA_Params.verbose) + { + std::cout << "---------------------" << std::endl; + std::cout << "Evolution is Finished" << std::endl; + std::cout << "---------------------" << std::endl; + } + exit(0); + } + const Eigen::VectorXd &genome = (*current_Ind)->genome; + this->load_genome(genome); + std::cout <<"[EA] end update" << std::endl; +} + +void EA::finalize_current_controller(double fitness) +{ + (*current_Ind)->setFitness(fitness); + + if(fitness>best_fitness) + { + this->best_fitness = fitness; + this->best_genome = (*current_Ind)->genome; + this->best_Ind = current_Ind_Index; + } + + // Write fitness to file + std::ofstream fitness_file; + fitness_file.open(this->output_dir+"/fitnesses.txt", std::ios::app); + fitness_file<< std::setprecision(std::numeric_limits::digits10 +1) + <output_dir+"/genotype.log", std::ios::app); + if (genolog.is_open()) + { + genolog << (*current_Ind)->genome.format(CSVFormat) <load_genome(this->best_genome); +} 
+ + + + +/// ############## virtual part for different EA implementations ############## +EA::~EA() +{ + randomNum.reset(); +// parameters.reset(); + for(auto& ind : population) + ind.reset(); +} +void EA::setSettings(const RandNum::Ptr &rn) +{ +// parameters = param; + randomNum = rn; +} +void EA::epoch(){ + evaluation(); + selection(); +} +void EA::init_next_pop(){ + replacement(); + crossover(); + mutation(); +} +Individual::indPtr EA::getIndividual(size_t index) const +{ + return population[index]; +} diff --git a/cpprevolve/revolve/brains/learner/EA.h b/cpprevolve/revolve/brains/learner/EA.h new file mode 100644 index 0000000000..42e90894d8 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EA.h @@ -0,0 +1,190 @@ +// +// Created by fuda on 11/23/20. +// + +#pragma once +#ifndef REVOLVE_EA_H +#define REVOLVE_EA_H + +#include "Learner.h" +#include "Evaluator.h" +#include +#include +#include "EA_misc/RandNum.h" +#include "EA_misc/Novelty.h" + +//#include "multineat/Genome.h" +//#include + +namespace revolve +{ + class Individual + { + public: + Individual(){}; + typedef std::shared_ptr indPtr; + Eigen::VectorXd genome; + double fitness = -std::numeric_limits::infinity(); + double novelty; + + void setFitness(double value){fitness = value;}; + void setNovelty(double ind_nov){novelty = ind_nov;} + void setGenome(std::vector gen){ +// this->genome = Eigen::VectorXd::Map(gen.data(), gen.size(), 0); + Eigen::VectorXd V(gen.size()); + for (int i=0; i get_ctrl_genome(){ + std::vector vec(genome.data(), genome.data() + genome.size()); + return vec; + } + Eigen::VectorXd descriptor(){ +// Eigen::VectorXd desc(3); +// desc << 0.0, 0.0, 0.0; + return genome; + }; + protected: + }; + + + typedef std::chrono::high_resolution_clock hr_clock; + class EA : public Learner + { + public: + struct Parameters { + bool verbose = false; + int population_size = 10; + int max_eval = 300; + double max_weight = 1.0; + double min_weight = 0.0; + }; + + /// \brief Constructor + explicit EA( + std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const EA::Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string& model_name); + + /// \brief Destructor + ~EA(); + + Controller *controller() override + { return _controller.get(); } + + void init_first_controller() override; + void init_next_controller() override; + void finalize_current_controller(double fitness) override; + void load_best_controller() override; + + + protected: + EA::Parameters EA_Params; + std::vector population; + std::vector::iterator current_Ind; +// Eigen::VectorXd current_genome_evaluating; + double best_fitness = -std::numeric_limits::infinity(); + Eigen::VectorXd best_genome; + int max_learning_evaluations; + + /// \brief ptr to the current robot controller + std::unique_ptr _controller; + + /// \brief function to load a genome into a controller + std::function load_genome; + + /// \brief function to turn the controller into a sample + std::function get_genome; + + + /// ############## virtual part for different EA implementations ############## + public: + typedef std::unique_ptr Ptr; + typedef std::unique_ptr ConstPtr; + typedef EA::Ptr (Factory)(const RandNum::Ptr&); + + std::vector popNextIndNumbers; + /// This method initilizes setting for EA and random number generator seed + void setSettings(const RandNum::Ptr &rn); + /// This method sets the fitness value of an individual + virtual void setObjectives(size_t indIndex, const double &objectives) + { 
+ current_Ind_Index = indIndex; + population[indIndex]->setFitness(objectives); + } + + /** + * @brief Epoch method is called at the end of each generation + */ + virtual void epoch(); + /** + * @brief Initialisation of the population for next generation. Called at the end of each generation after the epoch function. + */ + virtual void init_next_pop(); + /** + * @brief ending condition of the algorithm + * @return true if ending condition is meet + */ + virtual bool is_finish(){ +// int maxGen = max_learning_evaluations; + return generation >= max_learning_evaluations; + } + /** + * @brief ending condition of the evaluation. This condition is added with OR to the default condition the time limit. + * @return + */ + virtual bool finish_eval(){ + return false; + } + + void incr_generation(){generation++;} + //GETTERS & SETTERS + Individual::indPtr getIndividual(size_t index) const; + size_t getPopSize() const {return population.size();} + const std::vector &get_population() const {return population;} + const EA::Parameters &get_parameters() const {return EA_Params;} + const RandNum::Ptr get_randomNum() const {return randomNum;} + int get_generation() const {return generation;} + int get_numberEvaluation() const {return numberEvaluation;} + std::chrono::nanoseconds getEvalCompTime() const { + return std::chrono::duration_cast + (endEvalTime - startEvalTime); + } + + void set_randomNum(const RandNum::Ptr& rn){randomNum = rn;} + void set_generation(int gen){generation = gen;} + void set_current_Ind_Index(int index){current_Ind_Index = index;} + void set_startEvalTime(const hr_clock::time_point& t){startEvalTime = t;} + void set_endEvalTime(const hr_clock::time_point& t){endEvalTime = t;} + protected: + /// This method initilizes a population of genomes + virtual void evaluation(){} // This is now only used by NEAT but can also be done for the other genomes. However, by passing the update function to the EA different EA objects can contain different scenarios making the plugin more flexible. + virtual void selection(){} // selection operator + virtual void replacement(){} // replacement operator + virtual void mutation(){} // mutation operator + virtual void crossover(){} //crossover + virtual void end(){} // last call to the EA, when simulation stops + + ///set the environment type, evolution type... 
+ + ///random number generator for EA + RandNum::Ptr randomNum; + int generation = 0; + int numberEvaluation = 0; + int current_Ind_Index = 0; + int best_Ind = 0; + hr_clock::time_point startEvalTime; + hr_clock::time_point endEvalTime; + std::string output_dir; + }; +}//revolve +#endif //REVOLVE_EA_H diff --git a/cpprevolve/revolve/brains/learner/EA_misc/Novelty.cpp b/cpprevolve/revolve/brains/learner/EA_misc/Novelty.cpp new file mode 100644 index 0000000000..f0e819e8f5 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EA_misc/Novelty.cpp @@ -0,0 +1,104 @@ +#include "Novelty.h" + +using namespace revolve; + +int Novelty::k_value = 15; +double Novelty::novelty_thr = 0.9; +double Novelty::archive_adding_prob = 0.4; + +double Novelty::sparseness(const std::vector &dist){ + + + double sum = 0; + if(dist.size() > k_value + 1){ + for(int i = 0; i < k_value; i++) + sum += dist[i]; + } + if(std::isnan(sum/static_cast(k_value))){ + std::cerr << "NaN found" << std::endl; + } + return sum/static_cast(k_value); +} + +std::vector Novelty::distances(const Eigen::VectorXd &ind_desc, + const std::vector &archive, + const std::vector &pop){ + + std::vector dist(archive.size() + pop.size()); + // Comparing with archive + tbb::parallel_for(tbb::blocked_range(0,archive.size()), + [&](tbb::blocked_range r){ + + for(size_t i = r.begin(); i != r.end(); i++){ + dist[i] = (archive[i] - ind_desc).norm(); + } + }); + + + // Comparing with population + tbb::parallel_for(tbb::blocked_range(0,pop.size()), + [&](tbb::blocked_range r){ + for(size_t i = r.begin(); i != r.end(); i++){ + if(pop[i] == ind_desc) + dist[i+archive.size()] = 1.; + else + dist[i+archive.size()] = (pop[i] - ind_desc).norm(); + } + }); + + std::sort(dist.begin(),dist.end()); // Sorting distances + + return dist; +} + + +std::vector Novelty::distances(const Eigen::VectorXd &ind_desc, + const std::vector &archive, + const std::vector &pop, + std::vector & sorted_pop_indexes){ + + std::vector dist(archive.size() + pop.size()); + + // Comparing with achive + tbb::parallel_for(tbb::blocked_range(0,archive.size()), + [&](tbb::blocked_range r){ + + for(size_t i = r.begin(); i != r.end(); i++){ + dist[i] = (archive[i] - ind_desc).norm(); + } + }); + + // Comparing with population + std::vector pop_dist(pop.size()); + sorted_pop_indexes.resize(pop.size()); + tbb::parallel_for(tbb::blocked_range(0,pop.size()), + [&](tbb::blocked_range r){ + for(size_t i = r.begin(); i != r.end(); i++){ + sorted_pop_indexes[i] = i; + if(pop[i] == ind_desc) + pop_dist[i] = 1.; + else + pop_dist[i] = (pop[i] - ind_desc).norm(); + dist[i+archive.size()] = pop_dist[i]; + } + }); + + //sort indexes of population from closest to farthest of ind_desc. + std::sort(sorted_pop_indexes.begin(),sorted_pop_indexes.end(),[&](size_t a, size_t b){ + return pop_dist[a] < pop_dist[b]; + }); + std::sort(dist.begin(),dist.end()); // Sorting distances + + return dist; +} + +void Novelty::update_archive(const Eigen::VectorXd &ind_desc, + double ind_nov, + std::vector &archive, + const RandNum::Ptr &rn){ + + if(ind_nov > novelty_thr || rn->randInt(0,1) < archive_adding_prob){ + archive.push_back(ind_desc); + } + +} diff --git a/cpprevolve/revolve/brains/learner/EA_misc/Novelty.h b/cpprevolve/revolve/brains/learner/EA_misc/Novelty.h new file mode 100644 index 0000000000..dbe714881a --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EA_misc/Novelty.h @@ -0,0 +1,68 @@ +// +// Created by fuda on 12/7/20. 
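+//
+// Static helpers for novelty search: distances() returns the distances of one
+// descriptor to the archive and to the current population, sorted in
+// increasing order; sparseness() averages the k_value nearest of those
+// distances; update_archive() adds the descriptor to the archive when its
+// novelty exceeds novelty_thr or, at random, based on archive_adding_prob
+// (see Novelty.cpp).
+//
+// Illustrative use, as in NIPES::epoch() (variable names are hypothetical):
+//   double nov = Novelty::sparseness(Novelty::distances(desc, archive, pop_desc));
+//   Novelty::update_archive(desc, nov, archive, rand_num);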
+// + +#ifndef REVOLVE_NOVELTY_H +#define REVOLVE_NOVELTY_H + +#include "../EA.h" +#include +#include +#include "RandNum.h" + +namespace revolve { + + struct Novelty { + + /** + * @brief compute sparseness from a given list of distances and k_value a static parameter + * @param a sorted list of distances. + * @return sparseness + */ + static double sparseness(const std::vector &dist); + + /** + * @brief add ind to the archive if its novelty score is above a threshold or according a certain probability + * @param individual + * @param novelty score of the individual + * @param archive + * @param seed for the add archive probability + */ + static void update_archive(const Eigen::VectorXd& ind_desc, + double ind_nov, + std::vector &archive, + const RandNum::Ptr &rn); + + + /** + * @brief Compute distances of a descriptor to the archive and a population. And return the vector of distances sorted in increasing order. + * @param descriptor + * @param archive of descriptor + * @param population + * @return vector of distances + */ + static std::vector distances(const Eigen::VectorXd& desc, + const std::vector &archive, + const std::vector &pop); + + /** + * @brief Compute distances of a descriptor to the archive and a population. And return the vector of distances sorted in increasing order. + * @param descriptor + * @param archive of descriptor + * @param population + * @param output : sorted indexes of the population from the closest to the farthest of the desc + * @return vector of distances + */ + static std::vector distances(const Eigen::VectorXd& desc, + const std::vector &archive, + const std::vector &pop, + std::vector & sorted_pop_indexes); + + + static int k_value; + static double novelty_thr; + static double archive_adding_prob; + }; +} + +#endif //REVOLVE_NOVELTY_H diff --git a/cpprevolve/revolve/brains/learner/EA_misc/RandNum.cpp b/cpprevolve/revolve/brains/learner/EA_misc/RandNum.cpp new file mode 100644 index 0000000000..d8d6fb2f5c --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EA_misc/RandNum.cpp @@ -0,0 +1,61 @@ +#include "RandNum.h" +#include +#include + +using namespace revolve; + +RandNum::RandNum(int seed) +{ + gen.seed(seed); + std::cout << "seed set to " << seed << std::endl; + m_seed = seed; +} + + +RandNum::~RandNum() += default; + +double RandNum::randDouble(double lower, double upper) { + std::uniform_real_distribution<> dist(lower,upper); + return dist(gen); +} + + +float RandNum::randFloat(float lower, float upper) { + std::uniform_real_distribution<> dist(lower,upper); + return dist(gen); +} + +int RandNum::randInt(int lower, int upper) { + std::uniform_int_distribution<> dist(lower,upper); + return dist(gen); +} + +std::vector RandNum::randVectd(double lower, double upper, int size){ + std::vector res; + for(int i = 0; i < size; i++) + res.push_back(randDouble(lower,upper)); + return res; +} + +Eigen::ArrayXXi RandNum::Bernoulli(double p, const int shape[2]) { + Eigen::ArrayXXd bernoulli = Eigen::MatrixXd::Random(shape[0], shape[1]).array(); + return (bernoulli.abs()(); +} + +double RandNum::normalDist(double mu, double sigma){ + std::normal_distribution<> nd(mu,sigma); + return nd(gen); +} + + +void RandNum::setSeed(int seed) { + gen.seed(seed); + m_seed = seed; + std::cout << "Seed set to " << seed << std::endl; +} + +int RandNum::getSeed() +{ + return m_seed; +} diff --git a/cpprevolve/revolve/brains/learner/EA_misc/RandNum.h b/cpprevolve/revolve/brains/learner/EA_misc/RandNum.h new file mode 100644 index 0000000000..60206471aa --- /dev/null +++ 
b/cpprevolve/revolve/brains/learner/EA_misc/RandNum.h @@ -0,0 +1,71 @@ +#pragma once +#ifndef RANDNUM_H +#define RANDNUM_H + +#include +#include +#include + +namespace revolve { + +class RandNum +{ +public: + typedef std::shared_ptr Ptr; + typedef std::shared_ptr ConstPtr; + + explicit RandNum(int seed); // instantiate the class and specify the initial seed. + ~RandNum(); + + /** + * @brief random number between a lower bound and a upper bound included + * @param lower bound (double) + * @param upper bound (double) + * @return a double + */ + double randDouble(double lower, double upper); // creates a random float between two specified values. + + /** + * @brief random number between a lower bound and a upper bound included + * @param lower bound (float) + * @param upper bound (float) + * @return a float + */ + float randFloat(float lower, float upper); // creates a random float between two specified values. + + /** + * @brief random number between a lower bound and a upper bound included + * @param lower bound (int) + * @param upper bound (int) + * @return an integer + */ + int randInt(int lower, int upper); // creates a random integer (range, offset) + + /** + * @brief Generate a random vector of double + * @param lower bound + * @param upper bound (included + * @param size of the vector + * @return + */ + std::vector randVectd(double lower, double upper, int size); + + /** + * @brief Generate a random number from a normal distribution + * @param mean of the distribution + * @param variance of the distribution + * @return + */ + double normalDist(double mu, double sigma); + + void setSeed(int seed); // sets the seed of the random number generator + int m_seed = 0; + int getSeed(); + std::mt19937 gen; + + Eigen::ArrayXXi Bernoulli(double p, const int *len); +}; + +}//EA_misc + +#endif //RANDNUM_H diff --git a/cpprevolve/revolve/brains/learner/EvaluationReporter.h b/cpprevolve/revolve/brains/learner/EvaluationReporter.h new file mode 100644 index 0000000000..908ea754f0 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/EvaluationReporter.h @@ -0,0 +1,93 @@ +// +// Created by matteo on 12/5/19. 
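+//
+// Reporting interface for learner evaluations: EvaluationReporter is the
+// abstract base, PrintReporter prints each report to stdout, and
+// AggregatedReporter forwards a report to a list of child reporters that all
+// share the same robot_id.
+//
+// Illustrative use (the surrounding values are hypothetical):
+//   AggregatedReporter reporter("robot_1");
+//   reporter.create<PrintReporter>();            // child receives robot_id
+//   reporter.report(eval, /*dead=*/false, fitness);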
+// + +#pragma once + +#include +#include +#include +#include + +namespace revolve { + +// -------------------------------------------------------- +/// Reporter Abstract Class +class EvaluationReporter +{ +public: + explicit EvaluationReporter(const std::string id) + : robot_id(std::move(id)) + {} + + virtual ~EvaluationReporter() = default; + + virtual void report( + unsigned int eval, + bool dead, + double fitness) = 0; + + const std::string robot_id; +}; + +// -------------------------------------------------------- +/// Simple Reporter that prints the reported data on stdout +class PrintReporter : public EvaluationReporter +{ +public: + explicit PrintReporter(const std::string id) + : EvaluationReporter(std::move(id)) + {} + + ~PrintReporter() override = default; + + void report(unsigned int eval, + bool dead, + double fitness) override + { + std::cout << "Evaluation Report: robot id("<< robot_id + << ") eval(" << eval + << ") dead(" << dead + << ") fitness(" << fitness + << ')' << std::endl; + } +}; + +// -------------------------------------------------------- +/// Aggregated Reporter +class AggregatedReporter : public EvaluationReporter +{ +public: + explicit AggregatedReporter(const std::string robot_id) + : EvaluationReporter(std::move(robot_id)) + {} + ~AggregatedReporter() override = default; + + void report(unsigned int eval, + bool dead, + double fitness) override + { + for (std::shared_ptr &reporter: reporters) + { + reporter->report(eval, dead, fitness); + } + } + + /// Create a new Reporter in place + template + void create(Args &&... args) + { + reporters.emplace_back(new ReporterType(robot_id, std::forward(args)...)); + } + + void append(std::shared_ptr reporter) + { + assert(reporter->robot_id == this->robot_id); + reporters.emplace_back(std::move(reporter)); + } + +private: + std::vector> reporters; +}; + +} diff --git a/cpprevolve/revolve/brains/learner/Evaluator.h b/cpprevolve/revolve/brains/learner/Evaluator.h new file mode 100644 index 0000000000..be7a3cedf5 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/Evaluator.h @@ -0,0 +1,33 @@ +// +// Created by andi on 27-11-19. +// + +#ifndef REVOLVE_EVALUATOR_H +#define REVOLVE_EVALUATOR_H + +namespace revolve { +class Evaluator +{ + /// \brief Constructor +public: + Evaluator() = default; + + /// \brief Destructor + virtual ~Evaluator() = default; + + /// \brief Initialisation method + virtual void reset() = 0; + + /// \brief Retrieve the fitness + /// \return A fitness value according to a given formula + virtual double fitness() = 0; + + /// \brief Update the position + /// \param[in] _pose Current position of a robot + //virtual void update(const ignition::math::Pose3d &_pose, + // const double time, + // const double step) = 0; +}; +} + +#endif //REVOLVE_EVALUATOR_H \ No newline at end of file diff --git a/cpprevolve/revolve/brains/learner/HyperNEAT.cpp b/cpprevolve/revolve/brains/learner/HyperNEAT.cpp new file mode 100644 index 0000000000..4b2a25f74f --- /dev/null +++ b/cpprevolve/revolve/brains/learner/HyperNEAT.cpp @@ -0,0 +1,101 @@ +// +// Created by matteo on 8/21/19. 
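+//
+// Wraps a MultiNEAT population around a single robot controller: genomes are
+// evaluated one by one, species by species, and Population::Epoch() is called
+// once every genome of every species has been tried. Each genome is pushed
+// into the DifferentialCPG controller via load_genome_to_controller().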
+// + +#include "HyperNEAT.h" +#include "../controller/DifferentialCPG.h" + +using namespace revolve; + +HyperNEAT::HyperNEAT( + std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const NEAT::Parameters ¶ms, + const int seed, + const double evaluation_time, + unsigned int n_evaluations) + : Learner(evaluator, reporter, evaluation_time, n_evaluations) + , _controller(std::move(controller)) + , params(params) + , population(nullptr) +{ + NEAT::Genome start_genome(0, 3, 0, 1, //TODO these are also parameters + false, + NEAT::UNSIGNED_SIGMOID, + NEAT::UNSIGNED_SIGMOID, + 0, + this->params, + 0); + + population.reset(new NEAT::Population( + start_genome, + params, + true, + 1.0, + seed + )); + assert(this->_controller && "HyperNEAT: passed null controller"); + switch (this->_controller->controller_type) + { + case revolve::Controller::DIFFERENTIAL_CPG: + load_genome = [this](std::vector::iterator config_cppn_genome) + { + auto *temp_controller = dynamic_cast<::revolve::DifferentialCPG *>(this->_controller.get()->into_DifferentialCPG()); + temp_controller->load_genome_to_controller(*config_cppn_genome); + }; + break; + default: + std::cerr << "Controller not supported" << std::endl; + throw std::runtime_error("Controller not supported"); + } +} + +void HyperNEAT::init_first_controller() +{ + current_specie_evaluating = population->m_Species.begin(); + current_genome_evaluating = current_specie_evaluating->m_Individuals.begin(); + + //TODO load genome in controller + this->load_genome(current_genome_evaluating); +} + +void HyperNEAT::init_next_controller() +{ + // load next genome + current_genome_evaluating++; + + // Finished a species + if (current_genome_evaluating == current_specie_evaluating->m_Individuals.end()) + { + current_specie_evaluating++; + + // Finished all species -> Generate new generation + if (current_specie_evaluating == population->m_Species.end()) + { + population->Epoch(); + current_specie_evaluating = population->m_Species.begin(); + } + + current_genome_evaluating = current_specie_evaluating->m_Individuals.begin(); + } + //TODO load genome in controller + this->load_genome(current_genome_evaluating); +} + +void HyperNEAT::finalize_current_controller(double fitness) +{ + current_genome_evaluating->SetFitness(fitness); + if(fitness>best_fitness) + { + this->best_fitness = fitness; + this->best_genome = current_genome_evaluating; + } +} + +void HyperNEAT::load_best_controller() +{ + //TODO load best genome into controller +// this->load_genome(current_genome_evaluating); + this->load_genome(this->best_genome); +} diff --git a/cpprevolve/revolve/brains/learner/HyperNEAT.h b/cpprevolve/revolve/brains/learner/HyperNEAT.h new file mode 100644 index 0000000000..83f4033fcb --- /dev/null +++ b/cpprevolve/revolve/brains/learner/HyperNEAT.h @@ -0,0 +1,51 @@ +// +// Created by matteo on 8/21/19. 
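+//
+// Learner that evolves DifferentialCPG controllers with (Hyper)NEAT; the
+// evaluation order and the bookkeeping of the best genome seen so far live in
+// HyperNEAT.cpp.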
+// + +#pragma once + +#include "Learner.h" +#include "Evaluator.h" +#include +#include + +namespace revolve { + +class HyperNEAT: public Learner +{ +public: + explicit HyperNEAT( + std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const NEAT::Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_evaluations); + + ~HyperNEAT() override = default; + + Controller *controller() override + { return _controller.get(); } + + void init_first_controller() override; + void init_next_controller() override; + void finalize_current_controller(double fitness) override; + void load_best_controller() override; + +private: + std::unique_ptr _controller; + + const NEAT::Parameters params; + std::unique_ptr population; + std::vector::iterator current_specie_evaluating; + std::vector::iterator current_genome_evaluating; + double best_fitness = -std::numeric_limits::infinity(); + std::vector::iterator best_genome; + +protected: + /// \brief function to load a genome into a controller + std::function::iterator)> load_genome; +}; + +} diff --git a/cpprevolve/revolve/brains/learner/Learner.cpp b/cpprevolve/revolve/brains/learner/Learner.cpp new file mode 100644 index 0000000000..e60be1260c --- /dev/null +++ b/cpprevolve/revolve/brains/learner/Learner.cpp @@ -0,0 +1,46 @@ +// +// Created by matteo on 12/6/19. +// + +#include "Learner.h" + +using namespace revolve; + +void Learner::optimize(double time, double /*dt*/) +{ + if (time < end_controller_time) return; + + bool finished = evaluation_counter >= static_cast(n_evaluations); + if (finished) + { + evaluation_reporter->report(evaluation_counter, true, evaluator->fitness()); + } + else + { + std::cout << "Learner evaluation_counter: " << evaluation_counter + 1 << std::endl; + // first evaluation + if (evaluation_counter < 0) { + evaluation_counter = 0; + this->init_first_controller(); + } else { + // finalize previous run + evaluation_counter++; + double fitness = evaluator->fitness(); + finished = evaluation_counter >= static_cast(n_evaluations); + + evaluation_reporter->report(evaluation_counter, finished, fitness); + this->finalize_current_controller(fitness); + std::cout << "Fitness: " << fitness << std::endl; + + // load next genome + if (finished) { + this->load_best_controller(); + } else { + this->init_next_controller(); + } + } + } + + evaluator->reset(); + end_controller_time = time + evaluation_time; +} diff --git a/cpprevolve/revolve/brains/learner/Learner.h b/cpprevolve/revolve/brains/learner/Learner.h new file mode 100644 index 0000000000..fbd2e0c5e5 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/Learner.h @@ -0,0 +1,56 @@ +// +// Created by andi on 25-11-19. 
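+//
+// Base class of all learners. optimize(time, dt) (see Learner.cpp) is meant
+// to be called every control step and returns immediately until
+// end_controller_time is reached. On its first active call it installs the
+// first controller; afterwards it finalises the running controller with the
+// evaluator's fitness and either installs the next candidate or, once
+// n_evaluations have been spent, installs the best controller found so far.
+// Subclasses provide init_first_controller(), init_next_controller(),
+// finalize_current_controller() and load_best_controller().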
+// + +#pragma once + +#include +#include "Evaluator.h" +#include "EvaluationReporter.h" +#include "../controller/Controller.h" + +namespace revolve { + +class Learner +{ +public: + /// \brief Constructor + explicit Learner( + Evaluator *const evaluator, + EvaluationReporter *const reporter, + const double evaluation_time, + const unsigned int n_evaluations) + : evaluation_time(evaluation_time) + , end_controller_time(-std::numeric_limits::infinity()) + , evaluation_counter(-1) + , n_evaluations(n_evaluations) + , evaluator(evaluator) + , evaluation_reporter(reporter) + {} + + /// \brief Deconstructor + virtual ~Learner() = default; + + /// \brief performes the optimization of the controller + virtual void optimize(double time, double dt); + virtual void init_first_controller() = 0; + virtual void init_next_controller() = 0; + virtual void finalize_current_controller(double fitness) = 0; + virtual void load_best_controller() = 0; + + virtual revolve::Controller *controller() = 0; + +protected: + const double evaluation_time; + double end_controller_time; + + /// \brief Learning iterations counter + long evaluation_counter; + /// \brief Max number of learning iterations + const unsigned int n_evaluations; + + revolve::Evaluator *evaluator; + revolve::EvaluationReporter *evaluation_reporter; +}; + +} diff --git a/cpprevolve/revolve/brains/learner/NIPES.cpp b/cpprevolve/revolve/brains/learner/NIPES.cpp new file mode 100644 index 0000000000..aa5935cc38 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/NIPES.cpp @@ -0,0 +1,337 @@ +// +// Created by matteo on 8/21/19. +// + +#include "NIPES.h" + +#include +#include "../controller/DifferentialCPG.h" +#include "EA.h" + +namespace revolve { + +std::map IPOPCMAStrategy::scriterias = {{cma::CONT, "OK"}, + {cma::AUTOMAXITER, "The automatically set maximal number of iterations per run has been reached"}, + {cma::TOLHISTFUN, "[Success] The optimization has converged"}, + {cma::EQUALFUNVALS, "[Partial Success] The objective function values are the same over too many iterations, check the formulation of your objective function"}, + {cma::TOLX, "[Partial Success] All components of covariance matrix are very small (e.g. < 1e-12)"}, + {cma::TOLUPSIGMA, "[Error] Mismatch between step size increase and decrease of all eigenvalues in covariance matrix. Try to restart the optimization."}, + {cma::STAGNATION, "[Partial Success] Median of newest values is not smaller than the median of older values"}, + {cma::CONDITIONCOV, "[Error] The covariance matrix's condition numfber exceeds 1e14. 
Check out the formulation of your problem"}, + {cma::NOEFFECTAXIS, "[Partial Success] Mean remains constant along search axes"}, + {cma::NOEFFECTCOOR, "[Partial Success] Mean remains constant in coordinates"}, + {cma::MAXFEVALS, "The maximum number of function evaluations allowed for optimization has been reached"}, + {cma::MAXITER, "The maximum number of iterations specified for optimization has been reached"}, + {cma::FTARGET, "[Success] The objective function target value has been reached"}}; + +bool IPOPCMAStrategy::reach_ftarget() { + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << "Best fitness : " << best_fitnesses.back() << std::endl; + + if (_parameters.get_ftarget() != std::numeric_limits::infinity()) { + if (best_fitnesses.back() <= _parameters.get_ftarget()) { + std::stringstream sstr; + sstr << "stopping criteria fTarget => fvalue=" << best_fitnesses.back() << " / ftarget=" + << _parameters.get_ftarget(); + log_stopping_criterias.push_back(sstr.str()); + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << sstr.str() << std::endl; + return true; + } + } + return false; +} + +bool IPOPCMAStrategy::pop_desc_stagnation() { + std::vector descriptors; + for (const auto &ind: _pop) + descriptors.push_back(std::dynamic_pointer_cast(ind)->descriptor()); + + Eigen::VectorXd mean = Eigen::VectorXd::Zero(_pop[0]->genome.size()); + for (Eigen::VectorXd desc : descriptors) { + mean += desc; + } + mean = mean / static_cast(descriptors.size()); + + Eigen::VectorXd stddev = Eigen::VectorXd::Zero(_pop[0]->genome.size()); + for (Eigen::VectorXd desc : descriptors) + stddev += (desc - mean).cwiseProduct(desc - mean); + + bool stop = true; + for (int i = 0; i < stddev.rows(); i++) + stop = stop && sqrt(stddev(i / static_cast(descriptors.size() - 1))) <= pop_stag_thres; + + if (stop) { + std::stringstream sstr; + sstr << "Stopping : standard deviation of the descriptor population is smaller than " << pop_stag_thres + << " : " << stddev; + log_stopping_criterias.push_back(sstr.str()); + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << sstr.str() << std::endl; + } + return stop; +} + +bool IPOPCMAStrategy::pop_fit_stagnation() { + std::vector fvalues; + for (const auto &ind : _pop) + fvalues.push_back(ind->getFitness()); + + + double mean = 0.0; + for (double fv : fvalues) + mean += fv; + mean = mean / static_cast(fvalues.size()); + + double stddev = 0.0; + for (double fv : fvalues) + stddev += (fv - mean) * (fv - mean); + + stddev = sqrt(stddev / static_cast(fvalues.size() - 1)); + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << "pop standard deviation : " << stddev << std::endl; + + if (stddev <= pop_stag_thres) { + std::stringstream sstr; + sstr << "Stopping : standard deviation of the population is smaller than 0.05 : " << stddev; + log_stopping_criterias.push_back(sstr.str()); + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << sstr.str() << std::endl; + return true; + } else return false; +} + +bool IPOPCMAStrategy::best_sol_stagnation() { + if (best_fitnesses.size() < len_of_stag) + return false; + double mean = 0.0; + for (size_t i = best_fitnesses.size() - len_of_stag; i < best_fitnesses.size(); i++) { + mean += best_fitnesses[i]; + } + mean = mean / static_cast(len_of_stag); + double stddev = 0.0; + for (size_t i = best_fitnesses.size() - len_of_stag; i < best_fitnesses.size(); i++) { + stddev += (best_fitnesses[i] - mean) * (best_fitnesses[i] - mean); + } + stddev = sqrt(stddev / static_cast(len_of_stag - 1)); + + if (stddev <= 0.05) { + std::stringstream sstr; + + 
sstr << "Stopping : standard deviation of the last " << len_of_stag + << " best fitnesses is smaller than 0.05 : " << stddev; + log_stopping_criterias.push_back(sstr.str()); + cma::CMAES_LOG_IF(cma::INFO, !_parameters.quiet()) << sstr.str() << std::endl; + return true; + } else return false; +} + +void IPOPCMAStrategy::eval(const dMat &candidates, const dMat &phenocandidates) { + // custom eval. + _solutions.candidates().clear(); + for (Individual::indPtr &r : _pop) { + dVec x; + x = r->genome; + double fvalue = r->getFitness(); + _solutions.candidates().push_back(cma::Candidate(fvalue, x)); + } + update_fevals(candidates.cols()); +} + +void IPOPCMAStrategy::tell() { + ipop_cmaes_t::tell(); + std::vector best_sample; + best_fitnesses.push_back(best_fitness(best_sample)); + if (novelty_ratio > 0) + novelty_ratio -= novelty_decr; + if (best_fitnesses.back() < best_seen_solution.first || best_fitnesses.size() == 1) + best_seen_solution = std::make_pair(best_fitnesses.back(), best_sample); + inc_iter(); +} + +bool IPOPCMAStrategy::stop() { + reached_ft = reach_ftarget(); + bool ipop_stop = ipop_cmaes_t::stop(); + bool pop_stag = pop_desc_stagnation(); + bool fit_stag = pop_fit_stagnation(); + bool best_sol_stag = false; + if (len_of_stag > 0) + best_sol_stag = best_sol_stagnation(); + + if (ipop_stop) { + log_stopping_criterias.push_back(scriterias[_solutions.run_status()]); + } + return pop_stag || best_sol_stag || ipop_stop || fit_stag; +} + +void IPOPCMAStrategy::reset_search_state() { + if (elitist_restart) + _parameters.set_x0(best_seen_solution.second, best_seen_solution.second); + + ipop_cmaes_t::reset_search_state(); + novelty_ratio = start_novelty_ratio; + best_fitnesses.clear(); +} + +double IPOPCMAStrategy::best_fitness(std::vector &best_sample) { + double bf = -std::numeric_limits::infinity(); + for (const auto &ind : _pop) { + if (bf < ind->getFitness()) { + bf = ind->getFitness(); + best_sample = ind->get_ctrl_genome(); + } + } + + return bf; +} + +/// ################### ARE: top ################# +NIPES::NIPES(std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const NIPES::NIPES_Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string &model_name) + : EA(std::move(controller), evaluator, reporter, + params.EA_params, seed, evaluation_time, + n_learning_evaluations, + model_name) { + + int lenStag = Nipes_Param.stagnation_length; + double step_size = Nipes_Param.CMAES_step; +// double ftarget = Nipes_Param.ftarget; + bool elitist_restart = Nipes_Param.elitist_restart; + double novelty_ratio = Nipes_Param.novelty_ratio; + double novelty_decr = Nipes_Param.novelty_decrement; + float pop_stag_thres = Nipes_Param.population_stagnation_threshold; + + Novelty::k_value = Nipes_Param.novelty_k_value; + Novelty::novelty_thr = Nipes_Param.novelty_threshold; + Novelty::archive_adding_prob = Nipes_Param.novelty_archive_probability; + + int n_params = this->get_genome().size(); + + std::vector initial_point = randomNum->randVectd(EA_Params.min_weight, EA_Params.max_weight, n_params); + + double lb[n_params], ub[n_params]; + for (int i = 0; i < n_params; i++) { + lb[i] = EA_Params.min_weight; + ub[i] = EA_Params.max_weight; + } + + geno_pheno_t gp(lb, ub, n_params); + + cma::CMAParameters cmaParam(initial_point, step_size, EA_Params.population_size, + randomNum->getSeed(), gp); +// cmaParam.set_ftarget(ftarget); + cmaParam.set_quiet(!EA_Params.verbose); + + cmaStrategy.reset(new IPOPCMAStrategy([](const 
double *, const int &) -> double {}, cmaParam)); + cmaStrategy->set_elitist_restart(elitist_restart); + cmaStrategy->set_length_of_stagnation(lenStag); + cmaStrategy->set_novelty_ratio(novelty_ratio); + cmaStrategy->set_novelty_decr(novelty_decr); + cmaStrategy->set_pop_stag_thres(pop_stag_thres); + + dMat init_samples = cmaStrategy->ask(); + + std::vector genome(n_params); + + for (int u = 0; u < EA_Params.population_size; u++) { + + for (int v = 0; v < n_params; v++) + genome[v] = init_samples(v, u); + + Individual::indPtr ind(new Individual()); + ind->setGenome(genome); + population.push_back(ind); + } + std::cout << "[NIPES] population initialized" << std::endl; +} + +NIPES::~NIPES() { + cmaStrategy.reset(); +} + +void NIPES::epoch() { + bool withRestart = Nipes_Param.restart; + bool incrPop = Nipes_Param.incremental_population; + bool elitist_restart = Nipes_Param.elitist_restart; + + /** NOVELTY **/ + if (Nipes_Param.novelty_ratio > 0.) { + if (Novelty::k_value >= population.size()) + Novelty::k_value = population.size() / 2; + else Novelty::k_value = Nipes_Param.novelty_k_value; + + std::vector pop_desc; + for (const auto &ind : population) + pop_desc.push_back(ind->descriptor()); + //compute novelty + for (const auto &ind : population) { + Eigen::VectorXd ind_desc = ind->descriptor(); + double ind_nov = Novelty::sparseness(Novelty::distances(ind_desc, archive, pop_desc)); + ind->setNovelty(ind_nov); + } + + //update archive + for (const auto &ind : population) { + Eigen::VectorXd ind_desc = ind->descriptor(); + double ind_nov = ind->get_ctrl_novelty(); + Novelty::update_archive(ind_desc, ind_nov, archive, randomNum); + } + } + /**/ + + cmaStrategy->set_population(population); + cmaStrategy->eval(); + cmaStrategy->tell(); + bool stop = cmaStrategy->stop(); +// if(cmaStrategy->have_reached_ftarget()){ +// _is_finish = true; +//// return; +// } + + if (withRestart && stop) { + if (EA_Params.verbose) + std::cout << "Restart !" << std::endl; + + cmaStrategy->capture_best_solution(best_run); + + if (incrPop) + cmaStrategy->lambda_inc(); + + cmaStrategy->reset_search_state(); + if (!elitist_restart) { + cmaStrategy->get_parameters().set_x0(EA_Params.min_weight, EA_Params.max_weight); + } + } +} + +void NIPES::init_next_pop() { + int pop_size = cmaStrategy->get_parameters().lambda(); + + dMat new_samples = cmaStrategy->ask(); + + int n_param = population[0]->get_ctrl_genome().size(); + std::vector genome(n_param); + + population.clear(); + for (int i = 0; i < pop_size; i++) { + + for (int j = 0; j < n_param; j++) + genome[j] = new_samples(j, i); + + Individual::indPtr ind(new Individual()); + ind->setGenome(genome); + population.push_back(ind); + } +} + + +bool NIPES::is_finish() { + int maxNbrEval = EA_Params.max_eval; + return _is_finish || numberEvaluation >= maxNbrEval; +} + +bool NIPES::finish_eval() { + return EA::finish_eval(); +} +}; \ No newline at end of file diff --git a/cpprevolve/revolve/brains/learner/NIPES.h b/cpprevolve/revolve/brains/learner/NIPES.h new file mode 100644 index 0000000000..8ab3192004 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/NIPES.h @@ -0,0 +1,180 @@ +// +// Created by matteo on 8/21/19. 
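+//
+// NIPES learner: IPOP-CMA-ES (libcmaes) combined with a novelty archive.
+// epoch() computes novelty scores for the population (when novelty_ratio > 0)
+// and updates the archive, feeds the evaluated samples back into the CMA-ES
+// strategy (eval/tell/stop) and, when a stopping criterion fires, restarts the
+// search, optionally growing the population via lambda_inc();
+// init_next_pop() then asks the strategy for the next batch of genomes.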
+// + +#pragma once +#ifndef NIPES_HPP +#define NIPES_HPP + +#include "Learner.h" +#include "Evaluator.h" +#include "EA.h" + +#include + +namespace cma = libcmaes; +using geno_pheno_t = cma::GenoPheno; +using cov_update_t = cma::CovarianceUpdate; +using ipop_cmaes_t = cma::IPOPCMAStrategy; +using eostrat_t = cma::ESOStrategy,cma::CMASolutions,cma::CMAStopCriteria>; + +namespace revolve { + + class IPOPCMAStrategy : public ipop_cmaes_t + { + private: +// cma::FitFunc emptyObj = [](const double*,const int&) -> double{}; + + + public: + + static std::map scriterias; + + typedef std::shared_ptr Ptr; + typedef std::shared_ptr ConstPtr; + + IPOPCMAStrategy(cma::FitFunc func,cma::CMAParameters ¶meters) + :ipop_cmaes_t(func, parameters) + { +// std::cout<<"[[IPOPCMA] Constructor"<& pop){_pop = pop;} + void set_elitist_restart(bool er){elitist_restart = er;} + void set_length_of_stagnation(int los){len_of_stag = los;} + void set_novelty_ratio(double nr){novelty_ratio = nr; start_novelty_ratio = nr;} + void set_novelty_decr(double nd){novelty_decr = nd;} + void set_pop_stag_thres(float pst){pop_stag_thres = pst;} + + bool have_reached_ftarget(){return reached_ft;} + + std::vector log_stopping_criterias; + + + private: + std::vector _pop; + bool elitist_restart = false; + std::vector best_fitnesses; + std::pair> best_seen_solution; + int len_of_stag; + float pop_stag_thres; + double novelty_ratio; + double start_novelty_ratio; + double novelty_decr ; + + + + double best_fitness(std::vector &); + + bool reached_ft = false; + + }; + + + + +class NIPES: public EA +{ +public: + struct NIPES_Parameters{ + int stagnation_length = 20; + bool elitist_restart = true; + double CMAES_step = 1.0; + + int novelty_k_value = 15; + double novelty_ratio = 1.; + double novelty_decrement = 0.05; + double novelty_threshold = 0.9; + double novelty_archive_probability = 0.4; + + float population_stagnation_threshold = 0.05; + + bool restart = true; + bool incremental_population = true; + + EA::Parameters EA_params; + }; + + explicit NIPES(std::unique_ptr controller, + Evaluator *evaluator, + EvaluationReporter *reporter, + const NIPES::NIPES_Parameters ¶ms, + int seed, + double evaluation_time, + unsigned int n_learning_evaluations, + const std::string& model_name); + + Controller *controller() override + { return _controller.get(); } + + ~NIPES(); + + + void epoch() override; + void init_next_pop() override; + + bool is_finish() override; + bool finish_eval() override; + + bool restarted(){return !cmaStrategy->log_stopping_criterias.empty();} + std::string pop_stopping_criterias(){ + std::string res = cmaStrategy->log_stopping_criterias.back(); + cmaStrategy->log_stopping_criterias.pop_back(); + return res; + } + const std::vector &get_archive(){return archive;} + +protected: + IPOPCMAStrategy::Ptr cmaStrategy; + cma::CMASolutions best_run; + bool _is_finish = false; + std::vector archive; + int reevaluated = 0; + NIPES::NIPES_Parameters Nipes_Param; +}; +} + + +#endif //NIPES_HPP \ No newline at end of file diff --git a/cpprevolve/revolve/brains/learner/NoLearner.h b/cpprevolve/revolve/brains/learner/NoLearner.h new file mode 100644 index 0000000000..019038e8d1 --- /dev/null +++ b/cpprevolve/revolve/brains/learner/NoLearner.h @@ -0,0 +1,41 @@ +// +// Created by matteo on 11/22/19. 
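+//
+// Learner that does not learn: it merely owns a controller, either passed in
+// or constructed in place from the forwarded arguments, and leaves every
+// optimisation hook empty, so the robot simply runs a fixed brain.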
+// +#pragma once + +#include +#include "Learner.h" +#include "../controller/DifferentialCPG.h" + +namespace revolve { + +template +class NoLearner : public Learner +{ +public: + explicit NoLearner(std::unique_ptr controller) + : Learner(nullptr, nullptr, 0, 0) //TODO add report + , _controller(std::move(controller)) + {} + + // This is inspired from the GNU `std::make_unique` source code + template + explicit NoLearner(_Args &&... args) + : Learner(nullptr, nullptr, 0, 0) //TODO add report + , _controller(new ControllerType(std::forward<_Args>(args)...)) + {} + + void optimize(double /*time*/, double /*dt*/) override {} + void init_first_controller() override {} + void init_next_controller() override {} + void finalize_current_controller(double /*fitness*/) override {} + void load_best_controller() override {} + + Controller *controller() override + { return this->_controller.get(); } + +protected: + std::unique_ptr _controller; +}; + +} diff --git a/cpprevolve/revolve/gazebo/CMakeLists.txt b/cpprevolve/revolve/gazebo/CMakeLists.txt index e01a3970fa..37cbce5ad5 100644 --- a/cpprevolve/revolve/gazebo/CMakeLists.txt +++ b/cpprevolve/revolve/gazebo/CMakeLists.txt @@ -78,7 +78,7 @@ if (LOCAL_GAZEBO_DIR) NO_DEFAULT_PATH) message(WARNING "Using local Gazebo @ ${gazebo_DIR}") else() - find_package(gazebo 9 REQUIRED) + find_package(gazebo 10 REQUIRED) endif() include_directories(${GAZEBO_INCLUDE_DIRS}) link_directories(${GAZEBO_LIBRARY_DIRS}) @@ -201,6 +201,7 @@ add_library( target_link_libraries( revolve-gazebo revolve-controllers + revolve-learners ${GAZEBO_LIBRARIES} ${Boost_LIBRARIES} ${GSL_LIBRARIES} @@ -226,6 +227,19 @@ target_link_libraries( ${GAZEBO_LIBRARIES} ) +# Create Realtime World plugin +add_library( + RealtimeWorldControlPlugin SHARED + plugin/RealtimeWorldController.cpp + plugin/register_realtime_world_plugin.cpp +) +target_link_libraries( + RealtimeWorldControlPlugin + revolve-gazebo + revolve-proto + ${GAZEBO_LIBRARIES} +) + # Create Analyzer plugin add_library( AnalyzerPlugin SHARED diff --git a/cpprevolve/revolve/gazebo/brains/Brains.h b/cpprevolve/revolve/gazebo/brains/Brains.h index c2bec687a1..81b978f76a 100644 --- a/cpprevolve/revolve/gazebo/brains/Brains.h +++ b/cpprevolve/revolve/gazebo/brains/Brains.h @@ -25,7 +25,7 @@ #include #include #include -#include +#include #include #endif // REVOLVE_GAZEBO_BRAINS_BRAINS_H_ diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPG.cpp b/cpprevolve/revolve/gazebo/brains/DifferentialCPG.cpp index 99b75e8233..5892c797f6 100644 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPG.cpp +++ b/cpprevolve/revolve/gazebo/brains/DifferentialCPG.cpp @@ -17,1175 +17,65 @@ * Author: Milan Jelisavcic & Maarten van Hooft * Date: December 29, 2018 * + * Cleaned up by andi on 06-10-19. 
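+ *
+ * This file is now a thin Gazebo adapter: it reads the controller parameters
+ * from the <rv:controller> SDF element (including the optional
+ * semicolon-separated "weights" attribute) and forwards Update() calls to the
+ * generic revolve::DifferentialCPG controller.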
+ * */ -// STL macros -#include -#include -#include -#include -#include -#include -#include -#include - -// Other libraries -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -// Project headers -#include "../motors/Motor.h" - -#include "../sensors/Sensor.h" - #include "DifferentialCPG.h" -#include "DifferentialCPG_BO.h" - -// TODO: Resolve odd behaviour at the end of the validation procedure -// This behaviour is not present if you directly load a trained controller - -// Define namespaces -namespace gz = gazebo; using namespace revolve::gazebo; -// Copied from the limbo tutorial the BO implementation is based on -using Mean_t = limbo::mean::Data; -using Init_t = limbo::init::LHS; -using Kernel_t = limbo::kernel::MaternFiveHalves; -using GP_t = limbo::model::GP; - -/** - * Constructor for DifferentialCPG class. - * - * @param _model - * @param robot_config - */ -DifferentialCPG::DifferentialCPG( - const ::gazebo::physics::ModelPtr &_model, - const sdf::ElementPtr robot_config, - const std::vector< revolve::gazebo::MotorPtr > &_motors, - const std::vector< revolve::gazebo::SensorPtr > &_sensors) - : next_state(nullptr) - , input(new double[_sensors.size()]) - , output(new double[_motors.size()]) -{ - - this->learner = robot_config->GetElement("rv:brain")->GetElement("rv:learner"); - - // Check for brain - if (not robot_config->HasElement("rv:brain")) - { - throw std::runtime_error("DifferentialCPG brain did not receive brain"); - } - auto brain = robot_config->GetElement("rv:brain"); - - // Check for learner - if (not brain->HasElement("rv:learner")) - { - throw std::runtime_error("DifferentialCPG brain did not receive learner"); - } - auto learner = brain->GetElement("rv:learner"); - - // Check for controller - if (not brain->HasElement("rv:controller")) - { - throw std::runtime_error("DifferentialCPG brain did not receive controller"); - } - auto controller = brain->GetElement("rv:controller"); - - // Check for actuators - if (not brain->HasElement("rv:actuators")) - { - throw std::runtime_error("DifferentialCPG brain did not receive actuators"); - } - auto actuators = brain->GetElement("rv:actuators"); - - // Controller parameters - this->reset_neuron_state_bool = std::stoi(controller->GetAttribute("reset_neuron_state_bool")->GetAsString()); - this->reset_neuron_random = std::stoi(controller->GetAttribute("reset_neuron_random")->GetAsString()); - this->init_neuron_state = std::stod(controller->GetAttribute("init_neuron_state")->GetAsString()); - this->range_lb = -std::stod(controller->GetAttribute("range_ub")->GetAsString()); - this->range_ub = std::stod(controller->GetAttribute("range_ub")->GetAsString()); - this->use_frame_of_reference = std::stoi(controller->GetAttribute("use_frame_of_reference")->GetAsString()); - this->signal_factor_all_ = std::stod(controller->GetAttribute("signal_factor_all")->GetAsString()); - this->signal_factor_mid = std::stod(controller->GetAttribute("signal_factor_mid")->GetAsString()); - this->signal_factor_left_right = std::stod(controller->GetAttribute("signal_factor_left_right")->GetAsString()); - - // Limbo BO Learner parameters - this->kernel_noise_ = std::stod(learner->GetAttribute("kernel_noise")->GetAsString()); - this->kernel_optimize_noise_ = std::stoi(learner->GetAttribute("kernel_optimize_noise")->GetAsString()); - this->kernel_sigma_sq_ = std::stod(learner->GetAttribute("kernel_sigma_sq")->GetAsString()); - this->kernel_l_ = std::stod(learner->GetAttribute("kernel_l")->GetAsString()); 
- this->kernel_squared_exp_ard_k_ = std::stoi(learner->GetAttribute("kernel_squared_exp_ard_k")->GetAsString()); - this->acqui_gpucb_delta_ = std::stod(learner->GetAttribute("acqui_gpucb_delta")->GetAsString()); - this->acqui_ucb_alpha_ = std::stod(learner->GetAttribute("acqui_ucb_alpha")->GetAsString()); - this->acqui_ei_jitter_ = std::stod(learner->GetAttribute("acqui_ei_jitter")->GetAsString()); - - // Non-limbo BO learner para - this->n_init_samples = std::stoi(learner->GetAttribute("n_init_samples")->GetAsString()); - this->n_learning_iterations = std::stoi(learner->GetAttribute("n_learning_iterations")->GetAsString()); - this->n_cooldown_iterations = std::stoi(learner->GetAttribute("n_cooldown_iterations")->GetAsString()); - this->init_method = learner->GetAttribute("init_method")->GetAsString(); - - // Meta parameters - this->startup_time = std::stoi(controller->GetAttribute("startup_time")->GetAsString()); - this->reset_robot_position = std::stoi(controller->GetAttribute("reset_robot_position")->GetAsString()); - this->run_analytics = std::stoi(controller->GetAttribute("run_analytics")->GetAsString()); - this->load_brain = controller->GetAttribute("load_brain")->GetAsString(); - this->evaluation_rate = std::stoi(learner->GetAttribute("evaluation_rate")->GetAsString()); - this->abs_output_bound = std::stoi(learner->GetAttribute("abs_output_bound")->GetAsString()); - this->verbose = std::stoi(controller->GetAttribute("verbose")->GetAsString()); - - // Create transport node - this->node_.reset(new gz::transport::Node()); - this->node_->Init(); - - // Get Robot - this->robot = _model; - this->n_motors = _motors.size(); - auto name = _model->GetName(); - - if(this->verbose) - { - std::cout << robot_config->GetDescription() << std::endl; - } - auto motor = actuators->HasElement("rv:servomotor") - ? actuators->GetElement("rv:servomotor") - : sdf::ElementPtr(); - auto j = 0; - while(motor) - { - if (not motor->HasAttribute("coordinates")) - { - std::cerr << "Missing required motor coordinates" << std::endl; - throw std::runtime_error("Robot brain error"); - } - - // Split string and get coordinates - auto coordinate_string = motor->GetAttribute("coordinates")->GetAsString(); - std::vector coordinates; - boost::split(coordinates, coordinate_string, boost::is_any_of(";")); - - // Check if we have exactly 2 coordinates - if (not coordinates.size() == 2) - { - throw std::runtime_error("Coordinates are not exactly of length two "); - } - - // Check if the coordinates are integers - try - { - for(auto coord : coordinates) - { - std::stoi(coord); - } - } - catch(std::invalid_argument e1) - { - std::cout << "Invalid argument: Cannot cast coordinates to integers " << std::endl; - }; - - // Pass coordinates - auto coord_x = std::stoi(coordinates[0]); - auto coord_y = std::stoi(coordinates[1]); - if (this->verbose) - { - std::cout << "coord_x,coord_y = " << coord_x << "," << coord_y << std::endl; - } - auto motor_id = motor->GetAttribute("part_id")->GetAsString(); - this->positions[motor_id] = {coord_x, coord_y}; - this->motor_coordinates[{coord_x, coord_y}] = j; - - // Set frame of reference - int frame_of_reference = 0; - // We are a left neuron - if (coord_x < 0) - { - frame_of_reference = -1; - } - // We are a right neuron - else if (coord_x > 0) - { - frame_of_reference = 1; - } - - // Save neurons: bias/gain/state. Make sure initial states are of different sign. 
- this->neurons[{coord_x, coord_y, 1}] = {0.f, 0.f, this->init_neuron_state, frame_of_reference}; //Neuron A - this->neurons[{coord_x, coord_y, -1}] = {0.f, 0.f, -this->init_neuron_state, frame_of_reference}; // Neuron B - - // TODO: Add check for duplicate coordinates - motor = motor->GetNextElement("rv:servomotor"); - j++; - } - - // Add connections between neighbouring neurons - int i = 0; - for (const auto &position : this->positions) - { - // Get name and x,y-coordinates of all neurons. - auto name = position.first; - int x, y; std::tie(x, y) = position.second; - - // Continue to next iteration in case there is already a connection between the 1 and -1 neuron. - // These checks feel a bit redundant. - // if A->B connection exists. - if (this->connections.count({x, y, 1, x, y, -1})) - { - continue; - } - // if B->A connection exists: - if (this->connections.count({x, y, -1, x, y, 1})) - { - continue; - } - - // Loop over all positions. We call it neighbours, but we still need to check if they are a neighbour. - for (const auto &neighbour : this->positions) - { - // Get information of this neuron (that we call neighbour). - int near_x, near_y; std::tie(near_x, near_y) = neighbour.second; - - // If there is a node that is a Moore neighbour, we set it to be a neighbour for their A-nodes. - // Thus the connections list only contains connections to the A-neighbourhood, and not the - // A->B and B->A for some node (which makes sense). - int dist_x = std::abs(x - near_x); - int dist_y = std::abs(y - near_y); - - // TODO: Verify for non-spiders - if (dist_x + dist_y == 2) - { - if(std::get<0>(this->connections[{x, y, 1, near_x, near_y, 1}]) != 1 or - std::get<0>(this->connections[{near_x, near_y, 1, x, y, 1}]) != 1) - { - if(this->verbose) - { - std::cout << "New connection at index " << i << ": " << x << ", " << y << ", " << near_x << ", " << near_y << std::endl; - } - this->connections[{x, y, 1, near_x, near_y, 1}] = std::make_tuple(1, i); - this->connections[{near_x, near_y, 1, x, y, 1}] = std::make_tuple(1, i); - i++; - } - } - } - } - - // Create directory for output. - this->directory_name = controller->GetAttribute("output_directory")->GetAsString(); - if(this->directory_name.empty()) - { - this->directory_name = "output/cpg_bo/"; - this->directory_name += std::to_string(time(0)) + "/"; - } - - std::system(("mkdir -p " + this->directory_name).c_str()); - - // Initialise array of neuron states for Update() method - this->next_state = new double[this->neurons.size()]; - this->n_weights = (int)(this->connections.size()/2) + this->n_motors; - - // Check if we want to load a pre-trained brain - if(!this->load_brain.empty()) - { - // Get line - if(this->verbose) - { - std::cout << "I will load the following brain:" << std::endl; - } - std::ifstream brain_file(this->load_brain); - std::string line; - std::getline(brain_file, line); - - // Get weights in line - std::vector weights; - boost::split(weights, line, boost::is_any_of(",")); - - // Save weights for brain - Eigen::VectorXd loaded_brain(this->n_weights); - for(size_t j = 0; j < this->n_weights; j++) - { - loaded_brain(j) = std::stod(weights.at(j)); - if(this->verbose) - { - std::cout << loaded_brain(j) << ","; - } - } - if(this->verbose) - { - std::cout << std::endl; - } - - // Close brain - brain_file.close(); - - // Save these weights - this->samples.push_back(loaded_brain); - - // Set ODE matrix at initialization - this->set_ode_matrix(); - - // Go directly into cooldown phase: Note we do require that best_sample is filled. 
Check this - this->current_iteration = this->n_init_samples + this->n_learning_iterations; - - if(this->verbose) - { - std::cout << std::endl << "Brain has been loaded." << std::endl; - } - } - else - { - if (this->verbose) - { - std::cout << "Don't load existing brain" << std::endl; - } +DifferentialCPG::DifferentialCPG(const sdf::ElementPtr brain_sdf, + const std::vector &_motors) + : Brain() + , revolve::DifferentialCPG(load_params_from_sdf(brain_sdf), _motors) +{} - // Initialize BO - this->bo_init_sampling(); - } +DifferentialCPG::DifferentialCPG(const sdf::ElementPtr brain_sdf, + const std::vector &_motors, + const NEAT::Genome &genome) + : Brain() + , revolve::DifferentialCPG(load_params_from_sdf(brain_sdf), _motors, genome) +{} - // Initiate the cpp Evaluator - this->evaluator.reset(new Evaluator(this->evaluation_rate)); - this->evaluator->directory_name = this->directory_name; -} -/** - * Destructor - */ -DifferentialCPG::~DifferentialCPG() +void DifferentialCPG::Update(const std::vector &_motors, + const std::vector &_sensors, + const double _time, + const double _step) { - delete[] this->next_state; - delete[] this->input; - delete[] this->output; + this->::revolve::DifferentialCPG::update(_motors, _sensors, _time, _step); } -/** - * Dummy function for limbo - */ -struct DifferentialCPG::evaluation_function{ - // Number of input dimension (samples.size()) - BO_PARAM(size_t, dim_in, 18); - - // number of dimensions of the fitness - BO_PARAM(size_t, dim_out, 1); - - Eigen::VectorXd operator()(const Eigen::VectorXd &x) const { - return limbo::tools::make_vector(0); - }; -}; - -/** - * Performs the initial random sampling for BO - */ -void DifferentialCPG::bo_init_sampling(){ - if(this->verbose) - { - // We only want to optimize the weights for now. - std::cout << "Number of weights = connections/2 + n_motors are " - << this->connections.size()/2 - << " + " - << this->n_motors - << std::endl; - - // Information purposes - std::cout << std::endl << "Sample method: " << this->init_method << ". Initial " - "samples are: " << std::endl; - } - - // Random sampling - if(this->init_method == "RS") - { - for (size_t i = 0; i < this->n_init_samples; i++) - { - // Working variable to hold a random number for each weight to be optimized - Eigen::VectorXd init_sample(this->n_weights); - - // For all weights - for (size_t j = 0; j < this->n_weights; j++) - { - // Generate a random number in [0, 1]. Transform later - double f = ((double) rand() / (RAND_MAX)); - - // Append f to vector - init_sample(j) = f; - } - - // Save vector in samples. - this->samples.push_back(init_sample); - } - } - // Latin Hypercube Sampling - else if(this->init_method == "LHS") - { - // Working variable - double my_range = 1.f/this->n_init_samples; - - // If we have n dimensions, create n such vectors that we will permute - std::vector> all_dimensions; - - // Fill vectors - for (size_t i=0; i < this->n_weights; i++) - { - std::vector one_dimension; - - // Prepare for vector permutation - for (size_t j = 0; j < this->n_init_samples; j++) - { - one_dimension.push_back(j); - } - - // Vector permutation - std::random_shuffle(one_dimension.begin(), one_dimension.end() ); - - // Save permuted vector - all_dimensions.push_back(one_dimension); - } - - // For all samples - for (size_t i = 0; i < this->n_init_samples; i++) - { - // Initialize Eigen::VectorXd here. 
- Eigen::VectorXd init_sample(this->n_weights); - - // For all dimensions - for (size_t j = 0; j < this->n_weights; j++) - { - // Take a LHS - init_sample(j) = all_dimensions.at(j).at(i)*my_range + ((double) rand() / (RAND_MAX))*my_range; - } - - // Append sample to samples - this->samples.push_back(init_sample); - } - } - else - { - std::cout << "Please provide a choice of init_method in {LHS, RS}" << std::endl; - } - - // Print samples - if(this->verbose) - { - for(auto init_sample :this->samples) - { - for (int h = 0; h < init_sample.size(); h++) - { - std::cout << init_sample(h) << ", "; - } - std::cout << std::endl; - } - } -} - -/** - * Function that obtains the current fitness by calling the evaluator and stores it - */ -void DifferentialCPG::save_fitness(){ - // Get fitness - double fitness = this->evaluator->Fitness(); - - // Save sample if it is the best seen so far - if(fitness >this->best_fitness) - { - this->best_fitness = fitness; - this->best_sample = this->samples.back(); - } - - if (this->verbose) - { - std::cout << "Iteration number " << this->current_iteration << " has fitness " << - fitness << ". Best fitness: " << this->best_fitness << std::endl; - } - - // Limbo requires fitness value to be of type Eigen::VectorXd - Eigen::VectorXd observation = Eigen::VectorXd(1); - observation(0) = fitness; - - // Save fitness to std::vector. This fitness corresponds to the solution of the previous iteration - this->observations.push_back(observation); - - // Write fitness to file - std::ofstream fitness_file; - fitness_file.open(this->directory_name + "fitnesses.txt", std::ios::app); - fitness_file << fitness << std::endl; - fitness_file.close(); -} - - - -/** - * Struct that holds the parameters on which BO is called. This is required - * by limbo. - */ -struct DifferentialCPG::Params - { - - struct bayes_opt_boptimizer : public limbo::defaults::bayes_opt_boptimizer { - }; - - // depending on which internal optimizer we use, we need to import different parameters -#ifdef USE_NLOPT - struct opt_nloptnograd : public limbo::defaults::opt_nloptnograd { - }; -#elif defined(USE_LIBCMAES) - struct opt_cmaes : public lm::defaults::opt_cmaes { - }; -#else -#error(NO SOLVER IS DEFINED) -#endif - struct kernel : public limbo::defaults::kernel { - BO_PARAM(double, noise, 0.001); - BO_PARAM(bool, optimize_noise, false); - }; - - struct bayes_opt_bobase : public limbo::defaults::bayes_opt_bobase { - // set stats_enabled to prevent creating all the directories - BO_PARAM(bool, stats_enabled, false); - BO_PARAM(bool, bounded, true); - }; - - // 1 Iteration as we will perform limbo step by steop - struct stop_maxiterations : public limbo::defaults::stop_maxiterations { - BO_PARAM(int, iterations, 1); - }; - - struct kernel_exp : public limbo::defaults::kernel_exp { - /// @ingroup kernel_defaults - BO_PARAM(double, sigma_sq, 0.1); - BO_PARAM(double, l, 0.1); // the width of the kernel. 
Note that it assumes equally sized ranges over dimensions - }; - - struct kernel_squared_exp_ard : public limbo::defaults::kernel_squared_exp_ard { - /// @ingroup kernel_defaults - BO_PARAM(int, k, 3); // k number of columns used to compute M - /// @ingroup kernel_defaults - BO_PARAM(double, sigma_sq, 0.1); //brochu2010tutorial p.9 without sigma_sq - }; - - struct kernel_maternfivehalves : public limbo::defaults::kernel_maternfivehalves - { - BO_DYN_PARAM(double, sigma_sq); //brochu2010tutorial p.9 without sigma_sq - BO_DYN_PARAM(double, l); //characteristic length scale - }; - - struct acqui_gpucb : public limbo::defaults::acqui_gpucb { - //UCB(x) = \mu(x) + \kappa \sigma(x). - BO_PARAM(double, delta, 0.1 );//acqui_gpucb_delta_); // default delta = 0.1, delta in (0,1) convergence guaranteed - }; - - struct acqui_ei : public limbo::defaults::acqui_ei{ - BO_PARAM(double, jitter, 0.5); - }; - - // This is just a placeholder to be able to use limbo with revolve - struct init_lhs : public limbo::defaults::init_lhs{ - BO_PARAM(int, samples, 0); - }; - - struct acqui_ucb : public limbo::defaults::acqui_ucb { - //constexpr double ra = acqui_ucb_alpha_; - //UCB(x) = \mu(x) + \alpha \sigma(x). high alpha have high exploration - //iterations is high, alpha can be low for high accuracy in enough iterations. - // In contrast, the lsow iterations should have high alpha for high - // searching in limited iterations, which guarantee to optimal. - // BO_PARAM(double, alpha, transform_double(acqui_ucb_alpha_)); // default alpha = 0.5 - BO_DYN_PARAM(double, alpha); // default alpha = 0.5 - - }; -}; - -BO_DECLARE_DYN_PARAM(double, DifferentialCPG::Params::acqui_ucb, alpha); -BO_DECLARE_DYN_PARAM(double, DifferentialCPG::Params::kernel_maternfivehalves, sigma_sq); -BO_DECLARE_DYN_PARAM(double, DifferentialCPG::Params::kernel_maternfivehalves, l); - -/** - * Wrapper function that makes calls to limbo to solve the current BO - * iteration and returns the best sample - */ -void DifferentialCPG::bo_step(){ - Params::acqui_ucb::set_alpha(this->acqui_ucb_alpha_); - Params::kernel_maternfivehalves::set_l(this->kernel_l_); - Params::kernel_maternfivehalves::set_sigma_sq(this->kernel_sigma_sq_); - - // Save all parameters once - if (this->current_iteration == 0) - { - // Save parameters - this->save_parameters(); - } - Eigen::VectorXd x; - - // In case we are done with the initial random sampling. - if (this->current_iteration >= this->n_init_samples) - { - // std::cout << "Acquisition function: " << this->acquisition_function << std::endl; - if(true) - { - - // Specify bayesian optimizer. TODO: Make attribute and initialize at bo_init - limbo::bayes_opt::BOptimizer, - limbo::modelfun, - limbo::acquifun>> boptimizer; - - // Optimize. Pass dummy evaluation function and observations . - boptimizer.optimize(DifferentialCPG::evaluation_function(), - this->samples, - this->observations); - x = boptimizer.last_sample(); - - // Write parametesr to verify thread-stability after the run - std::ofstream dyn_parameters_file; - dyn_parameters_file.open(this->directory_name + "dynamic_parameters.txt", std::ios::app); - dyn_parameters_file << Params::acqui_ucb::alpha() << ","; - dyn_parameters_file << Params::kernel_maternfivehalves::sigma_sq() << ","; - dyn_parameters_file << Params::kernel_maternfivehalves::l() << std::endl; - dyn_parameters_file.close(); - - - } - // else if(this->acquisition_function == "GP_UCB") - // { - // // Specify bayesian optimizer. 
TODO: Make attribute and initialize at bo_init - // limbo::bayes_opt::BOptimizer, - // limbo::modelfun, - // limbo::acquifun>> boptimizer; - // - // // Optimize. Pass dummy evaluation function and observations . - // boptimizer.optimize(DifferentialCPG::evaluation_function(), - // this->samples, - // this->observations); - // x = boptimizer.last_sample(); - // } - // else if(this->acquisition_function == "EI") - // { - // // Specify bayesian optimizer. TODO: Make attribute and initialize at bo_init - // limbo::bayes_opt::BOptimizer, - // limbo::modelfun, - // limbo::acquifun>> boptimizer; - // - // // Optimize. Pass dummy evaluation function and observations . - // boptimizer.optimize(DifferentialCPG::evaluation_function(), - // this->samples, - // this->observations); - // x = boptimizer.last_sample(); - // } - else - { - std::cout << "Specify correct acquisition function: {EI, UCB, GP_UCB}" << std::endl; - } - - // Save this x_hat_star - this->samples.push_back(x); - } -} - -/** - * Callback function that defines the movement of the robot - * - * @param _motors - * @param _sensors - * @param _time - * @param _step - */ -void DifferentialCPG::Update( - const std::vector< revolve::gazebo::MotorPtr > &_motors, - const std::vector< revolve::gazebo::SensorPtr > &_sensors, - const double _time, - const double _step) +revolve::DifferentialCPG::ControllerParams DifferentialCPG::load_params_from_sdf(sdf::ElementPtr brain_sdf) { - // Prevent two threads from accessing the same resource at the same time - boost::mutex::scoped_lock lock(this->networkMutex_); - - // Read sensor data and feed the neural network - unsigned int p = 0; - for (const auto &sensor : _sensors) - { - sensor->read(this->input + p); - p += sensor->n_inputs(); - } - - this->evaluator->Update(this->robot->WorldPose(), _time, _step); - - // Only start recording the fitness after the startup time each iteration - double elapsed_evaluation_time = _time - this->start_time; - if((std::fmod(elapsed_evaluation_time, (int)this->evaluation_rate) >= this->startup_time) & this->start_fitness_recording) - { - // Update position -// this->evaluator->Update(this->robot->WorldPose(), _time, _step); - this->start_fitness_recording = false; - } - // Evaluate policy on certain time limit, or if we just started - if ((elapsed_evaluation_time > this->evaluation_rate) or ((_time - _step) < 0.001)) - { - // Update position -// this->evaluator->Update(this->robot->WorldPose(), _time, _step); - this->start_fitness_recording = true; - - // Get and save fitness (but not at start) - if(not (_time - _step < 0.001 )) - { - this->save_fitness(); - } - - // Reset robot if opted to do - if(this->reset_robot_position) - { - //this->robot->Reset(); - this->robot->ResetPhysicsStates(); - auto start_pose = ::ignition::math::Pose3d(); - start_pose.Set(0.0, 0.0, 0.05, 0.0, 0.0, 0.0); - this->robot->SetWorldPose(start_pose); - this->robot->Update(); - } - - // Reset neuron state if opted to do - if(this->reset_neuron_state_bool) - { - this->reset_neuron_state(); - } - - // If we are still learning - if(this->current_iteration < this->n_init_samples + this->n_learning_iterations) - { - if(this->verbose) - { - if (this->current_iteration < this->n_init_samples) - { - std::cout << std::endl << "Evaluating initial random sample" << std::endl; - } - else - { - std::cout << std::endl << "I am learning " << std::endl; + // Get all params from the sdf + // TODO: Add exception handling + sdf::ElementPtr controller_sdf = brain_sdf->GetElement("rv:controller"); + 
revolve::DifferentialCPG::ControllerParams params; + params.reset_neuron_random = (controller_sdf->GetAttribute("reset_neuron_random")->GetAsString() == "true"); + params.use_frame_of_reference = (controller_sdf->GetAttribute("use_frame_of_reference")->GetAsString() == "true"); + params.init_neuron_state = stod(controller_sdf->GetAttribute("init_neuron_state")->GetAsString()); + params.range_ub = stod(controller_sdf->GetAttribute("range_ub")->GetAsString()); + params.output_signal_factor = stod(controller_sdf->GetAttribute("signal_factor_all")->GetAsString()); + params.abs_output_bound = stod(controller_sdf->GetAttribute("abs_output_bound")->GetAsString()); + + // Get the weights from the sdf: + // If loading with CPPN, the weights attribute does not exist + if (controller_sdf->HasAttribute("weights")) { + std::string sdf_weights = controller_sdf->GetAttribute("weights")->GetAsString(); + std::string delimiter = ";"; + + size_t pos = 0; + std::string token; + while ((pos = sdf_weights.find(delimiter)) != std::string::npos) { + token = sdf_weights.substr(0, pos); + params.weights.push_back(stod(token)); + sdf_weights.erase(0, pos + delimiter.length()); } - } - // Get new sample (weights) and add sample - this->bo_step(); - - // Set new weights - this->set_ode_matrix(); - - // Update position -// this->evaluator->Update(this->robot->WorldPose(), _time, _step); - } - // If we are finished learning but are cooling down - reset once - else if((this->current_iteration >= (this->n_init_samples + - this->n_learning_iterations)) - and (this->current_iteration < (this->n_init_samples + - this->n_learning_iterations + - this->n_cooldown_iterations - 1))) - { - if(this->verbose) - { - std::cout << std::endl << "I am cooling down " << std::endl; - } - - // Update robot position -// this->evaluator->Update(this->robot->WorldPose(), _time, _step); - - // Use best sample in next iteration - this->samples.push_back(this->best_sample); - - // Set ODE matrix - this->set_ode_matrix(); + // push the last element that does not end with the delimiter + params.weights.push_back(stod(sdf_weights)); } - // Else we don't want to update anything, but construct plots from this run once. - else - { -// // Create plots -// if(this->run_analytics) -// { -// // Construct plots -// this->get_analytics(); -// } - - // Exit - if(this->verbose) - { - std::cout << std::endl << "I am finished " << std::endl; - } - std::exit(0); - } - - // Evaluation policy here - this->start_time = _time; - this->evaluator->Reset(); - this->current_iteration += 1; - } - - // Send new signals to the motors - this->step(_time, this->output); - p = 0; - for (const auto &motor: _motors) - { - motor->write(this->output + p, _step); - p += motor->n_outputs(); - } -} - -/** - * Make matrix of weights A as defined in dx/dt = Ax. 
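// --- Editorial sketch, not part of this patch: the coupled linear system dx/dt = A x
// --- that the weight matrix described above defines, reduced to a single A<->B neuron
// --- pair and integrated with a plain Euler step purely for illustration (the controller
// --- itself uses boost::numeric::odeint's runge_kutta4 stepper). All values are made up.
#include <array>
#include <cstdio>

int main()
{
    const double w = 0.5;                                              // hypothetical A<->B weight
    std::array<std::array<double, 2>, 2> A = {{{0.0, w}, {-w, 0.0}}};  // antisymmetric coupling
    std::array<double, 2> x = {{0.707, -0.707}};                       // states start on the unit circle
    const double dt = 0.01;

    for (int step = 0; step < 1000; ++step)
    {
        std::array<double, 2> dxdt = {{0.0, 0.0}};
        for (int i = 0; i < 2; ++i)
            for (int j = 0; j < 2; ++j)
                dxdt[i] += x[j] * A[j][i];     // same index convention as set_ode_matrix/step
        for (int i = 0; i < 2; ++i)
            x[i] += dt * dxdt[i];              // states oscillate; Euler drifts slightly
    }
    std::printf("x = (%f, %f)\n", x[0], x[1]);
    return 0;
}
// --- End of editorial sketch.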
- * Element (i,j) specifies weight from neuron i to neuron j in the system of ODEs - */ -void DifferentialCPG::set_ode_matrix(){ - // Initiate new matrix - std::vector> matrix; - - // Fill with zeroes - for(size_t i =0; i neurons.size(); i++) - { - // Initialize row in matrix with zeros - std::vector< double > row; - for (size_t j = 0; j < this->neurons.size(); j++) - { - row.push_back(0); - } - matrix.push_back(row); - } - - // Process A<->B connections - int index = 0; - for(size_t i =0; i neurons.size(); i++) - { - // Get correct index - int c = 0; - if (i%2 == 0){ - c = i + 1; - } - else{ - c = i - 1; - } - - // Add a/b connection weight - index = (int)(i/2); - auto w = this->samples.at(this->current_iteration)(index) * - (this->range_ub - this->range_lb) + this->range_lb; - matrix[i][c] = w; - matrix[c][i] = -w; - } - - // A<->A connections - index++; - int k = 0; - std::vector connections_seen; - - for (auto const &connection : this->connections) - { - // Get connection information - int x1, y1, z1, x2, y2, z2; - std::tie(x1, y1, z1, x2, y2, z2) = connection.first; - - // Find location of the two neurons in this->neurons list - int l1, l2; - int c = 0; - for(auto const &neuron : this->neurons) - { - int x, y, z; - std::tie(x, y, z) = neuron.first; - if (x == x1 and y == y1 and z == z1) - { - l1 = c; - } - else if (x == x2 and y == y2 and z == z2) - { - l2 = c; - } - // Update counter - c++; - } - - // Add connection to seen connections - if(l1 > l2) - { - int l1_old = l1; - l1 = l2; - l2 = l1_old; - } - std::string connection_string = std::to_string(l1) + "-" + std::to_string(l2); - - // if not in list, add to list - auto connections_list = std::find(connections_seen.begin(), connections_seen.end(), connection_string); - if(connections_list == connections_seen.end()) - { - connections_seen.push_back(connection_string); - } - // else continue to next iteration - else{ - continue; - } - - // Get weight - auto w = this->samples.at(this->current_iteration)(index + k) * - (this->range_ub - this->range_lb) + this->range_lb; - - // Set connection in weight matrix - matrix[l1][l2] = w; - matrix[l2][l1] = -w; - k++; - } - - // Update matrix - this->ode_matrix = matrix; - - // Reset neuron state - this->reset_neuron_state(); - - // Save this sample to file - std::ofstream samples_file; - samples_file.open(this->directory_name + "samples.txt", std::ios::app); - auto sample = this->samples.at(this->current_iteration); - for(size_t j = 0; j < this->n_weights; j++) - { - samples_file << sample(j) << ", "; - } - samples_file << std::endl; - samples_file.close(); -} - - -/** - * Set states back to original value (that is on the unit circle) - */ -void DifferentialCPG::reset_neuron_state(){ - int c = 0; - for(auto const &neuron : this->neurons) - { - // Get neuron properties - int x, y, z, frame_of_reference; - double bias ,gain ,state; - std::tie(x, y, z) = neuron.first; - std::tie(bias, gain, state, frame_of_reference) = neuron.second; - - if (z == -1) - { - // Neuron B - if (this->reset_neuron_random) - { - this->neurons[{x, y, z}] = {0.f, - 0.f, - ((double) rand() / (RAND_MAX))*2*this->init_neuron_state - this->init_neuron_state, - frame_of_reference}; - } - else - { - this->neurons[{x, y, z}] = {0.f, 0.f, -this->init_neuron_state, frame_of_reference}; - } - } - else - { - // Neuron A - if (this->reset_neuron_random) - { - this->neurons[{x, y, z}] = {0.f, - 0.f, - ((double) rand() / (RAND_MAX))*2*this->init_neuron_state - this->init_neuron_state, - frame_of_reference}; - } - else - { - 
this->neurons[{x, y, z}] = {0.f, 0.f, +this->init_neuron_state, frame_of_reference}; - } - } - c++; - } -} - -/** - * Step function that is called from within Update() - * - * @param _time - * @param _output - */ -void DifferentialCPG::step( - const double _time, - double *_output) -{ - int neuron_count = 0; - for (const auto &neuron : this->neurons) - { - // Neuron.second accesses the second 3-tuple of a neuron, containing the bias/gain/state. - double recipient_bias, recipient_gain, recipient_state; - int frame_of_reference; - std::tie(recipient_bias, recipient_gain, recipient_state, frame_of_reference) = neuron.second; - - // Save for ODE - this->next_state[neuron_count] = recipient_state; - neuron_count++; - } - - // Copy values from next_state into x for ODEINT - state_type x(this->neurons.size()); - for (size_t i = 0; i < this->neurons.size(); i++) - { - x[i] = this->next_state[i]; - } - - // Stepper. The result is saved in x. Begin time t, time step dt - double dt = (_time - this->previous_time); - this->previous_time = _time; - - // Perform one step - stepper.do_step( - [this](const state_type &x, state_type &dxdt, double t) - { - for(size_t i = 0; i < this->neurons.size(); i++) - { - dxdt[i] = 0; - for(size_t j = 0; j < this->neurons.size(); j++) - { - dxdt[i] += x[j]*this->ode_matrix[j][i]; - } - } - }, - x, - _time, - dt); - - // Copy values into nextstate - for (size_t i = 0; i < this->neurons.size(); i++) - { - this->next_state[i] = x[i]; - } - - // Loop over all neurons to actually update their states. Note that this is a new outer for loop - auto i = 0; auto j = 0; - for (auto &neuron : this->neurons) - { - // Get bias gain and state for this neuron. Note that we don't take the coordinates. - // However, they are implicit as their order did not change. - double bias, gain, state; - int frame_of_reference; - std::tie(bias, gain, state, frame_of_reference) = neuron.second; - double x, y, z; - std::tie(x, y, z) = neuron.first; - neuron.second = {bias, gain, this->next_state[i], frame_of_reference}; - j = this->motor_coordinates[{x,y}]; - // Should be one, as output should be based on +1 neurons, which are the A neurons - if (i % 2 == 1) - { - // TODO: Add Milan's function here as soon as things are working a bit - // f(a) = (w_ao*a - bias)*gain - - // Apply saturation formula - auto x = this->next_state[i]; - - // Use frame of reference - if(use_frame_of_reference) - { - - if (std::abs(frame_of_reference) == 1) - { - this->output[j] = this->signal_factor_left_right*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0*x/this->abs_output_bound)) -1); - } - else if (frame_of_reference == 0) - { - this->output[j] = this->signal_factor_mid*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0*x/this->abs_output_bound)) -1); - } - else - { - std::cout << "WARNING: frame_of_reference not in {-1,0,1}." 
<< std::endl; - } - - } - // Don't use frame of reference - else{ - this->output[j] = this->signal_factor_all_*this->abs_output_bound*((2.0)/(1.0 + std::pow(2.718, -2.0*x/this->abs_output_bound)) -1); - } - } - i++; - } - - // Comment to save disk space -// // Write state to file -// std::ofstream state_file; -// state_file.open(this->directory_name + "states.txt", std::ios::app); -// for(size_t i = 0; i < this->neurons.size(); i++) -// { -// state_file << this->next_state[i] << ","; -// } -// state_file << std::endl; -// state_file.close(); -// -// // Write signal to file -// std::ofstream signal_file; -// signal_file.open(this->directory_name + "signal.txt", std::ios::app); -// for(size_t i = 0; i < this->n_motors; i++) -// { -// signal_file << this->output[i] << ","; -// } -// signal_file << std::endl; -// signal_file.close(); -} - - -/** - * Save the parameters used in this run to a file. - */ -void DifferentialCPG::save_parameters(){ - // Write parameters to file - std::ofstream parameters_file; - parameters_file.open(this->directory_name + "parameters.txt"); - - // Various parameters - parameters_file << "Dimensions: " << this->n_weights << std::endl; - parameters_file << "n_init_samples: " << this->n_init_samples << std::endl; - parameters_file << "n_learning_iterations: " << this->n_learning_iterations << std::endl; - parameters_file << "n_cooldown_iterations: " << this->n_cooldown_iterations << std::endl; - parameters_file << "evaluation_rate: " << this->evaluation_rate << std::endl; - parameters_file << "abs_output_bound: " << this->abs_output_bound << std::endl; - parameters_file << "signal_factor_all: " << this->signal_factor_all_ << std::endl; - parameters_file << "range_lb: " << this->range_lb << std::endl; - parameters_file << "range_ub: " << this->range_ub << std::endl; - parameters_file << "run_analytics: " << this->run_analytics << std::endl; - parameters_file << "load_brain: " << this->load_brain << std::endl; - parameters_file << "reset_robot_position: " << this->reset_robot_position << std::endl; - parameters_file << "reset_neuron_state_bool: " << this->reset_neuron_state_bool << std::endl; - parameters_file << "reset_neuron_random: " << this->reset_neuron_random << std::endl; - parameters_file << "initial state value: " << this->init_neuron_state << std::endl; - - // BO hyper-parameters - parameters_file << std::endl << "Initialization method used: " << this->init_method << std::endl; - parameters_file << "Acqui. function used: " << this->acquisition_function << std::endl; - parameters_file << "EI jitter: " <directory_name - + " " - + std::to_string((int)this->n_init_samples) - + " " - + std::to_string((int)this->n_cooldown_iterations); - // Execute python command - std::system(std::string("python3 " + plot_command).c_str()); + return params; } diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPG.h b/cpprevolve/revolve/gazebo/brains/DifferentialCPG.h index 2595f83621..d85e978984 100644 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPG.h +++ b/cpprevolve/revolve/gazebo/brains/DifferentialCPG.h @@ -1,275 +1,52 @@ -/* - * Copyright (C) 2015-2018 Vrije Universiteit Amsterdam - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * Description: TODO: - * Author: Milan Jelisavcic - * Date: December 29, 2018 - * - */ +// +// Created by andi on 20-09-19. +// -#ifndef REVOLVE_DIFFERENTIALCPG_H_ -#define REVOLVE_DIFFERENTIALCPG_H_ +#pragma once -// Standard libraries -#include -#include - -// External libraries -#include -#include - -// Project headers -#include "Evaluator.h" +#include +#include #include "Brain.h" -/// These numbers are quite arbitrary. It used to be in:13 out:8 for the -/// Arduino, but I upped them both to 20 to accommodate other scenarios. -/// Should really be enforced in the Python code, this implementation should -/// not be the limit. -#define MAX_INPUT_NEURONS 20 -#define MAX_OUTPUT_NEURONS 20 - -/// Arbitrary value -#define MAX_HIDDEN_NEURONS 30 - -/// Convenience -#define MAX_NON_INPUT_NEURONS (MAX_HIDDEN_NEURONS + MAX_OUTPUT_NEURONS) - -/// (bias, tau, gain) or (phase offset, period, gain) -#define MAX_NEURON_PARAMS 3 - -typedef std::vector< double > state_type; - namespace revolve { - namespace gazebo { - class DifferentialCPG - : public Brain + namespace gazebo { - /// \brief Constructor - /// \param[in] _modelName Name of the robot - /// \param[in] _node The brain node - /// \param[in] _motors Reference to a motor list, it be reordered - /// \param[in] _sensors Reference to a sensor list, it might be reordered - public: - DifferentialCPG( - const ::gazebo::physics::ModelPtr &_model, - const sdf::ElementPtr robot_config, - const std::vector< MotorPtr > &_motors, - const std::vector< SensorPtr > &_sensors); - - public: void set_ode_matrix(); - - /// \brief Destructor - public: virtual ~DifferentialCPG(); - - /// \brief The default update method for the controller - /// \param[in] _motors Motor list - /// \param[in] _sensors Sensor list - /// \param[in] _time Current world time - /// \param[in] _step Current time step - public: - virtual void Update( - const std::vector< MotorPtr > &_motors, - const std::vector< SensorPtr > &_sensors, - const double _time, - const double _step); - - protected: - void step( - const double _time, - double *_output); - - /// \brief Register of motor IDs and their x,y-coordinates - protected: std::map< std::string, std::tuple< int, int > > positions; - - public: std::map< std::tuple< int, int>, int> motor_coordinates; - - - /// \brief Register of individual neurons in x,y,z-coordinates - /// \details x,y-coordinates define position of a robot's module and - // z-coordinate define A or B neuron (z=1 or -1 respectively). Stored - // values are a bias, gain, state and frame of reference of each neuron. - protected: - std::map< std::tuple< int, int, int >, std::tuple< double, double, double, int > > - neurons; - - /// \brief Register of connections between neighnouring neurons - /// \details Coordinate set of two neurons (x1, y1, z1) and (x2, y2, z2) - // define a connection. The second tuple contains 1: the connection value and - // 2: the weight index corresponding to this connection. 
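// --- Editorial sketch, not part of this patch: the coordinate-keyed registries the
// --- comments above describe, using std::map with std::tuple keys. Coordinates,
// --- weights and states below are invented for the example.
#include <iostream>
#include <map>
#include <tuple>

int main()
{
    // (x, y, z) -> (bias, gain, state, frame_of_reference); z = 1 / -1 selects the A / B neuron
    std::map<std::tuple<int, int, int>, std::tuple<double, double, double, int>> neurons;
    neurons[{0, 0, 1}]  = {0.0, 0.0,  0.707, 0};
    neurons[{0, 0, -1}] = {0.0, 0.0, -0.707, 0};

    // (x1, y1, z1, x2, y2, z2) -> (connection value, weight index)
    std::map<std::tuple<int, int, int, int, int, int>, std::tuple<double, int>> connections;
    connections[{0, 0, 1, 1, 0, 1}] = {1.0, 0};

    for (const auto &neuron : neurons)
    {
        int x, y, z;
        std::tie(x, y, z) = neuron.first;
        std::cout << "neuron (" << x << ", " << y << ", " << z << ") state "
                  << std::get<2>(neuron.second) << std::endl;
    }
    return 0;
}
// --- End of editorial sketch.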
- protected: - std::map< std::tuple< int, int, int, int, int, int >, std::tuple > - connections; - - /// \brief Runge-Kutta 45 stepper - protected: boost::numeric::odeint::runge_kutta4< state_type > stepper; - - /// \brief Pointer to access parameters - private: sdf::ElementPtr learner; - - /// \brief Used to determine the next state array - private: double *next_state; - - /// \brief Used for ODE-int - protected: std::vector> ode_matrix; - protected: state_type x; - - /// \brief One input state for each input neuron - private: double *input; - - /// \brief Used to determine the output to the motors array - private: double *output; - - /// \brief Location where to save output - private: std::string directory_name; - - /// \brief Name of the robot - private: ::gazebo::physics::ModelPtr robot; - - /// \brief Init BO loop - public: void bo_init_sampling(); - - /// \brief Main BO loop - public: void bo_step(); - - /// \brief evaluation rate - private: double evaluation_rate; - - /// \brief Get fitness - private: void save_fitness(); - - /// \brief Pointer to the fitness evaluator - protected: EvaluatorPtr evaluator; - - /// \brief Holder for BO parameters - public: struct Params; - - /// \brief Save parameters - private: void save_parameters(); - - /// \brief Best fitness seen so far - private: double best_fitness = -10.0; - - /// \brief Sample corresponding to best fitness - private: Eigen::VectorXd best_sample; - - /// \brief Starting time - private: double start_time; - - /// \brief BO attributes - private: size_t current_iteration = 0; - - /// \brief Max number of iterations learning is allowed - private: size_t n_learning_iterations; - - /// \brief Number of initial samples - private: size_t n_init_samples; - - /// \brief Cool down period - private: size_t n_cooldown_iterations; - - /// \brief Limbo optimizes in [0,1] - private: double range_lb; - - /// \brief Limbo optimizes in [0,1] - private: double range_ub; - - /// \brief How to take initial random samples - private: std::string init_method; - - /// \brief All fitnesses seen so far. Called observations in limbo context - private: std::vector< Eigen::VectorXd > observations; - - /// \brief All samples seen so far. - private: std::vector< Eigen::VectorXd > samples; - - /// \brief The number of weights to optimize - private: size_t n_weights; - - /// \brief Dummy evaluation funtion to reduce changes to be made on the limbo package - public: struct evaluation_function; - - /// \brief Reset the robot to starting position each iteration. 
- private: bool reset_robot_position; - - /// \brief Reset neuron state at each iteration (also during validation) - private: bool reset_neuron_state_bool; - - /// \brief Factor to multiply output signal with - private: double signal_factor_all_; - - /// \brief Factor to multiply output signal with - private: double signal_factor_mid; - - /// \brief Factor to multiply output signal with - private: double signal_factor_left_right; - - /// \brief Function that resets neuron state - private: void reset_neuron_state(); - - /// \brief When reset a neuron state,do it randomly: - private: bool reset_neuron_random; - - /// \brief Boolean to enable/disable constructing plots - private: bool run_analytics; - - /// \brief Automatically generate plots - public: void get_analytics(); - - /// \brief Show output (1) or not (0) - public: int verbose; - - /// \brief Time to skip for fitness evaluation during training - public: int startup_time; - - /// \brief Helper for startup time - private: bool start_fitness_recording = true; - - /// \brief absolute bound on motor signal value - public: double abs_output_bound; - - /// \brief Holds the number of motors in the robot - private: size_t n_motors; - - /// \brief Helper for numerical integrator - private: double previous_time = 0; - - /// \brief Initial neuron state - private: double init_neuron_state; - - /// \brief Holder for loading a brain - private: std::string load_brain = ""; - - /// \brief Specifies the acquisition function used - public: std::string acquisition_function; - - /// \brief Use frame of reference {-1,0,1} version or not - private: bool use_frame_of_reference; - - // BO Learner parameters - private: double kernel_noise_; - private: bool kernel_optimize_noise_; - public: double kernel_sigma_sq_; - public: double kernel_l_; - private: int kernel_squared_exp_ard_k_; - private: double acqui_gpucb_delta_ ; - public: double acqui_ucb_alpha_; - private: double acqui_ei_jitter_; - }; - } + /// \brief connection between gazebo and revolve CPG + /// \details gets the sdf - model data and passes them to revolve + class DifferentialCPG: public Brain, public revolve::DifferentialCPG + { + public: + /// \brief Constructor + /// \param[in] brain_sdf ElementPtr containing the "brain" - tag of the model sdf + /// \param[in] _motors vector list of motors + /// \details Extracts controller parameters + /// from brain_sdf and calls revolve::DifferentialCPG's contructor. + explicit DifferentialCPG(const sdf::ElementPtr brain_sdf, + const std::vector< MotorPtr > &_motors); + + /// \brief updates the motor signals + /// \param[in] _motors vector list of motors + /// \param[in] _sensors vector list of sensors + /// \param[in] _time double + /// \param[in] _step double + void Update(const std::vector &_motors, + const std::vector &_sensors, + const double _time, + const double _step) override; + + protected: + explicit DifferentialCPG(const sdf::ElementPtr brain_sdf, + const std::vector &_motors, + const NEAT::Genome &genome); + + /// \brief extracts CPG controller parameters from brain_sdf + /// \param[in] brain_sdf ElementPtr containing the "brain" - tag of the model sdf + /// \return parameters of the CPG controller + /// \details get the strings of the controller parameters and convert them to the + /// appropriate datatype. Store them in a revolve::DifferentialCPG::ControllerParams + /// struct and return them. 
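// --- Editorial sketch, not part of this patch: the ';'-splitting that
// --- load_params_from_sdf uses to turn the SDF "weights" attribute string into
// --- doubles, restated as a standalone helper. parse_weights is a hypothetical name.
#include <iostream>
#include <string>
#include <vector>

std::vector<double> parse_weights(std::string sdf_weights, const std::string &delimiter = ";")
{
    std::vector<double> weights;
    size_t pos = 0;
    while ((pos = sdf_weights.find(delimiter)) != std::string::npos)
    {
        weights.push_back(std::stod(sdf_weights.substr(0, pos)));
        sdf_weights.erase(0, pos + delimiter.length());
    }
    weights.push_back(std::stod(sdf_weights));  // last element carries no trailing delimiter
    return weights;
}

int main()
{
    for (double w : parse_weights("0.25;-0.5;1.0"))
        std::cout << w << std::endl;            // prints 0.25, -0.5, 1
    return 0;
}
// --- End of editorial sketch.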
+ static revolve::DifferentialCPG::ControllerParams load_params_from_sdf(sdf::ElementPtr brain_sdf); + }; + } } - -#endif //REVOLVE_DIFFERENTIALCPG_H_ diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.cpp b/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.cpp deleted file mode 100644 index ebedfd3b6f..0000000000 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.cpp +++ /dev/null @@ -1,63 +0,0 @@ -// -// Created by andi on 06-10-19. -// - -#include "DifferentialCPGClean.h" - -using namespace revolve::gazebo; - -DifferentialCPGClean::DifferentialCPGClean(const sdf::ElementPtr brain_sdf, - const std::vector &_motors) - : Brain() - , revolve::DifferentialCPG(load_params_from_sdf(brain_sdf), _motors) -{} - -DifferentialCPGClean::DifferentialCPGClean(const sdf::ElementPtr brain_sdf, - const std::vector &_motors, - const NEAT::Genome &genome) - : Brain() - , revolve::DifferentialCPG(load_params_from_sdf(brain_sdf), _motors, genome) -{} - - -void DifferentialCPGClean::Update(const std::vector &_motors, - const std::vector &_sensors, - const double _time, - const double _step) -{ - this->::revolve::DifferentialCPG::update(_motors, _sensors, _time, _step); -} - -revolve::DifferentialCPG::ControllerParams DifferentialCPGClean::load_params_from_sdf(sdf::ElementPtr brain_sdf) { - // Get all params from the sdf - // TODO: Add exception handling - sdf::ElementPtr controller_sdf = brain_sdf->GetElement("rv:controller"); - revolve::DifferentialCPG::ControllerParams params; - params.reset_neuron_random = (controller_sdf->GetAttribute("reset_neuron_random")->GetAsString() == "true"); - params.use_frame_of_reference = (controller_sdf->GetAttribute("use_frame_of_reference")->GetAsString() == "true"); - params.init_neuron_state = stod(controller_sdf->GetAttribute("init_neuron_state")->GetAsString()); - params.range_ub = stod(controller_sdf->GetAttribute("range_ub")->GetAsString()); - params.signal_factor_all = stod(controller_sdf->GetAttribute("signal_factor_all")->GetAsString()); - params.signal_factor_mid = stod(controller_sdf->GetAttribute("signal_factor_mid")->GetAsString()); - params.signal_factor_left_right = stod(controller_sdf->GetAttribute("signal_factor_left_right")->GetAsString()); - params.abs_output_bound = stod(controller_sdf->GetAttribute("abs_output_bound")->GetAsString()); - - // Get the weights from the sdf: - // If loading with CPPN, the weights attribute does not exist - if (controller_sdf->HasAttribute("weights")) { - std::string sdf_weights = controller_sdf->GetAttribute("weights")->GetAsString(); - std::string delimiter = ";"; - - size_t pos = 0; - std::string token; - while ((pos = sdf_weights.find(delimiter)) != std::string::npos) { - token = sdf_weights.substr(0, pos); - params.weights.push_back(stod(token)); - sdf_weights.erase(0, pos + delimiter.length()); - } - // push the last element that does not end with the delimiter - params.weights.push_back(stod(sdf_weights)); - } - - return params; -} diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.h b/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.h deleted file mode 100644 index 46ea487a54..0000000000 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPGClean.h +++ /dev/null @@ -1,55 +0,0 @@ -// -// Created by andi on 20-09-19. 
-// - -#ifndef REVOLVE_DIFFERENTIALCPGCLEAN_H -#define REVOLVE_DIFFERENTIALCPGCLEAN_H - -#include -#include -#include "Brain.h" - -namespace revolve -{ - namespace gazebo - { - /// \brief connection between gazebo and revolve CPG - /// \details gets the sdf - model data and passes them to revolve - class DifferentialCPGClean: public Brain, private revolve::DifferentialCPG - { - public: - /// \brief Constructor - /// \param[in] brain_sdf ElementPtr containing the "brain" - tag of the model sdf - /// \param[in] _motors vector list of motors - /// \details Extracts controller parameters - /// from brain_sdf and calls revolve::DifferentialCPG's contructor. - explicit DifferentialCPGClean(const sdf::ElementPtr brain_sdf, - const std::vector< MotorPtr > &_motors); - - /// \brief updates the motor signals - /// \param[in] _motors vector list of motors - /// \param[in] _sensors vector list of sensors - /// \param[in] _time double - /// \param[in] _step double - void Update(const std::vector &_motors, - const std::vector &_sensors, - const double _time, - const double _step) override; - - protected: - explicit DifferentialCPGClean(const sdf::ElementPtr brain_sdf, - const std::vector &_motors, - const NEAT::Genome &genome); - - /// \brief extracts CPG controller parameters from brain_sdf - /// \param[in] brain_sdf ElementPtr containing the "brain" - tag of the model sdf - /// \return parameters of the CPG controller - /// \details get the strings of the controller parameters and convert them to the - /// appropriate datatype. Store them in a revolve::DifferentialCPG::ControllerParams - /// struct and return them. - static revolve::DifferentialCPG::ControllerParams load_params_from_sdf(sdf::ElementPtr brain_sdf); - }; - } -} - -#endif //REVOLVE_DIFFERENTIALCPGCLEAN_H diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.cpp b/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.cpp index eabd2013e3..8cf33fe469 100644 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.cpp +++ b/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.cpp @@ -6,7 +6,7 @@ #include "Brain.h" #include "DifferentialCPPNCPG.h" -#include "DifferentialCPGClean.h" +#include "DifferentialCPG.h" using namespace revolve::gazebo; @@ -26,7 +26,7 @@ bool string_replace(std::string& str, const std::string& from, const std::string DifferentialCPPNCPG::DifferentialCPPNCPG(const sdf::ElementPtr brain_sdf, const std::vector &motors) - : DifferentialCPGClean( + : DifferentialCPG( brain_sdf, motors, DifferentialCPPNCPG::load_cppn_genome_from_sdf(brain_sdf)) diff --git a/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.h b/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.h index 73b58f35e7..71efd803ff 100644 --- a/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.h +++ b/cpprevolve/revolve/gazebo/brains/DifferentialCPPNCPG.h @@ -6,7 +6,7 @@ #define REVOLVE_DIFFERENTIALCPPNCPG_H #include -#include "DifferentialCPGClean.h" +#include "DifferentialCPG.h" #include "Brain.h" @@ -15,7 +15,7 @@ namespace revolve { /// \brief connection between gazebo and revolve CPG with config CPPN /// \details gets the sdf - model data and passes them to revolve - class DifferentialCPPNCPG : public DifferentialCPGClean + class DifferentialCPPNCPG : public DifferentialCPG { public: /// \brief Constructor diff --git a/cpprevolve/revolve/gazebo/brains/Evaluator.cpp b/cpprevolve/revolve/gazebo/brains/Evaluator.cpp index e082cd1596..79c2004578 100644 --- a/cpprevolve/revolve/gazebo/brains/Evaluator.cpp +++ 
b/cpprevolve/revolve/gazebo/brains/Evaluator.cpp @@ -20,6 +20,10 @@ */ #include +#include +#include +#include + #include "Evaluator.h" @@ -35,66 +39,101 @@ double Evaluator::measure_distance( } ///////////////////////////////////////////////// -Evaluator::Evaluator(const double _evaluationRate, +Evaluator::Evaluator(const double evaluation_rate, + const bool reset_robot_position, + const ::gazebo::physics::ModelPtr &robot, const double step_saving_rate) - : last_step_time(-1) - , step_saving_rate(step_saving_rate) - , step_poses(0) + : ::revolve::Evaluator() + , evaluation_rate_(evaluation_rate) + , last_step_time(-1) + , step_saving_rate(step_saving_rate) + , step_poses(0) + , reset_robot_position(reset_robot_position) + , robot(robot) { - assert(_evaluationRate > 0 and "`_evaluationRate` should be greater than 0"); - this->evaluation_rate_ = _evaluationRate; - this->current_position_.Reset(); this->previous_position_.Reset(); this->start_position_.Reset(); - this->locomotion_type = "directed"; // {directed, gait} + this->locomotion_type = "gait"; // {turing_left,directed, gait} this->path_length = 0.0; + + std::string model_name = robot->GetName(); + this->output_dir = "./experiments/IMC/output"+model_name; + +// std::ofstream decom_file; +// decom_file.open(this->output_dir+"/fitness_decom.txt", std::ofstream::out | std::ofstream::trunc); +// decom_file.close(); + std::cout<<"Loading evaluator with task: "+this->locomotion_type<step_poses.clear(); //cleared to null - this->path_length = 0.0; - this->last_step_time = 0.0; - this->start_position_ = this->current_position_; + // Reset robot if opted to do + if (this->reset_robot_position) { + //this->robot->Reset(); + ::gazebo::physics::ModelPtr _robot = robot.lock(); + _robot->ResetPhysicsStates(); + auto start_pose = ::ignition::math::Pose3d(); + start_pose.Set(0.0, 0.0, 0.005, 0.0, 0.0, -.1); + for (const auto &joint_ : _robot->GetJoints()) { + std::string joint_name = joint_->GetScopedName(); + _robot->SetJointPosition(joint_name, 0.0); + joint_->SetPosition(0, 0.0); + } + _robot->SetWorldPose(start_pose); + for (const auto& joint_ : _robot->GetJoints()) { + std::string joint_name = joint_->GetScopedName(); + _robot->SetJointPosition(joint_name, 0.0); + joint_->SetPosition(0, 0.0); + + } + _robot->Update(); + this->current_position_ = start_pose; + } + + this->step_poses.clear(); //cleared to null + this->path_length = 0.0; + this->last_step_time = 0.0; + this->start_position_ = this->current_position_; } ///////////////////////////////////////////////// -double Evaluator::Fitness() +double Evaluator::fitness() { double fitness_value = 0.0; if(this->locomotion_type == "gait") { double dS; - dS = std::sqrt(std::pow(this->previous_position_.Pos().X() - + dS = std::sqrt(std::pow(this->start_position_.Pos().X() - this->current_position_.Pos().X(), 2) + - std::pow(this->previous_position_.Pos().Y() - + std::pow(this->start_position_.Pos().Y() - this->current_position_.Pos().Y(), 2)); fitness_value = dS / this->evaluation_rate_; + if(fitness_value > 5e-11){ + std::ofstream fitness_file; + fitness_file.open(this->output_dir + "/fitness_decom.txt", std::ios::app); + fitness_file << std::fixed + <start_position_.Pos().X() - this->current_position_.Pos().X(), 2)) + <<","<start_position_.Pos().Y() - this->current_position_.Pos().Y(), 2)) + <locomotion_type == "directed") { this->step_poses.push_back(this->current_position_); //step_poses: x y z roll pitch yaw - for (int i=1; i < this->step_poses.size(); i++) + for (size_t i=1; i < 
this->step_poses.size(); i++) { const auto &pose_i_1 = this->step_poses[i-1]; const auto &pose_i = this->step_poses[i]; this->path_length += Evaluator::measure_distance(pose_i_1, pose_i); - //save coordinations to coordinates.txt - std::ofstream coordinates; - coordinates.open(this->directory_name + "/coordinates.txt",std::ios::app); - - if(i == 1) - { - coordinates << std::fixed << start_position_.Pos().X() << " " << start_position_.Pos().Y() << std::endl; - } - coordinates << std::fixed << pose_i.Pos().X() << " " << pose_i.Pos().Y() << std::endl; } ////********** directed locomotion fitness function **********//// @@ -105,11 +144,6 @@ double Evaluator::Fitness() beta0 = 2 * M_PI - std::abs(beta0); } - //save direction to coordinates.txt: This is used to make Figure 8 - std::ofstream coordinates; - coordinates.open(this->directory_name + "/coordinates.txt",std::ios::app); - coordinates << std::fixed << beta0 << std::endl; - double beta1 = std::atan2( this->current_position_.Pos().Y() - this->start_position_.Pos().Y(), this->current_position_.Pos().X() - this->start_position_.Pos().X()); @@ -159,39 +193,90 @@ double Evaluator::Fitness() } //fitness_direction = dist_projection / (alpha + ksi) - penalty; - fitness_direction = std::abs(dist_projection) / path_length * - (dist_projection / (alpha + ksi) - penalty); +// fitness_direction = (dist_projection / (alpha + ksi) - penalty); + fitness_direction = dist_projection*std::abs(dist_projection) - dist_penalty*dist_penalty; fitness_value = fitness_direction; + + + double tot_dist = std::sqrt( + std::pow(dist_projection, 2.0) + std::pow(dist_penalty, 2.0)); + + // Write fitness to file + std::ofstream fitness_file; + fitness_file.open(this->output_dir + "/fitness_decom.txt", std::ios::app); + fitness_file << std::fixed + << fitness_value + <<","<locomotion_type == "turing_left") //anticlockwise + { + double orientations = 0.0; + double delta_orientations = 0.0; + double dS = 0.0; + for(int i = 1; i < this->step_poses.size(); i++) + { + const auto &pose_i_1 = this->step_poses[i-1]; + const auto &pose_i = this->step_poses[i]; + + dS = dS + Evaluator::measure_distance(pose_i_1, pose_i); + + double angle_i = pose_i.Rot().Yaw(); + double angle_i_1 = pose_i_1.Rot().Yaw(); + if(angle_i_1 > M_PI_2 and angle_i < - M_PI_2 ) // rotating left + { + delta_orientations = 2.0 * M_PI + angle_i - angle_i_1; + } + else if((angle_i_1 < - M_PI_2) and (angle_i > M_PI_2)) + { + delta_orientations = - (2.0 * M_PI - angle_i + angle_i_1); + } + else + { + delta_orientations = angle_i - angle_i_1; + } + orientations += delta_orientations; + + } + std::cout << "orientations: " << orientations << " dS: " << dS << std::endl; + double factor_orien_ds = 3.0; //TODO param + fitness_value = orientations - factor_orien_ds * dS; //dS in (0, 1.5) in 30s + } + + return fitness_value; } // update is always running in the loop -void Evaluator::Update(const ignition::math::Pose3d &_pose, - const double time, - const double step) +void Evaluator::simulation_update(const ignition::math::Pose3d &pose, + const double time, + const double step) { - // this->path_length += measure_distance(current_position_, _pose); + // this->path_length += measure_distance(current_position_, pose); this->previous_position_ = current_position_; - this->current_position_ = _pose; + this->current_position_ = pose; //If `last_step_time` is not initialized, do the initialization now if (this->last_step_time < 0) { this->last_step_time = time; // 0.005 - this->step_poses.push_back(_pose); + 
this->step_poses.push_back(pose); } //save the startPosition in the beginning of each iteration if (this->last_step_time < 0.001) // 0.001 < 0.005 { - this->step_poses.push_back(_pose); + this->step_poses.push_back(pose); this->last_step_time = time; } //update information each step if ((time - this->last_step_time) > this->evaluation_rate_ * this->step_saving_rate) { - this->step_poses.push_back(_pose); + this->step_poses.push_back(pose); this->last_step_time = time; }; } diff --git a/cpprevolve/revolve/gazebo/brains/Evaluator.h b/cpprevolve/revolve/gazebo/brains/Evaluator.h index 357634da57..4d22888f65 100644 --- a/cpprevolve/revolve/gazebo/brains/Evaluator.h +++ b/cpprevolve/revolve/gazebo/brains/Evaluator.h @@ -17,67 +17,70 @@ * */ -#ifndef REVOLVEBRAIN_BRAIN_EVALUATOR_H -#define REVOLVEBRAIN_BRAIN_EVALUATOR_H +#pragma once #include #include +#include -namespace revolve +namespace revolve { +namespace gazebo { +class Evaluator : public ::revolve::Evaluator { - namespace gazebo - { - class Evaluator - { - /// \brief Constructor - public: Evaluator(const double _evaluationRate, - const double step_saving_rate = 0.1); - - /// \brief Destructor - public: ~Evaluator(); - - /// \brief Initialisation method - public: void Reset(); - - /// \brief Retrieve the fitness - /// \return A fitness value according to a given formula - public: double Fitness(); - - public: double measure_distance( - const ignition::math::Pose3d &_pose1, - const ignition::math::Pose3d &_pose2); - - /// brief Specifies locomotion type - public: std::string locomotion_type; - - /// \brief Update the position - /// \param[in] _pose Current position of a robot - public: void Update(const ignition::math::Pose3d &_pose, - const double time, - const double step); - - /// \brief start position of a robot - protected: ignition::math::Pose3d start_position_; - - /// \brief Previous position of a robot - protected: ignition::math::Pose3d previous_position_; - - /// \brief Current position of a robot - protected: ignition::math::Pose3d current_position_; - - /// \brief - protected: double evaluation_rate_; - - protected: double path_length = 0.0; - - protected: double last_step_time; - protected: double step_saving_rate; - protected: std::vector step_poses; - // public: double current_dist_pro = 0.0; - public: std::string directory_name = ""; - }; - } -} +public: + /// \brief Constructor + Evaluator(double _evaluationRate, + bool reset_robot_position = true, + const ::gazebo::physics::ModelPtr &robot = nullptr, + double step_saving_rate = 0.1); + + /// \brief Destructor + ~Evaluator(); + + /// \brief Initialisation method + void reset() override; + + /// \brief Retrieve the fitness + /// \return A fitness value according to a given formula + double fitness() override; + + double measure_distance( + const ignition::math::Pose3d &_pose1, + const ignition::math::Pose3d &_pose2); + + /// brief Specifies locomotion type + std::string locomotion_type; + + /// \brief Update the position + /// \param[in] pose Current position of a robot + void simulation_update(const ignition::math::Pose3d &pose, + double time, + double step); + +protected: + /// \brief start position of a robot + ignition::math::Pose3d start_position_; -#endif // REVOLVEBRAIN_BRAIN_EVALUATOR_H + /// \brief Previous position of a robot + ignition::math::Pose3d previous_position_; + + /// \brief Current position of a robot + ignition::math::Pose3d current_position_; + + /// \brief + double evaluation_rate_; + + double path_length = 0.0; + + double last_step_time; + double 
step_saving_rate; + std::vector step_poses; + + const bool reset_robot_position; + const boost::weak_ptr<::gazebo::physics::Model> robot; + std::string output_dir; +}; + +} +} diff --git a/cpprevolve/revolve/gazebo/brains/GazeboReporter.cpp b/cpprevolve/revolve/gazebo/brains/GazeboReporter.cpp new file mode 100644 index 0000000000..fba278e9e5 --- /dev/null +++ b/cpprevolve/revolve/gazebo/brains/GazeboReporter.cpp @@ -0,0 +1,51 @@ +// +// Created by matteo on 12/5/19. +// + +#include "GazeboReporter.h" +#include +#include +#include + +using namespace revolve::gazebo; + + +GazeboReporter::GazeboReporter(const std::string id, ::gazebo::transport::NodePtr &node) + : EvaluationReporter(std::move(id)) + , last_eval(-1) +{ + robot_report_publisher = node->Advertise( + "~/revolve/robot_reports", 500); + message.set_id(robot_id); +} + +void GazeboReporter::report(const unsigned int eval, const bool dead, const double fitness) +{ + const std::lock_guard lock(message_mutex); + // construct protobuf message + message.set_eval(eval); + message.set_dead(dead); + message.set_fitness(fitness); + // behaviour is already collected in `simulation_update` + + // send msgs to Gazebo in python + this->robot_report_publisher->Publish(message); + + if (last_eval != eval) { + // Clear behaviour data + message.clear_behaviour(); + + last_eval = eval; + } +} + + +void GazeboReporter::simulation_update(const ignition::math::Pose3d &pose, + const ::gazebo::common::Time &time, + double /*step*/) +{ + const std::lock_guard lock(message_mutex); + ::revolve::msgs::BehaviourData *behaviour_data = message.add_behaviour(); + ::gazebo::msgs::Set(behaviour_data->mutable_pose(), pose); + ::gazebo::msgs::Set(behaviour_data->mutable_time(), time); +} diff --git a/cpprevolve/revolve/gazebo/brains/GazeboReporter.h b/cpprevolve/revolve/gazebo/brains/GazeboReporter.h new file mode 100644 index 0000000000..cfdb88898f --- /dev/null +++ b/cpprevolve/revolve/gazebo/brains/GazeboReporter.h @@ -0,0 +1,39 @@ +// +// Created by matteo on 12/5/19. 
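// --- Editorial sketch, not part of this patch: a plain-C++ analogy (no Gazebo or
// --- protobuf calls) of how GazeboReporter buffers behaviour samples under a mutex
// --- in simulation_update and clears the buffer when the evaluation index changes.
// --- All type and member names below are invented.
#include <iostream>
#include <mutex>
#include <vector>

struct PoseSample { double time, x, y; };

class BufferedReporter
{
public:
    void simulation_update(double time, double x, double y)
    {
        std::lock_guard<std::mutex> lock(mutex_);
        behaviour_.push_back({time, x, y});          // one sample per simulation step
    }

    void report(unsigned int eval, double fitness)
    {
        std::lock_guard<std::mutex> lock(mutex_);
        std::cout << "eval " << eval << " fitness " << fitness
                  << " (" << behaviour_.size() << " samples)" << std::endl;
        if (last_eval_ != static_cast<long>(eval))
        {
            behaviour_.clear();                      // start a fresh buffer for the next evaluation
            last_eval_ = eval;
        }
    }

private:
    std::mutex mutex_;
    std::vector<PoseSample> behaviour_;
    long last_eval_ = -1;
};

int main()
{
    BufferedReporter reporter;
    reporter.simulation_update(0.1, 0.0, 0.0);
    reporter.simulation_update(0.2, 0.1, 0.0);
    reporter.report(0, 0.33);
    return 0;
}
// --- End of editorial sketch.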
+// + +#pragma once + +#include +#include +#include +#include +#include +#include + +namespace revolve { +namespace gazebo { + +class GazeboReporter : public EvaluationReporter +{ +public: + explicit GazeboReporter(std::string id, ::gazebo::transport::NodePtr &node); + ~GazeboReporter() override = default; + + /// \brief Sends proto message to python in gazebo + void report(unsigned int eval, bool dead, double fitness) override; + + void simulation_update(const ignition::math::Pose3d &pose, + const ::gazebo::common::Time &time, + double step); + +private: + ::gazebo::transport::PublisherPtr robot_report_publisher; + ::revolve::msgs::LearningRobotStates message; + + std::mutex message_mutex; + long last_eval; +}; + +} +} diff --git a/cpprevolve/revolve/gazebo/brains/NeuralNetwork.cpp b/cpprevolve/revolve/gazebo/brains/NeuralNetwork.cpp index 77eebbbba7..c30df9b21b 100644 --- a/cpprevolve/revolve/gazebo/brains/NeuralNetwork.cpp +++ b/cpprevolve/revolve/gazebo/brains/NeuralNetwork.cpp @@ -54,7 +54,8 @@ NeuralNetwork::NeuralNetwork( const sdf::ElementPtr &_settings, const std::vector< MotorPtr > &_motors, const std::vector< SensorPtr > &_sensors) - : flipState_(false) + : Controller(ControllerType::NEURAL_NETWORK) + , flipState_(false) , nInputs_(0) , nOutputs_(0) , nHidden_(0) @@ -395,7 +396,7 @@ void NeuralNetwork::Step(const double _time) } ///////////////////////////////////////////////// -void NeuralNetwork::Update( +void NeuralNetwork::update( const std::vector< MotorPtr > &_motors, const std::vector< SensorPtr > &_sensors, const double _time, diff --git a/cpprevolve/revolve/gazebo/brains/NeuralNetwork.h b/cpprevolve/revolve/gazebo/brains/NeuralNetwork.h index a1147f6663..cf98dcb526 100644 --- a/cpprevolve/revolve/gazebo/brains/NeuralNetwork.h +++ b/cpprevolve/revolve/gazebo/brains/NeuralNetwork.h @@ -19,14 +19,14 @@ * */ -#ifndef REVOLVE_GAZEBO_BRAIN_NEURALNETWORK_H_ -#define REVOLVE_GAZEBO_BRAIN_NEURALNETWORK_H_ +#pragma once #include #include #include #include +#include #include @@ -53,8 +53,7 @@ namespace revolve SUPG }; - class NeuralNetwork - : public Brain + class NeuralNetwork : public ::revolve::Controller { /// \brief Constructor /// \param[in] _modelName Name of the robot @@ -75,11 +74,11 @@ namespace revolve /// \param[in] _sensors Sensor list /// \param[in] _time Current world time /// \param[in] _step Current time step - public: virtual void Update( + public: void update( const std::vector< MotorPtr > &_motors, const std::vector< SensorPtr > &_sensors, - const double _time, - const double _step); + double _time, + double _step) override; /// \brief Steps the neural network protected: void Step(const double _time); @@ -153,5 +152,3 @@ namespace revolve }; } /* namespace gazebo */ } /* namespace revolve */ - -#endif /* REVOLVE_GAZEBO_BRAIN_NEURALNETWORK_H_ */ diff --git a/cpprevolve/revolve/gazebo/brains/RLPower.cpp b/cpprevolve/revolve/gazebo/brains/RLPower.cpp index a2b8e7f1dc..4cffc23d72 100644 --- a/cpprevolve/revolve/gazebo/brains/RLPower.cpp +++ b/cpprevolve/revolve/gazebo/brains/RLPower.cpp @@ -41,16 +41,13 @@ RLPower::RLPower( const ::gazebo::physics::ModelPtr &_model, const sdf::ElementPtr &_settings, const std::vector< MotorPtr > &_motors, - const std::vector< SensorPtr > &_sensors) - : generationCounter_(0) + const std::vector< SensorPtr > &/*_sensors*/) + : Controller(ControllerType::SPLINES) + , generationCounter_(0) , cycleStartTime_(-1) - , startTime_(-1) , evaluationRate_(30.0) // default + , startTime_(-1) { - // Create transport node - this->node_.reset(new 
gz::transport::Node()); - this->node_->Init(); - auto learner_settings = _settings->GetElement("rv:learner"); this->robot_ = _model; @@ -82,7 +79,7 @@ RLPower::RLPower( RLPower::~RLPower() = default; ///////////////////////////////////////////////// -void RLPower::Update( +void RLPower::update( const std::vector< MotorPtr > &_motors, const std::vector< SensorPtr > &/* _sensors */, double _time, @@ -103,7 +100,7 @@ void RLPower::Update( { this->UpdatePolicy(numMotors); this->startTime_ = _time; - this->evaluator_->Reset(); + this->evaluator_->reset(); } // generate outputs @@ -119,7 +116,7 @@ void RLPower::Update( } auto currPosition = this->robot_->WorldPose(); - this->evaluator_->Update(currPosition, _time, _step); +// this->evaluator_->update(currPosition, _time, _step); delete[] output; } @@ -476,7 +473,7 @@ const double RLPower::SIGMA = 0.98; double RLPower::Fitness() { - return this->evaluator_->Fitness(); + return this->evaluator_->fitness(); } void RLPower::Modify(ConstModifyPolicyPtr &/* _request */) diff --git a/cpprevolve/revolve/gazebo/brains/RLPower.h b/cpprevolve/revolve/gazebo/brains/RLPower.h index 2e49c0e9c7..c14e5bf195 100644 --- a/cpprevolve/revolve/gazebo/brains/RLPower.h +++ b/cpprevolve/revolve/gazebo/brains/RLPower.h @@ -30,6 +30,7 @@ #include #include +#include #include #include "Evaluator.h" @@ -39,8 +40,7 @@ namespace revolve { namespace gazebo { - class RLPower - : public Brain + class RLPower : public ::revolve::Controller { typedef const std::shared_ptr ConstModifyPolicyPtr; @@ -82,7 +82,7 @@ namespace revolve /// \param[in] _sensors: vector list of robot's sensors /// \param[in] _time: /// \param[in] _step: - public: void Update( + public: void update( const std::vector< MotorPtr > &_motors, const std::vector< SensorPtr > &_sensors, double _time, diff --git a/cpprevolve/revolve/gazebo/motors/PositionMotor.cpp b/cpprevolve/revolve/gazebo/motors/PositionMotor.cpp index 5ddbbb2899..744cf67d5b 100644 --- a/cpprevolve/revolve/gazebo/motors/PositionMotor.cpp +++ b/cpprevolve/revolve/gazebo/motors/PositionMotor.cpp @@ -18,7 +18,7 @@ #include #include - +#include #include "PositionMotor.h" namespace gz = gazebo; @@ -27,45 +27,45 @@ using namespace revolve::gazebo; ///////////////////////////////////////////////// PositionMotor::PositionMotor( - gz::physics::ModelPtr _model, - const std::string &_partId, - const std::string &_motorId, - const sdf::ElementPtr _motor, - const std::string &_coordinates) - : JointMotor(std::move(_model), _partId, _motorId, _motor, 1, _coordinates) - , positionTarget_(0) - , noise_(0) + gz::physics::ModelPtr _model, + const std::string &_partId, + const std::string &_motorId, + const sdf::ElementPtr _motor, + const std::string &_coordinates) + : JointMotor(std::move(_model), _partId, _motorId, _motor, 1, _coordinates) + , positionTarget_(0) + , noise_(0) { - // Retrieve upper / lower limit from joint set in parent constructor - // Truncate ranges to [-pi, pi] - this->upperLimit_ = std::fmin(M_PI, this->joint_->UpperLimit(0)); - this->lowerLimit_ = std::fmax(-M_PI, this->joint_->LowerLimit(0)); - this->fullRange_ = ((this->upperLimit_ - this->lowerLimit_ + 1e-12) >= - (2 * M_PI)); - - if (_motor->HasElement("rv:pid")) - { - auto pidElem = _motor->GetElement("rv:pid"); - this->pid_ = Motor::CreatePid(pidElem); - } - - auto noise = _motor->GetAttribute("noise"); - if (noise) - { - noise->Get(this->noise_); - } - - // I've asked this question at the Gazebo forums: - // 
http://answers.gazebosim.org/question/9071/joint-target-velocity-with-maximum-force/ - // Until it is answered I'm resorting to calling ODE functions directly - // to get this to work. This will result in some deprecation warnings. - // It has the added benefit of not requiring the world update - // connection though. - // updateConnection_ = gz::event::Events::ConnectWorldUpdateBegin(boost::bind( - // &PositionMotor::OnUpdate, this, _1)); - - auto maxEffort = joint_->GetEffortLimit(0); - joint_->SetParam("fmax", 0, maxEffort); + // Retrieve upper / lower limit from joint set in parent constructor + // Truncate ranges to [-pi, pi] + this->upperLimit_ = std::fmin(M_PI, this->joint_->UpperLimit(0)); + this->lowerLimit_ = std::fmax(-M_PI, this->joint_->LowerLimit(0)); + this->fullRange_ = ((this->upperLimit_ - this->lowerLimit_ + 1e-12) >= + (2 * M_PI)); + + if (_motor->HasElement("rv:pid")) + { + auto pidElem = _motor->GetElement("rv:pid"); + this->pid_ = Motor::CreatePid(pidElem); + } + + auto noise = _motor->GetAttribute("noise"); + if (noise) + { + noise->Get(this->noise_); + } + + // I've asked this question at the Gazebo forums: + // http://answers.gazebosim.org/question/9071/joint-target-velocity-with-maximum-force/ + // Until it is answered I'm resorting to calling ODE functions directly + // to get this to work. This will result in some deprecation warnings. + // It has the added benefit of not requiring the world update + // connection though. + // updateConnection_ = gz::event::Events::ConnectWorldUpdateBegin(boost::bind( + // &PositionMotor::OnUpdate, this, _1)); + + auto maxEffort = joint_->GetEffortLimit(0); + joint_->SetParam("fmax", 0, maxEffort); } ///////////////////////////////////////////////// @@ -76,60 +76,86 @@ PositionMotor::~PositionMotor() = default; // DoUpdate(info.simTime); // } +double PositionMotor::Current_State( Actuator::StateType type) +{ + if (type==0) + { + return this->joint_->Position(0); + } + else if (type == 1) + { + return this->joint_->GetVelocity(0); + } + else if (type == 2) + { + return this->joint_->GetForce(0); + } +} + ///////////////////////////////////////////////// void PositionMotor::write( - const double *outputs, - double /*step*/) + const double *outputs, + double /*step*/) { - // Just one network output, which is the first - auto output = outputs[0]; - - // Motor noise in range +/- noiseLevel * actualValue - output += ((2 * ignition::math::Rand::DblUniform() * this->noise_) - - this->noise_) * - output; - - // Truncate output to [0, 1] - // Note: Don't actually target the full joint range, this way a low update - // rate won't mess with the joint constraints as much leading to a more - // stable system. - output = std::fmin(std::fmax(1e-5, output), 0.99999); - this->positionTarget_ = this->lowerLimit_ + - (output * (this->upperLimit_ - this->lowerLimit_)); - - // Perform the actual motor update - this->DoUpdate(this->joint_->GetWorld()->SimTime()); + // Just one network output, which is the first + auto output = outputs[0]; + + // Motor noise in range +/- noiseLevel * actualValue + output += ((2 * ignition::math::Rand::DblUniform() * this->noise_) - + this->noise_) * + output; + + // Truncate output to [0, 1] + // Note: Don't actually target the full joint range, this way a low update + // rate won't mess with the joint constraints as much leading to a more + // stable system. 
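// --- Editorial sketch, not part of this patch: clamping a [0, 1] network output and
// --- mapping it to a position target, as PositionMotor::write does. The patch switches
// --- from interpolating between the joint limits to the fixed mapping output*2-1;
// --- both variants are shown. lower/upper are hypothetical joint limits.
#include <algorithm>
#include <cmath>
#include <iostream>

double to_target(double output, double lower, double upper, bool use_joint_limits)
{
    // keep the target slightly inside the joint constraints, as the comment above explains
    output = std::min(std::max(1e-5, output), 0.99999);
    return use_joint_limits ? lower + output * (upper - lower)   // pre-patch behaviour
                            : output * 2.0 - 1.0;                // behaviour after this patch
}

int main()
{
    std::cout << to_target(0.75, -M_PI, M_PI, true) << std::endl;
    std::cout << to_target(0.75, -M_PI, M_PI, false) << std::endl;
    return 0;
}
// --- End of editorial sketch.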
+ output = std::fmin(std::fmax(1e-5, output), 0.99999); +// this->positionTarget_ = this->lowerLimit_ + +// (output * (this->upperLimit_ - this->lowerLimit_)); + + this->positionTarget_ = output*2-1;//2*5.235988-5.235988; + // Perform the actual motor update + this->DoUpdate(this->joint_->GetWorld()->SimTime()); } ///////////////////////////////////////////////// void PositionMotor::DoUpdate(const ::gazebo::common::Time &_simTime) { - auto stepTime = _simTime - this->prevUpdateTime_; - if (stepTime <= 0) - { - // Only respond to positive step times - return; - } - - this->prevUpdateTime_ = _simTime; - auto position = this->joint_->Position(0); - - // TODO Make sure normalized angle lies within possible range - // I get the feeling we might be moving motors outside their - // allowed range. Also something to be aware of when setting - // the direction. - - if (this->fullRange_ and std::fabs(position - positionTarget_) > M_PI) - { - // Both the position and the position target will be in the range - // [-pi, pi]. For a full range of motion joint, using an angle +- 2 PI - // might result in a much shorter required movement. In this case we - // best correct the current position to something outside the range. - position += (position > 0 ? -2 * M_PI : 2 * M_PI); - } - - auto error = position - this->positionTarget_; - auto cmd = this->pid_.Update(error, stepTime); - - this->joint_->SetParam("vel", 0, cmd); + auto stepTime = _simTime - this->prevUpdateTime_; + if (stepTime <= 0) + { + // Only respond to positive step times + return; + } + + this->prevUpdateTime_ = _simTime; + auto position = this->joint_->Position(0); + + // TODO Make sure normalized angle lies within possible range + // I get the feeling we might be moving motors outside their + // allowed range. Also something to be aware of when setting + // the direction. + + if (this->fullRange_ and std::fabs(position - positionTarget_) > M_PI) + { + // Both the position and the position target will be in the range + // [-pi, pi]. For a full range of motion joint, using an angle +- 2 PI + // might result in a much shorter required movement. In this case we + // best correct the current position to something outside the range. + position += (position > 0 ? 
-2 * M_PI : 2 * M_PI); + } + const double mean = 0.0; + const double stddev = 0.05; + std::default_random_engine generator; + auto dist = std::bind(std::normal_distribution{mean, stddev}, + std::mt19937(std::random_device{}())); +// std::normal_distribution dist(mean, stddev); // + auto error = (position - this->positionTarget_); + auto cmd = this->pid_.Update(error, stepTime)/stepTime.Double(); +// auto cmd = this->positionTarget_;//##################################### + auto velLimit = joint_->GetVelocityLimit(0); + cmd = std::fmax(-velLimit,std::fmin(velLimit,cmd)); + + double pert = dist()*velLimit; + this->joint_->SetParam("vel", 0, cmd); } diff --git a/cpprevolve/revolve/gazebo/motors/PositionMotor.h b/cpprevolve/revolve/gazebo/motors/PositionMotor.h index 429b696404..4fdb0ab95a 100644 --- a/cpprevolve/revolve/gazebo/motors/PositionMotor.h +++ b/cpprevolve/revolve/gazebo/motors/PositionMotor.h @@ -55,6 +55,8 @@ namespace revolve const double *_outputs, double _step) override; + public: virtual double Current_State( Actuator::StateType type ) override ; + /// \brief World update event function // protected: void OnUpdate(const ::gazebo::common::UpdateInfo info); diff --git a/cpprevolve/revolve/gazebo/motors/VelocityMotor.cpp b/cpprevolve/revolve/gazebo/motors/VelocityMotor.cpp index d8be304c46..0d286925a3 100644 --- a/cpprevolve/revolve/gazebo/motors/VelocityMotor.cpp +++ b/cpprevolve/revolve/gazebo/motors/VelocityMotor.cpp @@ -67,6 +67,18 @@ VelocityMotor::~VelocityMotor() { } +double VelocityMotor::Current_State( Actuator::StateType type) { + if (type == 0) { + return this->joint_->Position(0); + } else if (type == 1) { + return this->joint_->GetVelocity(0); + } + else if (type == 2) + { + return this->joint_->GetForce(0); + } +} + void VelocityMotor::write( const double *outputs, double /*step*/) @@ -90,4 +102,12 @@ void VelocityMotor::DoUpdate(const ::gazebo::common::Time &/*simTime*/) // I'm caving for now and am setting ODE parameters directly. 
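// Illustration only, not part of the patch: the Current_State overrides added
// above expose joint feedback through the generic actuator interface, with
// StateType 0, 1 and 2 read back as position, velocity and force respectively.
// "motor" is a hypothetical pointer to one of these motors.
const double position = motor->Current_State(static_cast<Actuator::StateType>(0));
const double velocity = motor->Current_State(static_cast<Actuator::StateType>(1));
const double force    = motor->Current_State(static_cast<Actuator::StateType>(2));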
// See https://tinyurl.com/y7he7y8l this->joint_->SetParam("vel", 0, this->velocityTarget_); +// this->pid_.S + this->model_->GetJointController()->SetVelocityPID( + this->joint_->GetScopedName(),this->pid_); + + this->model_->GetJointController()->SetVelocityTarget( + this->joint_->GetScopedName(),this->velocityTarget_); + + } diff --git a/cpprevolve/revolve/gazebo/motors/VelocityMotor.h b/cpprevolve/revolve/gazebo/motors/VelocityMotor.h index 29868d5359..b2ccf3c54a 100644 --- a/cpprevolve/revolve/gazebo/motors/VelocityMotor.h +++ b/cpprevolve/revolve/gazebo/motors/VelocityMotor.h @@ -60,6 +60,8 @@ namespace revolve const double *outputs, double step); + public: virtual double Current_State( Actuator::StateType type ) override ; + /// \brief World update event function // protected: void OnUpdate(const ::gazebo::common::UpdateInfo info); diff --git a/cpprevolve/revolve/gazebo/msgs/robot_states_learning.proto b/cpprevolve/revolve/gazebo/msgs/robot_states_learning.proto new file mode 100644 index 0000000000..9c0f32b82d --- /dev/null +++ b/cpprevolve/revolve/gazebo/msgs/robot_states_learning.proto @@ -0,0 +1,19 @@ +syntax = "proto2"; +package revolve.msgs; +import "time.proto"; +import "pose.proto"; + +message BehaviourData { + required gazebo.msgs.Time time = 1; + optional gazebo.msgs.Pose pose = 2; + // optional gazebo.msgs.Balance balance = 3; + // optional gazebo.msgs.Touching touching = 4; +} + +message LearningRobotStates { + required string id = 1; + required uint32 eval = 2; + optional bool dead = 3; + required double fitness = 4; + repeated BehaviourData behaviour = 5; +} diff --git a/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.cpp b/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.cpp new file mode 100644 index 0000000000..1ab329aafa --- /dev/null +++ b/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.cpp @@ -0,0 +1,438 @@ +/* +* Copyright (C) 2017 Vrije Universiteit Amsterdam +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* +* Author: Elte Hupkes +* Date: June 6, 2015 +* +*/ + +#include + +#include "RealtimeWorldController.h" + +namespace gz = gazebo; + +using namespace revolve::gazebo; + +///////////////////////////////////////////////// +RealtimeWorldController::RealtimeWorldController() + : delete_robot_queue() + , robotStatesPubFreq_(5) + , lastRobotStatesUpdateTime_(0) + , robotStatesPub_(nullptr) +{ +} + +void unsubscribe(gz::transport::SubscriberPtr &subscription) +{ + if (subscription) + subscription->Unsubscribe(); +} + +void fini(gz::transport::PublisherPtr &publisher) +{ + if (publisher) + publisher->Fini(); +} + +RealtimeWorldController::~RealtimeWorldController() +{ + unsubscribe(this->requestSub_); + unsubscribe(this->responseSub_); + unsubscribe(this->modelSub_); + fini(this->requestPub_); + fini(this->responsePub_); + fini(this->robotStatesPub_); +} + +///////////////////////////////////////////////// +void RealtimeWorldController::Load( + gz::physics::WorldPtr world, + sdf::ElementPtr /*_sdf*/) +{ + gz::physics::PhysicsEnginePtr physicsEngine = world->Physics(); + assert(physicsEngine != nullptr); + + // Turn on threading + physicsEngine->SetParam("thread_position_correction", true); + physicsEngine->SetParam("island_threads", 8); + + + std::cout << "World plugin loaded." << std::endl; + + // Store the world + this->world_ = world; + + // Create transport node + this->node_.reset(new gz::transport::Node()); + this->node_->Init(); + + // Subscribe to insert request messages + this->requestSub_ = this->node_->Subscribe( + "~/request", + &RealtimeWorldController::HandleRequest, + this); + + // Publisher for `entity_delete` requests. + this->requestPub_ = this->node_->Advertise< gz::msgs::Request >( + "~/request"); + + // Publisher for inserted models + this->responseSub_ = this->node_->Subscribe( + "~/response", + &RealtimeWorldController::HandleResponse, + this); + + // Publisher for inserted models + this->responsePub_ = this->node_->Advertise< gz::msgs::Response >( + "~/response"); + + // Since models are added asynchronously, we need some way of detecting + // our model add. We do this using a model info subscriber. 
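// Illustration only, not part of the patch: the death-sentence bookkeeping used
// by OnBeginUpdate below. A robot is registered with a NEGATIVE value meaning
// "allowed lifetime, not yet anchored to simulation time"; on the first update
// the entry is rewritten as an absolute deadline, after which the robot is
// reported dead and queued for removal.
#include <map>
#include <string>

bool is_dead(std::map<std::string, double> &death_sentences,
             const std::string &name, const double sim_time)
{
    double &sentence = death_sentences[name];
    if (sentence < 0.0)
        sentence = sim_time - sentence;   // e.g. -30.0 at t = 12.5 becomes a deadline of 42.5
    return not (sentence > sim_time);     // dead once the deadline has passed
}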
+ this->modelSub_ = this->node_->Subscribe( + "~/model/info", + &RealtimeWorldController::OnModel, + this); + + // Bind to the world update event to perform some logic + this->onBeginUpdateConnection = gz::event::Events::ConnectWorldUpdateBegin( + [this] (const ::gazebo::common::UpdateInfo &_info) {this->OnBeginUpdate(_info);}); + + // Bind to the world update event to perform some logic + this->onEndUpdateConnection = gz::event::Events::ConnectWorldUpdateEnd( + [this] () {this->OnEndUpdate();}); + + // Robot pose publisher + this->robotStatesPub_ = this->node_->Advertise< revolve::msgs::RobotStates >( + "~/revolve/robot_states", 500); +} + +void RealtimeWorldController::Reset() +{ + this->lastRobotStatesUpdateTime_ = 0; //this->world_->SimTime().Double(); +} + +///////////////////////////////////////////////// +void RealtimeWorldController::OnBeginUpdate(const ::gazebo::common::UpdateInfo &_info) { + if (not this->robotStatesPubFreq_) { + return; + } + + auto secs = 1.0 / this->robotStatesPubFreq_; + auto time = _info.simTime.Double(); + if ((time - this->lastRobotStatesUpdateTime_) >= secs) { + // Send robot info update message, this only sends the + // main pose of the robot (which is all we need for now) + msgs::RobotStates msg; + gz::msgs::Set(msg.mutable_time(), _info.simTime); + + { + boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); + for (const auto &model : this->world_->Models()) { + if (model->IsStatic()) { + // Ignore static models such as the ground and obstacles + continue; + } + + revolve::msgs::RobotState *stateMsg = msg.add_robot_state(); + const std::string scoped_name = model->GetScopedName(); + stateMsg->set_name(scoped_name); + stateMsg->set_id(model->GetId()); + + auto poseMsg = stateMsg->mutable_pose(); + auto relativePose = model->RelativePose(); + + gz::msgs::Set(poseMsg, relativePose); + + // Death sentence check + const std::string name = model->GetName(); + bool death_sentence = false; + double death_sentence_value = 0; + { + boost::mutex::scoped_lock lock_death(death_sentences_mutex_); + death_sentence = death_sentences_.count(name) > 0; + if (death_sentence) + death_sentence_value = death_sentences_[name]; + } + + if (death_sentence) { + if (death_sentence_value < 0) { + // Initialize death sentence + death_sentences_[name] = time - death_sentence_value; + stateMsg->set_dead(false); + } else { + bool alive = death_sentence_value > time; + stateMsg->set_dead(not alive); + + if (not alive) { + boost::mutex::scoped_lock lock(this->death_sentences_mutex_); + this->death_sentences_.erase(model->GetName()); + + this->models_to_remove.emplace_back(model); + } + } + } + } + } + + if (msg.robot_state_size() > 0) { + this->robotStatesPub_->Publish(msg); + this->lastRobotStatesUpdateTime_ = time; + } + } + + +// if (world_insert_remove_mutex.try_lock()) { + for (const auto &model: this->models_to_remove) { + std::cout << "Removing " << model->GetScopedName() << std::endl; +// this->world_->RemoveModel(model); +// gz::msgs::Request deleteReq; +// auto id = gz::physics::getUniqueId(); +// deleteReq.set_id(id); +// deleteReq.set_request("entity_delete"); +// deleteReq.set_data(model->GetScopedName()); +// this->requestPub_->Publish(deleteReq); + gz::transport::requestNoReply(this->world_->Name(), "entity_delete", model->GetScopedName()); + std::cout << "Removed " << model->GetScopedName() << std::endl; + + } + this->models_to_remove.clear(); +// this->world_insert_remove_mutex.unlock(); +// } +} + +void 
RealtimeWorldController::OnEndUpdate() +{ + { // check if there are robots to delete + std::tuple< ::gazebo::physics::ModelPtr, int> delete_robot; + { + boost::mutex::scoped_lock lock(this->deleteMutex_); + if (not this->delete_robot_queue.empty()) { + delete_robot = this->delete_robot_queue.front(); + this->delete_robot_queue.pop(); + } + } + auto model = std::get<0>(delete_robot); + auto request_id = std::get<1>(delete_robot); + if (model) + { + { +// boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); + this->world_->RemoveModel(model); + } + + gz::msgs::Response resp; + resp.set_id(request_id); + resp.set_request("delete_robot"); + resp.set_response("success"); + this->responsePub_->Publish(resp); + } + } + + { // check if there are robots to insert + boost::mutex::scoped_lock lock(this->insertMutex_); + for (auto &iterator: this->insertMap_) + { + bool &insert_operation_pending = std::get<2>(iterator.second); + //std::cout << "trying to insert " << iterator.first << " - " << insert_operation_pending << std::endl; +// if (insert_operation_pending and this->world_insert_remove_mutex.try_lock()) + if (insert_operation_pending) + { + // Start insert operation! +// boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); + const std::string &robotSDF = std::get<1>(iterator.second); + this->world_->InsertModelString(robotSDF); + insert_operation_pending = false; + break; + } + } + } +} + + +///////////////////////////////////////////////// +// Process insert and delete requests +void RealtimeWorldController::HandleRequest(ConstRequestPtr &request) +{ + if (request->request() == "delete_robot") + { + auto name = request->data(); + std::cout << "Processing request `" << request->id() + << "` to delete robot `" << name << "`" << std::endl; + +// auto model = this->world_->ModelByName(name); +// if (model) +// { +// // Tell the world to remove the model +// // Using `World::RemoveModel()` from here crashes the transport +// // library, the cause of which I've yet to figure out - it has +// // something to do with race conditions where the model is used by +// // the world while it is being updated. Fixing this completely +// // appears to be a rather involved process, instead, we'll use an +// // `entity_delete` request, which will make sure deleting the model +// // happens on the world thread. +// gz::msgs::Request deleteReq; +// auto id = gz::physics::getUniqueId(); +// deleteReq.set_id(id); +// deleteReq.set_request("entity_delete"); +// deleteReq.set_data(model->GetScopedName()); +// +// { +// boost::mutex::scoped_lock lock(this->deleteMutex_); +// this->delete_robot_queue.emplace(std::make_tuple(model, request->id())); +// } +// { +// boost::mutex::scoped_lock lock(this->death_sentences_mutex_); +// this->death_sentences_.erase(model->GetName()); +// } +// +// this->requestPub_->Publish(deleteReq); +// } +// else +// { + std::cerr << "Model `" << name << "` could not be found in the world." + << std::endl; + gz::msgs::Response resp; + resp.set_id(request->id()); + resp.set_request("delete_robot"); + resp.set_response("error"); + this->responsePub_->Publish(resp); +// } + } + else if (request->request() == "insert_sdf") + { + std::cout << "Processing insert model request ID `" << request->id() << "`." 
+ << std::endl; + sdf::SDF robotSDF; + robotSDF.SetFromString(request->data()); + double lifespan_timeout = request->dbl_data(); + + // Get the model name, store in the expected map + auto name = robotSDF.Root()->GetElement("model")->GetAttribute("name") + ->GetAsString(); + + if (lifespan_timeout > 0) + { + boost::mutex::scoped_lock lock(death_sentences_mutex_); + // Initializes the death sentence negative because I don't dare to take the + // simulation time from this thread. + death_sentences_[name] = -lifespan_timeout; + } + + { + boost::mutex::scoped_lock lock(this->insertMutex_); + this->insertMap_[name] = std::make_tuple(request->id(), robotSDF.ToString(), true); + } + + //TODO insert here, it's better + //this->world_->InsertModelString(robotSDF.ToString()); + + // Don't leak memory + // https://bitbucket.org/osrf/sdformat/issues/104/memory-leak-in-element + robotSDF.Root()->Reset(); + } + else if (request->request() == "set_robot_state_update_frequency") + { + auto frequency = request->data(); + assert(frequency.find_first_not_of( "0123456789" ) == std::string::npos); + this->robotStatesPubFreq_ = (unsigned int)std::stoul(frequency); + std::cout << "Setting robot state update frequency to " + << this->robotStatesPubFreq_ << "." << std::endl; + + gz::msgs::Response resp; + resp.set_id(request->id()); + resp.set_request("set_robot_state_update_frequency"); + resp.set_response("success"); + + this->responsePub_->Publish(resp); + } +} + +///////////////////////////////////////////////// +void RealtimeWorldController::OnModel(ConstModelPtr &msg) +{ + auto name = msg->name(); + std::cout << "RealtimeWorldController::OnModel(" << name << ')' << std::endl; + + + int id; + bool insert_operation_pending; + { + boost::mutex::scoped_lock lock(this->insertMutex_); + if (this->insertMap_.count(name) <= 0) + { + // Insert was not requested here, ignore it + return; + } + const std::tuple &entry = this->insertMap_[name]; + id = std::get<0>(entry); + insert_operation_pending = std::get<2>(entry); + if (insert_operation_pending) + { + // Insert operation has not been done yet + // (but you should never be here, because we are in the "OnModel" function + return; + } + this->insertMap_.erase(name); + } + + // Respond with the inserted model + gz::msgs::Response resp; + resp.set_request("insert_sdf"); + resp.set_response("success"); + resp.set_id(id); + + msgs::ModelInserted inserted; + inserted.mutable_model()->CopyFrom(*msg); + gz::msgs::Set(inserted.mutable_time(), this->world_->SimTime()); + inserted.SerializeToString(resp.mutable_serialized_data()); + + this->responsePub_->Publish(resp); + +// this->world_insert_remove_mutex.unlock(); + + std::cout << "Model `" << name << "` inserted, world now contains " + << this->world_->ModelCount() << " models." 
<< std::endl; +} + +///////////////////////////////////////////////// +void RealtimeWorldController::HandleResponse(ConstResponsePtr &response) +{ +// std::cout << "RealtimeWorldController::HandleResponse(" << response->request() << ')' << std::endl; + + if (response->request() not_eq "entity_delete") + { + return; + } + +// int id; +// { +// boost::mutex::scoped_lock lock(this->deleteMutex_); +// if (this->deleteMap_.count(response->id()) <= 0) +// { +// return; +// } +// +// id = this->deleteMap_[response->id()]; +// this->deleteMap_.erase(id); +// } + +// this->world_insert_remove_mutex.unlock(); + +// gz::msgs::Response resp; +// resp.set_id(id); +// resp.set_request("delete_robot"); +// resp.set_response("success"); +// this->responsePub_->Publish(resp); +} diff --git a/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.h b/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.h new file mode 100644 index 0000000000..3651b40729 --- /dev/null +++ b/cpprevolve/revolve/gazebo/plugin/RealtimeWorldController.h @@ -0,0 +1,133 @@ +/* +* Copyright (C) 2017 Vrije Universiteit Amsterdam +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* +* Description: TODO: +* Author: Elte Hupkes +* +*/ + +// +// Created by elte on 6-6-15. +// + +#pragma once + +#include +#include +#include + +#include + +#include +#include +#include +#include + +#include +#include + +namespace revolve { +namespace gazebo { + +class RealtimeWorldController: public ::gazebo::WorldPlugin +{ +public: + RealtimeWorldController(); + + virtual ~RealtimeWorldController(); + + virtual void Load( + ::gazebo::physics::WorldPtr _parent, + sdf::ElementPtr _sdf) override; + + virtual void Reset() override; + +protected: + // Listener for analysis requests + virtual void HandleRequest(ConstRequestPtr &request); + + // Listener for entity delete responses + virtual void HandleResponse(ConstResponsePtr &request); + + // Callback for model insertion + virtual void OnModel(ConstModelPtr &msg); + + // Method called + virtual void OnBeginUpdate(const ::gazebo::common::UpdateInfo &_info); + + virtual void OnEndUpdate(); + + // Maps model names to insert request IDs + // model_name -> request_id, SDF, insert_operation_pending + std::map > insertMap_; + + // Queue of `delete_robot` requests + std::queue> delete_robot_queue; + + // Stores the world + ::gazebo::physics::WorldPtr world_; + + // Transport node + ::gazebo::transport::NodePtr node_; + + // Mutex for the insertMap_ + boost::mutex insertMutex_; + + // Mutex for the deleteMap_ + boost::mutex deleteMutex_; + + // Request subscriber + ::gazebo::transport::SubscriberPtr requestSub_; + + // Request publisher + ::gazebo::transport::PublisherPtr requestPub_; + + // Response subscriber + ::gazebo::transport::SubscriberPtr responseSub_; + + // Response publisher + ::gazebo::transport::PublisherPtr responsePub_; + + // Subscriber for actual model insertion + ::gazebo::transport::SubscriberPtr modelSub_; + + // Publisher for periodic robot poses + ::gazebo::transport::PublisherPtr robotStatesPub_; + + // Frequency at which robot info is published + // Defaults to 
0, which means no update at all + unsigned int robotStatesPubFreq_; + + // Pointer to the update event connection + ::gazebo::event::ConnectionPtr onBeginUpdateConnection; + ::gazebo::event::ConnectionPtr onEndUpdateConnection; + + // Last (simulation) time robot info was sent + double lastRobotStatesUpdateTime_; + + // Death sentence list. It collects all the end time for all robots that have + // a death sentence + // NEGATIVE DEATH SENTENCES mean total lifetime, death sentence not yet initialized. + std::map death_sentences_; + + // Mutex for the deleteMap_ + boost::mutex death_sentences_mutex_; + +// boost::mutex world_insert_remove_mutex; + + ::gazebo::physics::Model_V models_to_remove; +}; + +} // namespace gazebo +} // namespace revolve diff --git a/cpprevolve/revolve/gazebo/plugin/RobotController.cpp b/cpprevolve/revolve/gazebo/plugin/RobotController.cpp index 566efa4b92..30efed0b4c 100644 --- a/cpprevolve/revolve/gazebo/plugin/RobotController.cpp +++ b/cpprevolve/revolve/gazebo/plugin/RobotController.cpp @@ -17,13 +17,22 @@ * */ -#include - +#include +#include +#include #include #include #include #include +#include +#include +#include +#include +#include +#include +#include +#include #include "RobotController.h" @@ -199,44 +208,196 @@ SensorFactoryPtr RobotController::SensorFactory( ///////////////////////////////////////////////// void RobotController::LoadBrain(const sdf::ElementPtr _sdf) { - if (not _sdf->HasElement("rv:brain")) - { - std::cerr << "No robot brain detected, this is probably an error." - << std::endl; - return; - } + if (not _sdf->HasElement("rv:brain")) { + std::cerr << "No robot brain detected, this is probably an error." + << std::endl; + return; + } - auto brain_sdf = _sdf->GetElement("rv:brain"); - auto controller_type = brain_sdf->GetElement("rv:controller")->GetAttribute("type")->GetAsString(); - auto learner = brain_sdf->GetElement("rv:learner")->GetAttribute("type")->GetAsString(); - std::cout << "Loading controller " << controller_type << " and learner " << learner << std::endl; + auto brain_sdf = _sdf->GetElement("rv:brain"); + auto controller_type = brain_sdf->GetElement("rv:controller")->GetAttribute("type")->GetAsString(); +// auto IMC_params = brain_sdf->GetElement("rv:IMC")->GetElement("rv:params"); + auto learner_type = brain_sdf->GetElement("rv:learner")->GetAttribute("type")->GetAsString(); + std::cout << "Loading controller " << controller_type << " and learner " << learner_type << std::endl; - if ("offline" == learner and "ann" == controller_type) - { - brain_.reset(new NeuralNetwork(this->model_, brain_sdf, motors_, sensors_)); - } - else if ("rlpower" == learner and "spline" == controller_type) - { - if (not motors_.empty()) { - brain_.reset(new RLPower(this->model_, brain_sdf, motors_, sensors_)); + + //TODO parameters from SDF + const double evaluation_rate = 60.0; + const unsigned int n_learning_evaluations = 300; + + this->evaluator = std::make_unique<::revolve::gazebo::Evaluator>(evaluation_rate, true, this->model_); + + // aggregated reporter + std::unique_ptr aggregated_reporter(new AggregatedReporter(this->model_->GetName())); + + aggregated_reporter->create<::revolve::PrintReporter>(); + // gazebo network publisher reporter + this->gazebo_reporter.reset(new GazeboReporter(aggregated_reporter->robot_id, this->node_)); + aggregated_reporter->append(this->gazebo_reporter); + + this->reporter = std::move(aggregated_reporter); + + // SELECT CONTROLLER ------------------------------------------------------ + 
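// Illustration only, not part of the patch: the hard-coded evaluation_rate and
// n_learning_evaluations above (see the TODO) could instead be read from the
// rv:learner element, reusing brain_sdf from this function. The attribute names
// "evaluation_rate" and "n_evaluations" are hypothetical; only the
// GetAttribute()/GetAsString() calls are taken from this file.
double evaluation_rate = 60.0;
unsigned int n_learning_evaluations = 300;
sdf::ElementPtr learner_sdf = brain_sdf->GetElement("rv:learner");
if (learner_sdf->HasAttribute("evaluation_rate"))
    evaluation_rate = std::stod(learner_sdf->GetAttribute("evaluation_rate")->GetAsString());
if (learner_sdf->HasAttribute("n_evaluations"))
    n_learning_evaluations = std::stoul(learner_sdf->GetAttribute("n_evaluations")->GetAsString());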
std::unique_ptr<::revolve::Controller> controller; + + if ("ann" == controller_type) { + controller = std::make_unique(this->model_, brain_sdf, motors_, sensors_); + } else if ("spline" == controller_type) { + if (not motors_.empty()) { + controller = std::make_unique(this->model_, brain_sdf, motors_, sensors_); + } + } else if ("cpg" == controller_type) { + controller = std::make_unique(brain_sdf, motors_); + } else if ("cppn-cpg" == controller_type) { + controller = std::make_unique(brain_sdf, motors_); + } else { + throw std::runtime_error("Robot brain: Controller \"" + controller_type + "\" is not supported."); + } + + sdf::ElementPtr IMC_sdf = brain_sdf->GetElement("rv:IMC"); + if( IMC_sdf->GetAttribute("active")->GetAsString() == "1"){ + std::cout << "Initializing IMC" << std::endl; + // ================= INITIALIZE IMC ==================== + IMC::IMCParams imc_params = IMC::IMCParams(); + imc_params.restore_checkpoint = (IMC_sdf->GetAttribute("restore_checkpoint")->GetAsString() == "1"); + imc_params.save_checkpoint = (IMC_sdf->GetAttribute("save_checkpoint")->GetAsString() == "1"); + imc_params.learning_rate = stod(IMC_sdf->GetAttribute("learning_rate")->GetAsString()); + imc_params.beta1 = stod(IMC_sdf->GetAttribute("beta1")->GetAsString()); + imc_params.beta2 = stod(IMC_sdf->GetAttribute("beta2")->GetAsString()); + imc_params.weight_decay = stod(IMC_sdf->GetAttribute("weight_decay")->GetAsString()); + imc_params.model_name = this->model_->GetName(); + + std::cout<<"IMC Parameters: lr:"<< imc_params.learning_rate<<", beta1:" << imc_params.beta1<<", beta2:" << imc_params.beta2<<", wd:" << imc_params.weight_decay << std::endl; + controller = std::make_unique(std::move(controller), motors_, imc_params); + std::cout<<"IMC has been Loaded"<>(std::move(controller)); + } else if ("rlpower" == learner_type) { + //TODO make RLPower generic + if ("spline" != controller_type) { + throw std::runtime_error("Robot brain: Learner RLPower not supported for \"" + controller_type + "\" controller."); + } + learner = std::make_unique>(std::move(controller)); + } else if ("bo" == learner_type) { + learner = std::make_unique( + std::move(controller), + this->evaluator.get(), + this->reporter.get(), + evaluation_rate, + n_learning_evaluations, + this->model_->GetName()); + } else if ("nipes" == learner_type) { + NIPES::NIPES_Parameters params = NIPES::NIPES_Parameters(); + + EA::Parameters EA_params = EA::Parameters(); + params.EA_params = EA_params; + params.EA_params.verbose = (brain_sdf->GetElement("rv:learner")->GetAttribute("verbose")->GetAsString() == "1"); + params.EA_params.population_size = stoi(brain_sdf->GetElement("rv:learner")->GetAttribute("population_size")->GetAsString()); + params.EA_params.max_eval = std::min(int(n_learning_evaluations), stoi(brain_sdf->GetElement("rv:learner")->GetAttribute("max_eval")->GetAsString())); + + auto dist = std::bind(std::uniform_int_distribution(), + std::mt19937(std::random_device{}())); + + learner = std::make_unique( + std::move(controller), + this->evaluator.get(), + this->reporter.get(), + params, + dist(), + evaluation_rate, + params.EA_params.max_eval, + this->model_->GetName()); + } else if ("de"==learner_type) { + DifferentialEvo::DE_Parameters params = DifferentialEvo::DE_Parameters(); + params.type = brain_sdf->GetElement("rv:learner")->GetAttribute("subtype")->GetAsString(); + params.CR = stod(brain_sdf->GetElement("rv:learner")->GetAttribute("CR")->GetAsString()); + params.F = 
stod(brain_sdf->GetElement("rv:learner")->GetAttribute("F")->GetAsString()); + params.n_parents = stoi(brain_sdf->GetElement("rv:learner")->GetAttribute("n_parents")->GetAsString());; + if (params.type == "dex3"){ + params.n_parents = 7; + } + + EA::Parameters EA_params = EA::Parameters(); + params.EA_params = EA_params; + params.EA_params.verbose = (brain_sdf->GetElement("rv:learner")->GetAttribute("verbose")->GetAsString() == "1"); + params.EA_params.population_size = stoi(brain_sdf->GetElement("rv:learner")->GetAttribute("population_size")->GetAsString()); + params.EA_params.max_eval = std::min(int(n_learning_evaluations), stoi(brain_sdf->GetElement("rv:learner")->GetAttribute("max_eval")->GetAsString())); + + auto dist = std::bind(std::uniform_int_distribution(), + std::mt19937(std::random_device{}())); + + learner = std::make_unique( + std::move(controller), + this->evaluator.get(), + this->reporter.get(), + params, + dist(), + evaluation_rate, + n_learning_evaluations, + this->model_->GetName()); +// } else if ("hyperneat" == learner_type) { +// NEAT::Parameters neat_params = NEAT::Parameters(); +// +// const sdf::ElementPtr learner_sdf = brain_sdf->GetElement("rv:learner")->GetElement("rv:params"); +// +//#define WRITE_DOUBLE_PARAM(x) std::cout << #x << " is set to: " << learner_sdf->GetAttribute(#x)->GetAsString() << std::endl; neat_params.x = stod(learner_sdf->GetAttribute(#x)->GetAsString()); +//#define CHECK_PARAM(x) {stod(std::to_string(neat_params.x))==stod(learner_sdf->GetAttribute(#x)->GetAsString()) ? std::cout << std::left <<#x << " is set to: Default" << std::endl : WRITE_DOUBLE_PARAM(x)} +// CHECK_PARAM(PopulationSize) +// CHECK_PARAM(WeightDiffCoeff) +// CHECK_PARAM(CompatTreshold) +// CHECK_PARAM(YoungAgeTreshold) +// CHECK_PARAM(OldAgeTreshold) +// CHECK_PARAM(MinSpecies) +// CHECK_PARAM(MaxSpecies) +// CHECK_PARAM(RouletteWheelSelection) +// CHECK_PARAM(RecurrentProb) +// CHECK_PARAM(OverallMutationRate) +// CHECK_PARAM(ArchiveEnforcement) +// CHECK_PARAM(MutateWeightsProb) +// CHECK_PARAM(WeightMutationMaxPower) +// CHECK_PARAM(WeightReplacementMaxPower) +// CHECK_PARAM(MutateWeightsSevereProb) +// CHECK_PARAM(WeightMutationRate) +// CHECK_PARAM(WeightReplacementRate) +// CHECK_PARAM(MaxWeight) +// CHECK_PARAM(MutateAddNeuronProb) +// CHECK_PARAM(MutateAddLinkProb) +// CHECK_PARAM(MutateRemLinkProb) +// CHECK_PARAM(MinActivationA) +// CHECK_PARAM(MaxActivationA) +// CHECK_PARAM(ActivationFunction_SignedSigmoid_Prob) +// CHECK_PARAM(ActivationFunction_UnsignedSigmoid_Prob) +// CHECK_PARAM(ActivationFunction_Tanh_Prob) +// CHECK_PARAM(ActivationFunction_SignedStep_Prob) +// CHECK_PARAM(CrossoverRate) +// CHECK_PARAM(MultipointCrossoverRate) +// CHECK_PARAM(SurvivalRate) +// CHECK_PARAM(MutateNeuronTraitsProb) +// CHECK_PARAM(MutateLinkTraitsProb) +//#undef CHECK_PARAM +//#undef WRITE_DOUBLE_PARAM +// +// neat_params.DynamicCompatibility = (learner_sdf->GetAttribute("DynamicCompatibility")->GetAsString() == "true"); +// neat_params.NormalizeGenomeSize = (learner_sdf->GetAttribute("NormalizeGenomeSize")->GetAsString() == "true"); +// neat_params.AllowLoops = (learner_sdf->GetAttribute("AllowLoops")->GetAsString() == "true"); +// neat_params.AllowClones = (learner_sdf->GetAttribute("AllowClones")->GetAsString() == "true"); +// +// int seed = 0; +// +// learner = std::make_unique( +// std::move(controller), +// this->evaluator.get(), +// this->reporter.get(), +// neat_params, +// seed, +// evaluation_rate, +// n_learning_evaluations); + } else { + throw 
std::runtime_error("Robot brain: Learner \"" + learner_type + "\" is not supported."); } - } - else if ("bo" == learner and "cpg" == controller_type) - { - brain_.reset(new DifferentialCPG(this->model_, _sdf, motors_, sensors_)); - } - else if ("offline" == learner and "cpg" == controller_type) - { - brain_.reset(new DifferentialCPGClean(brain_sdf, motors_)); - } - else if ("offline" == learner and "cppn-cpg" == controller_type) - { - brain_.reset(new DifferentialCPPNCPG(brain_sdf, motors_)); - } - else - { - throw std::runtime_error("Robot brain is not defined."); - } } ///////////////////////////////////////////////// @@ -254,7 +415,7 @@ void RobotController::CheckUpdate(const ::gazebo::common::UpdateInfo _info) { auto diff = _info.simTime - lastActuationTime_; - if (diff.Double() > actuationTime_) + if (diff.Double() >= actuationTime_) { this->DoUpdate(_info); lastActuationTime_ = _info.simTime; @@ -265,10 +426,28 @@ void RobotController::CheckUpdate(const ::gazebo::common::UpdateInfo _info) /// Default update function simply tells the brain to perform an update void RobotController::DoUpdate(const ::gazebo::common::UpdateInfo _info) { - auto currentTime = _info.simTime.Double() - initTime_; + const gz::common::Time current_time = _info.simTime - initTime_; + const double current_time_d = current_time.Double(); + const double delta_time = (_info.simTime - lastActuationTime_).Double(); + + //const ::ignition::math::Pose3d &relative_pose = model_->RelativePose(); + const ::ignition::math::Pose3d &world_pose = model_->WorldPose(); + + if (evaluator) { + evaluator->simulation_update(world_pose, current_time_d, delta_time); + } + + if (gazebo_reporter) { + gazebo_reporter->simulation_update(world_pose, current_time, delta_time); + } - if (brain_) - brain_->Update(motors_, sensors_, currentTime, actuationTime_); + if (learner) { + learner->optimize(current_time_d, delta_time); + revolve::Controller *controller = learner->controller(); + if (controller) { + controller->update(motors_, sensors_, current_time_d, delta_time); + } + } } ///////////////////////////////////////////////// diff --git a/cpprevolve/revolve/gazebo/plugin/RobotController.h b/cpprevolve/revolve/gazebo/plugin/RobotController.h index c72e36b3d2..48ec51d886 100644 --- a/cpprevolve/revolve/gazebo/plugin/RobotController.h +++ b/cpprevolve/revolve/gazebo/plugin/RobotController.h @@ -18,8 +18,7 @@ * */ -#ifndef REVOLVE_GAZEBO_PLUGIN_ROBOTCONTROLLER_H_ -#define REVOLVE_GAZEBO_PLUGIN_ROBOTCONTROLLER_H_ +#pragma once #include @@ -29,123 +28,133 @@ #include #include +#include #include "revolve/brains/controller/sensors/Sensor.h" #include "revolve/brains/controller/actuators/Actuator.h" +#include -namespace revolve +#include "revolve/brains/learner/NIPES.h" +#include "revolve/brains/learner/DifferentialEvo.h" + +namespace revolve { +namespace gazebo { + +class RobotController : public ::gazebo::ModelPlugin { - namespace gazebo - { - class RobotController - : public ::gazebo::ModelPlugin - { - /// \brief Constructor - public: RobotController(); +public: + /// \brief Constructor + RobotController(); - /// \brief Destructor - public: virtual ~RobotController(); + /// \brief Destructor + virtual ~RobotController(); - /// \brief Load method - public: void Load( - ::gazebo::physics::ModelPtr _parent, - sdf::ElementPtr _sdf) override; + /// \brief Load method + void Load( + ::gazebo::physics::ModelPtr _parent, + sdf::ElementPtr _sdf) override; - /// \return Factory class that creates motors for this model - public: virtual MotorFactoryPtr 
MotorFactory( - ::gazebo::physics::ModelPtr _model); + /// \return Factory class that creates motors for this model + virtual MotorFactoryPtr MotorFactory( + ::gazebo::physics::ModelPtr _model); - /// \return Factory class that creates motors for this robot - public: virtual SensorFactoryPtr SensorFactory( - ::gazebo::physics::ModelPtr _model); + /// \return Factory class that creates motors for this robot + virtual SensorFactoryPtr SensorFactory( + ::gazebo::physics::ModelPtr _model); - /// \brief Update event which, by default, is called periodically - /// according to the update rate specified in the robot plugin. - public: virtual void DoUpdate(const ::gazebo::common::UpdateInfo _info); + /// \brief Update event which, by default, is called periodically + /// according to the update rate specified in the robot plugin. + virtual void DoUpdate(const ::gazebo::common::UpdateInfo _info); - /// \brief Returns the battery level - /// \details Methods allows reading and writing the battery level in - /// the robot SDF. This is mostly useful for the `BatterySensor` to - /// obtain the battery state, and storing it in the SDF also means it - /// will be adequately backed up in an eventual snapshot. - public: double BatteryLevel(); + /// \brief Returns the battery level + /// \details Methods allows reading and writing the battery level in + /// the robot SDF. This is mostly useful for the `BatterySensor` to + /// obtain the battery state, and storing it in the SDF also means it + /// will be adequately backed up in an eventual snapshot. + double BatteryLevel(); - /// \brief Sets the battery level if possible - public: void SetBatteryLevel(double _level); + /// \brief Sets the battery level if possible + void SetBatteryLevel(double _level); - /// \brief Request listener for battery update - public: void UpdateBattery(ConstRequestPtr &_request); + /// \brief Request listener for battery update + void UpdateBattery(ConstRequestPtr &_request); - /// \brief Detects and loads motors in the plugin spec - protected: virtual void LoadActuators(const sdf::ElementPtr _sdf); + /// \brief Detects and loads motors in the plugin spec +protected: + virtual void LoadActuators(const sdf::ElementPtr _sdf); - /// \brief Detects and loads sensors in the plugin spec. - protected: virtual void LoadSensors(const sdf::ElementPtr _sdf); + /// \brief Detects and loads sensors in the plugin spec. + virtual void LoadSensors(const sdf::ElementPtr _sdf); - /// \brief Loads the brain from the `rv:brain` element. - /// \details By default this tries to construct a `StandardNeuralNetwork`. - protected: virtual void LoadBrain(const sdf::ElementPtr _sdf); + /// \brief Loads the brain from the `rv:brain` element. + /// \details By default this tries to construct a `StandardNeuralNetwork`. + virtual void LoadBrain(const sdf::ElementPtr _sdf); - /// \brief Loads / initializes the robot battery - protected: virtual void LoadBattery(const sdf::ElementPtr _sdf); + /// \brief Loads / initializes the robot battery + virtual void LoadBattery(const sdf::ElementPtr _sdf); - /// \brief Method called at the end of the default `Load` function. - /// \details This should be used to initialize robot actuation, i.e. - /// register some update event. By default, this grabs the - /// `update_rate` from the robot config pointer, and binds - protected: virtual void Startup( - ::gazebo::physics::ModelPtr _parent, - sdf::ElementPtr _sdf); + /// \brief Method called at the end of the default `Load` function. 
+ /// \details This should be used to initialize robot actuation, i.e. + /// register some update event. By default, this grabs the + /// `update_rate` from the robot config pointer, and binds + virtual void Startup( + ::gazebo::physics::ModelPtr _parent, + sdf::ElementPtr _sdf); - /// \brief Default method bound to world update event, checks whether the - /// \brief actuation time has passed and updates if required. - protected: void CheckUpdate(const ::gazebo::common::UpdateInfo _info); + /// \brief Default method bound to world update event, checks whether the + /// \brief actuation time has passed and updates if required. + void CheckUpdate(const ::gazebo::common::UpdateInfo _info); - /// \brief Networking node - protected: ::gazebo::transport::NodePtr node_; +protected: + std::unique_ptr<::revolve::gazebo::Evaluator> evaluator; + std::unique_ptr<::revolve::EvaluationReporter> reporter; + std::shared_ptr<::revolve::gazebo::GazeboReporter> gazebo_reporter; - /// \brief Subscriber for battery update request - protected: ::gazebo::transport::SubscriberPtr batterySetSub_; + /// \brief Networking node + ::gazebo::transport::NodePtr node_; - /// \brief Responder for battery update request - protected: ::gazebo::transport::PublisherPtr batterySetPub_; + /// \brief Subscriber for battery update request + ::gazebo::transport::SubscriberPtr batterySetSub_; - /// \brief Holds an instance of the motor factory - protected: MotorFactoryPtr motorFactory_; + /// \brief Responder for battery update request + ::gazebo::transport::PublisherPtr batterySetPub_; - /// \brief Holds an instance of the sensor factory - protected: SensorFactoryPtr sensorFactory_; + /// \brief Holds an instance of the motor factory + MotorFactoryPtr motorFactory_; - /// \brief Brain controlling this model - protected: BrainPtr brain_; + /// \brief Holds an instance of the sensor factory + SensorFactoryPtr sensorFactory_; - /// \brief Actuation time, in seconds - protected: double actuationTime_; + /// \brief Learner for the brain controlling this model + std::unique_ptr<::revolve::Learner> learner; - /// \brief Time of initialisation - protected: double initTime_; + /// \brief Actuation time, in seconds + double actuationTime_; - /// \brief rv:battery element, if present - protected: sdf::ElementPtr batteryElem_; + /// \brief Time of initialisation + ::gazebo::common::Time initTime_; - /// \brief Time of the last actuation, in seconds and nanoseconds - protected: ::gazebo::common::Time lastActuationTime_; + /// \brief rv:battery element, if present + sdf::ElementPtr batteryElem_; - /// \brief Motors in this model - protected: std::vector< MotorPtr > motors_; + /// \brief Time of the last actuation, in seconds and nanoseconds + ::gazebo::common::Time lastActuationTime_; - /// \brief Sensors in this model - protected: std::vector< SensorPtr > sensors_; + /// \brief Motors in this model + std::vector motors_; - /// \brief Pointer to the model - protected: ::gazebo::physics::ModelPtr model_; + /// \brief Sensors in this model + std::vector sensors_; - /// \brief Pointer to the world - protected: ::gazebo::physics::WorldPtr world_; + /// \brief Pointer to the model + ::gazebo::physics::ModelPtr model_; - /// \brief Driver update event pointer - private: ::gazebo::event::ConnectionPtr updateConnection_; - }; - } /* namespace gazebo */ -} /* namespace revolve */ + /// \brief Pointer to the world + ::gazebo::physics::WorldPtr world_; -#endif /* REVOLVE_GAZEBO_PLUGIN_ROBOTCONTROLLER_H_ */ + /// \brief Driver update event pointer 
+private: + ::gazebo::event::ConnectionPtr updateConnection_; +}; + +} /* namespace gazebo */ +} /* namespace revolve */ diff --git a/cpprevolve/revolve/gazebo/plugin/WorldController.cpp b/cpprevolve/revolve/gazebo/plugin/WorldController.cpp index ea491681d7..cb584d0c8d 100644 --- a/cpprevolve/revolve/gazebo/plugin/WorldController.cpp +++ b/cpprevolve/revolve/gazebo/plugin/WorldController.cpp @@ -27,9 +27,20 @@ using namespace revolve::gazebo; ///////////////////////////////////////////////// WorldController::WorldController() - : delete_robot_queue() - , robotStatesPubFreq_(5) - , lastRobotStatesUpdateTime_(0) + : enable_parallelization(false) + , insertMap_() + , world_(nullptr) + , node_(nullptr) + , insertMutex_() + , requestSub_(nullptr) + , requestPub_(nullptr) + , responseSub_(nullptr) + , responsePub_(nullptr) + , modelSub_(nullptr) + , robotLearningStatesSub(nullptr) + , onBeginUpdateConnection(nullptr) + , onEndUpdateConnection(nullptr) + , models_to_remove() { } @@ -50,213 +61,95 @@ WorldController::~WorldController() unsubscribe(this->requestSub_); unsubscribe(this->responseSub_); unsubscribe(this->modelSub_); + unsubscribe(this->robotLearningStatesSub); fini(this->requestPub_); fini(this->responsePub_); - fini(this->robotStatesPub_); } ///////////////////////////////////////////////// void WorldController::Load( - gz::physics::WorldPtr world, - sdf::ElementPtr /*_sdf*/) + gz::physics::WorldPtr world, + sdf::ElementPtr /*_sdf*/) { gz::physics::PhysicsEnginePtr physicsEngine = world->Physics(); - assert(physicsEngine != nullptr); + assert(physicsEngine != nullptr && "Physics Engine is nullptr"); // Turn on threading - physicsEngine->SetParam("thread_position_correction", true); - physicsEngine->SetParam("island_threads", 8); - - - std::cout << "World plugin loaded." << std::endl; - - // Store the world - this->world_ = world; - - // Create transport node - this->node_.reset(new gz::transport::Node()); - this->node_->Init(); - - // Subscribe to insert request messages - this->requestSub_ = this->node_->Subscribe( - "~/request", - &WorldController::HandleRequest, - this); - - // Publisher for `entity_delete` requests. - this->requestPub_ = this->node_->Advertise< gz::msgs::Request >( - "~/request"); - - // Publisher for inserted models - this->responseSub_ = this->node_->Subscribe( - "~/response", - &WorldController::HandleResponse, - this); - - // Publisher for inserted models - this->responsePub_ = this->node_->Advertise< gz::msgs::Response >( - "~/response"); - - // Since models are added asynchronously, we need some way of detecting - // our model add. We do this using a model info subscriber. 
- this->modelSub_ = this->node_->Subscribe( - "~/model/info", - &WorldController::OnModel, - this); - - // Bind to the world update event to perform some logic - this->onBeginUpdateConnection = gz::event::Events::ConnectWorldUpdateBegin( - [this] (const ::gazebo::common::UpdateInfo &_info) {this->OnBeginUpdate(_info);}); - - // Bind to the world update event to perform some logic - this->onEndUpdateConnection = gz::event::Events::ConnectWorldUpdateEnd( - [this] () {this->OnEndUpdate();}); + if (this->enable_parallelization) { + physicsEngine->SetParam("thread_position_correction", true); + physicsEngine->SetParam("island_threads", 8); + } - // Robot pose publisher - this->robotStatesPub_ = this->node_->Advertise< revolve::msgs::RobotStates >( - "~/revolve/robot_states", 500); + // Store the world + this->world_ = world; + + // Create transport node + this->node_.reset(new gz::transport::Node()); + this->node_->Init(); + + // Subscribe to insert request messages + this->requestSub_ = this->node_->Subscribe( + "~/request", + &WorldController::HandleRequest, + this); + + // Publisher for `entity_delete` requests. + this->requestPub_ = this->node_->Advertise( + "~/request"); + + // Publisher for inserted models + this->responseSub_ = this->node_->Subscribe( + "~/response", + &WorldController::HandleResponse, + this); + + // Publisher for inserted models + this->responsePub_ = this->node_->Advertise( + "~/response"); + + // Since models are added asynchronously, we need some way of detecting + // our model add. We do this using a model info subscriber. + this->modelSub_ = this->node_->Subscribe( + "~/model/info", + &WorldController::OnModel, + this); + + // Bind to the world update event to perform some logic + this->onBeginUpdateConnection = gz::event::Events::ConnectWorldUpdateBegin( + [this](const ::gazebo::common::UpdateInfo &_info) { this->OnBeginUpdate(_info); }); + + // Bind to the world update event to perform some logic + this->onEndUpdateConnection = gz::event::Events::ConnectWorldUpdateEnd( + [this]() { this->OnEndUpdate(); }); + + // Robot reports subscription + this->robotLearningStatesSub = this->node_->Subscribe( + "~/revolve/robot_reports", + &WorldController::OnRobotReport, + this); + + std::cout << "World plugin loaded." 
<< std::endl; } void WorldController::Reset() -{ - this->lastRobotStatesUpdateTime_ = 0; //this->world_->SimTime().Double(); -} +{} ///////////////////////////////////////////////// -void WorldController::OnBeginUpdate(const ::gazebo::common::UpdateInfo &_info) { - if (not this->robotStatesPubFreq_) { - return; - } - - auto secs = 1.0 / this->robotStatesPubFreq_; - auto time = _info.simTime.Double(); - if ((time - this->lastRobotStatesUpdateTime_) >= secs) { - // Send robot info update message, this only sends the - // main pose of the robot (which is all we need for now) - msgs::RobotStates msg; - gz::msgs::Set(msg.mutable_time(), _info.simTime); - - { - boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); - for (const auto &model : this->world_->Models()) { - if (model->IsStatic()) { - // Ignore static models such as the ground and obstacles - continue; - } - - revolve::msgs::RobotState *stateMsg = msg.add_robot_state(); - const std::string scoped_name = model->GetScopedName(); - stateMsg->set_name(scoped_name); - stateMsg->set_id(model->GetId()); - - auto poseMsg = stateMsg->mutable_pose(); - auto relativePose = model->RelativePose(); - - gz::msgs::Set(poseMsg, relativePose); - - // Death sentence check - const std::string name = model->GetName(); - bool death_sentence = false; - double death_sentence_value = 0; - { - boost::mutex::scoped_lock lock_death(death_sentences_mutex_); - death_sentence = death_sentences_.count(name) > 0; - if (death_sentence) - death_sentence_value = death_sentences_[name]; - } - - if (death_sentence) { - if (death_sentence_value < 0) { - // Initialize death sentence - death_sentences_[name] = time - death_sentence_value; - stateMsg->set_dead(false); - } else { - bool alive = death_sentence_value > time; - stateMsg->set_dead(not alive); - - if (not alive) { - boost::mutex::scoped_lock lock(this->death_sentences_mutex_); - this->death_sentences_.erase(model->GetName()); - - this->models_to_remove.emplace_back(model); - } - } - } - } - } - - if (msg.robot_state_size() > 0) { - this->robotStatesPub_->Publish(msg); - this->lastRobotStatesUpdateTime_ = time; - } - } - +void WorldController::OnBeginUpdate(const ::gazebo::common::UpdateInfo &_info) +{ + boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); -// if (world_insert_remove_mutex.try_lock()) { + if (model_remove_mutex.try_lock()) { for (const auto &model: this->models_to_remove) { - std::cout << "Removing " << model->GetScopedName() << std::endl; -// this->world_->RemoveModel(model); -// gz::msgs::Request deleteReq; -// auto id = gz::physics::getUniqueId(); -// deleteReq.set_id(id); -// deleteReq.set_request("entity_delete"); -// deleteReq.set_data(model->GetScopedName()); -// this->requestPub_->Publish(deleteReq); gz::transport::requestNoReply(this->world_->Name(), "entity_delete", model->GetScopedName()); - std::cout << "Removed " << model->GetScopedName() << std::endl; - } this->models_to_remove.clear(); -// this->world_insert_remove_mutex.unlock(); -// } + this->model_remove_mutex.unlock(); + } } void WorldController::OnEndUpdate() { - { // check if there are robots to delete - std::tuple< ::gazebo::physics::ModelPtr, int> delete_robot; - { - boost::mutex::scoped_lock lock(this->deleteMutex_); - if (not this->delete_robot_queue.empty()) { - delete_robot = this->delete_robot_queue.front(); - this->delete_robot_queue.pop(); - } - } - auto model = std::get<0>(delete_robot); - auto request_id = 
std::get<1>(delete_robot); - if (model) - { - { -// boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); - this->world_->RemoveModel(model); - } - - gz::msgs::Response resp; - resp.set_id(request_id); - resp.set_request("delete_robot"); - resp.set_response("success"); - this->responsePub_->Publish(resp); - } - } - - { // check if there are robots to insert - boost::mutex::scoped_lock lock(this->insertMutex_); - for (auto &iterator: this->insertMap_) - { - bool &insert_operation_pending = std::get<2>(iterator.second); - //std::cout << "trying to insert " << iterator.first << " - " << insert_operation_pending << std::endl; -// if (insert_operation_pending and this->world_insert_remove_mutex.try_lock()) - if (insert_operation_pending) - { - // Start insert operation! -// boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); - const std::string &robotSDF = std::get<1>(iterator.second); - this->world_->InsertModelString(robotSDF); - insert_operation_pending = false; - break; - } - } - } } @@ -264,98 +157,61 @@ void WorldController::OnEndUpdate() // Process insert and delete requests void WorldController::HandleRequest(ConstRequestPtr &request) { - if (request->request() == "delete_robot") - { - auto name = request->data(); - std::cout << "Processing request `" << request->id() - << "` to delete robot `" << name << "`" << std::endl; - -// auto model = this->world_->ModelByName(name); -// if (model) -// { -// // Tell the world to remove the model -// // Using `World::RemoveModel()` from here crashes the transport -// // library, the cause of which I've yet to figure out - it has -// // something to do with race conditions where the model is used by -// // the world while it is being updated. Fixing this completely -// // appears to be a rather involved process, instead, we'll use an -// // `entity_delete` request, which will make sure deleting the model -// // happens on the world thread. -// gz::msgs::Request deleteReq; -// auto id = gz::physics::getUniqueId(); -// deleteReq.set_id(id); -// deleteReq.set_request("entity_delete"); -// deleteReq.set_data(model->GetScopedName()); -// -// { -// boost::mutex::scoped_lock lock(this->deleteMutex_); -// this->delete_robot_queue.emplace(std::make_tuple(model, request->id())); -// } -// { -// boost::mutex::scoped_lock lock(this->death_sentences_mutex_); -// this->death_sentences_.erase(model->GetName()); -// } -// -// this->requestPub_->Publish(deleteReq); -// } -// else -// { - std::cerr << "Model `" << name << "` could not be found in the world." - << std::endl; - gz::msgs::Response resp; - resp.set_id(request->id()); - resp.set_request("delete_robot"); - resp.set_response("error"); - this->responsePub_->Publish(resp); -// } - } - else if (request->request() == "insert_sdf") - { - std::cout << "Processing insert model request ID `" << request->id() << "`." - << std::endl; - sdf::SDF robotSDF; - robotSDF.SetFromString(request->data()); - double lifespan_timeout = request->dbl_data(); - - // Get the model name, store in the expected map - auto name = robotSDF.Root()->GetElement("model")->GetAttribute("name") - ->GetAsString(); - - if (lifespan_timeout > 0) - { - boost::mutex::scoped_lock lock(death_sentences_mutex_); - // Initializes the death sentence negative because I don't dare to take the - // simulation time from this thread. 
- death_sentences_[name] = -lifespan_timeout; + const std::string &request_type = request->request(); + + if (request_type == "delete_robot") { + const std::string &name = request->data(); + std::cerr << "Removing Model `" << name << "` operation is not supported." << std::endl; + gz::msgs::Response resp; + resp.set_id(request->id()); + resp.set_request("delete_robot"); + resp.set_response("not_supported"); + this->responsePub_->Publish(resp); } - + else if (request_type == "insert_sdf") { - boost::mutex::scoped_lock lock(this->insertMutex_); - this->insertMap_[name] = std::make_tuple(request->id(), robotSDF.ToString(), true); - } + std::cout << "Processing insert model request ID `" << request->id() << "`." + << std::endl; + sdf::SDF robotSDF; + robotSDF.SetFromString(request->data()); - //TODO insert here, it's better - //this->world_->InsertModelString(robotSDF.ToString()); - - // Don't leak memory - // https://bitbucket.org/osrf/sdformat/issues/104/memory-leak-in-element - robotSDF.Root()->Reset(); - } - else if (request->request() == "set_robot_state_update_frequency") - { - auto frequency = request->data(); - assert(frequency.find_first_not_of( "0123456789" ) == std::string::npos); - this->robotStatesPubFreq_ = (unsigned int)std::stoul(frequency); - std::cout << "Setting robot state update frequency to " - << this->robotStatesPubFreq_ << "." << std::endl; + // Get the model name, store in the expected map + auto name = robotSDF.Root() + ->GetElement("model") + ->GetAttribute("name") + ->GetAsString(); - gz::msgs::Response resp; - resp.set_id(request->id()); - resp.set_request("set_robot_state_update_frequency"); - resp.set_response("success"); + { + boost::mutex::scoped_lock lock(this->insertMutex_); + this->insertMap_[name] = request->id(); + } - this->responsePub_->Publish(resp); - } + // insert here, it's better. Here you can insert when the world is paused + { + boost::recursive_mutex::scoped_lock lock_physics(*this->world_->Physics()->GetPhysicsUpdateMutex()); + this->world_->InsertModelString(robotSDF.ToString()); + } + + // Don't leak memory + // https://bitbucket.org/osrf/sdformat/issues/104/memory-leak-in-element + robotSDF.Root()->Reset(); + } + else if (request_type == "set_robot_state_update_frequency") + { + // Handle and fail this message, it could cause weird deadlocks if it's not responded properly + auto frequency = request->data(); + assert(frequency.find_first_not_of("0123456789") == std::string::npos); + unsigned int value = std::stoul(frequency); + std::cout << "Ignoring command to set robot state update frequency to " + << value << '.' 
<< std::endl; + + gz::msgs::Response resp; + resp.set_id(request->id()); + resp.set_request("set_robot_state_update_frequency"); + resp.set_response("not_supported"); + + this->responsePub_->Publish(resp); + } } ///////////////////////////////////////////////// @@ -364,9 +220,7 @@ void WorldController::OnModel(ConstModelPtr &msg) auto name = msg->name(); std::cout << "WorldController::OnModel(" << name << ')' << std::endl; - int id; - bool insert_operation_pending; { boost::mutex::scoped_lock lock(this->insertMutex_); if (this->insertMap_.count(name) <= 0) @@ -374,15 +228,7 @@ void WorldController::OnModel(ConstModelPtr &msg) // Insert was not requested here, ignore it return; } - const std::tuple &entry = this->insertMap_[name]; - id = std::get<0>(entry); - insert_operation_pending = std::get<2>(entry); - if (insert_operation_pending) - { - // Insert operation has not been done yet - // (but you should never be here, because we are in the "OnModel" function - return; - } + id = this->insertMap_[name]; this->insertMap_.erase(name); } @@ -399,8 +245,6 @@ void WorldController::OnModel(ConstModelPtr &msg) this->responsePub_->Publish(resp); -// this->world_insert_remove_mutex.unlock(); - std::cout << "Model `" << name << "` inserted, world now contains " << this->world_->ModelCount() << " models." << std::endl; } @@ -408,30 +252,14 @@ void WorldController::OnModel(ConstModelPtr &msg) ///////////////////////////////////////////////// void WorldController::HandleResponse(ConstResponsePtr &response) { -// std::cout << "WorldController::HandleResponse(" << response->request() << ')' << std::endl; +} - if (response->request() not_eq "entity_delete") +void WorldController::OnRobotReport(const boost::shared_ptr &msg) +{ + if (msg->dead()) { - return; + boost::mutex::scoped_lock lock(this->model_remove_mutex); + gz::physics::ModelPtr model = world_->ModelByName(msg->id()); + this->models_to_remove.emplace_back(std::move(model)); } - -// int id; -// { -// boost::mutex::scoped_lock lock(this->deleteMutex_); -// if (this->deleteMap_.count(response->id()) <= 0) -// { -// return; -// } -// -// id = this->deleteMap_[response->id()]; -// this->deleteMap_.erase(id); -// } - -// this->world_insert_remove_mutex.unlock(); - -// gz::msgs::Response resp; -// resp.set_id(id); -// resp.set_request("delete_robot"); -// resp.set_response("success"); -// this->responsePub_->Publish(resp); } diff --git a/cpprevolve/revolve/gazebo/plugin/WorldController.h b/cpprevolve/revolve/gazebo/plugin/WorldController.h index 359badbd96..b4d10a3477 100644 --- a/cpprevolve/revolve/gazebo/plugin/WorldController.h +++ b/cpprevolve/revolve/gazebo/plugin/WorldController.h @@ -13,16 +13,11 @@ * limitations under the License. * * Description: TODO: -* Author: Elte Hupkes +* Author: Elte Hupkes and Matteo De Carlo * */ -// -// Created by elte on 6-6-15. 
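// Illustration only, not part of the patch: a learning report consumed by
// WorldController::OnRobotReport above can be built from the LearningRobotStates
// message added in robot_states_learning.proto. All values are placeholders and
// "report_pub", "world" and "model" are hypothetical pointers.
revolve::msgs::LearningRobotStates report;
report.set_id("robot_1");
report.set_eval(42);
report.set_fitness(1.23);
report.set_dead(false);
revolve::msgs::BehaviourData *sample = report.add_behaviour();
gz::msgs::Set(sample->mutable_time(), world->SimTime());
gz::msgs::Set(sample->mutable_pose(), model->WorldPose());
// report_pub->Publish(report);   // e.g. on the ~/revolve/robot_reports topic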
-// - -#ifndef REVOLVE_WORLDCONTROLLER_H -#define REVOLVE_WORLDCONTROLLER_H +#pragma once #include #include @@ -37,22 +32,23 @@ #include #include +#include namespace revolve { namespace gazebo { -class WorldController: public ::gazebo::WorldPlugin +class WorldController : public ::gazebo::WorldPlugin { public: WorldController(); - virtual ~WorldController(); + ~WorldController() override; - virtual void Load( + void Load( ::gazebo::physics::WorldPtr _parent, sdf::ElementPtr _sdf) override; - virtual void Reset() override; + void Reset() override; protected: // Listener for analysis requests @@ -69,12 +65,14 @@ class WorldController: public ::gazebo::WorldPlugin virtual void OnEndUpdate(); + void OnRobotReport(const boost::shared_ptr &msg); + +protected: + const bool enable_parallelization; + // Maps model names to insert request IDs // model_name -> request_id, SDF, insert_operation_pending - std::map > insertMap_; - - // Queue of `delete_robot` requests - std::queue> delete_robot_queue; + std::map insertMap_; // Stores the world ::gazebo::physics::WorldPtr world_; @@ -85,9 +83,6 @@ class WorldController: public ::gazebo::WorldPlugin // Mutex for the insertMap_ boost::mutex insertMutex_; - // Mutex for the deleteMap_ - boost::mutex deleteMutex_; - // Request subscriber ::gazebo::transport::SubscriberPtr requestSub_; @@ -103,34 +98,16 @@ class WorldController: public ::gazebo::WorldPlugin // Subscriber for actual model insertion ::gazebo::transport::SubscriberPtr modelSub_; - // Publisher for periodic robot poses - ::gazebo::transport::PublisherPtr robotStatesPub_; - - // Frequency at which robot info is published - // Defaults to 0, which means no update at all - unsigned int robotStatesPubFreq_; + // Subscriber for periodic robot learning reports + ::gazebo::transport::SubscriberPtr robotLearningStatesSub; // Pointer to the update event connection ::gazebo::event::ConnectionPtr onBeginUpdateConnection; ::gazebo::event::ConnectionPtr onEndUpdateConnection; - // Last (simulation) time robot info was sent - double lastRobotStatesUpdateTime_; - - // Death sentence list. It collects all the end time for all robots that have - // a death sentence - // NEGATIVE DEATH SENTENCES mean total lifetime, death sentence not yet initialized. - std::map death_sentences_; - - // Mutex for the deleteMap_ - boost::mutex death_sentences_mutex_; - -// boost::mutex world_insert_remove_mutex; - + boost::mutex model_remove_mutex; ::gazebo::physics::Model_V models_to_remove; }; } // namespace gazebo } // namespace revolve - -#endif // REVOLVE_WORLDCONTROLLER_H diff --git a/cpprevolve/revolve/gazebo/plugin/register_realtime_world_plugin.cpp b/cpprevolve/revolve/gazebo/plugin/register_realtime_world_plugin.cpp new file mode 100644 index 0000000000..8d0be3ebd6 --- /dev/null +++ b/cpprevolve/revolve/gazebo/plugin/register_realtime_world_plugin.cpp @@ -0,0 +1,23 @@ +/* +* Copyright (C) 2018 Vrije Universiteit Amsterdam +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* +* Author: Matteo De Carlo +* +*/ + +#include +#include + +using namespace gazebo; +GZ_REGISTER_WORLD_PLUGIN(revolve::gazebo::RealtimeWorldController) diff --git a/cpprevolve/revolve/gazebo/util/YamlBodyParser.cpp b/cpprevolve/revolve/gazebo/util/YamlBodyParser.cpp index 2be14ae083..ec96bd756a 100644 --- a/cpprevolve/revolve/gazebo/util/YamlBodyParser.cpp +++ b/cpprevolve/revolve/gazebo/util/YamlBodyParser.cpp @@ -72,7 +72,7 @@ YamlBodyParser::YamlBodyParser(const std::string &_genome) } this->Init(yaml_genome); -}; +} /////////////////////////////////////////////////// void YamlBodyParser::ParseFile(const std::string &_file_path) diff --git a/cpprevolve/revolve/raspberry/AngleToTargetDetector.cpp b/cpprevolve/revolve/raspberry/AngleToTargetDetector.cpp new file mode 100644 index 0000000000..3576830bc4 --- /dev/null +++ b/cpprevolve/revolve/raspberry/AngleToTargetDetector.cpp @@ -0,0 +1,56 @@ +// +// Created by matteo on 2/28/20. +// + +#include "AngleToTargetDetector.h" +#include +#include + +using namespace revolve::raspberry; + +AngleToTargetDetector::AngleToTargetDetector(int camera_index, unsigned int shrink_factor, bool show_image) + : ::revolve::AngleToTargetDetector(shrink_factor, show_image) + , camera(nullptr) + , usb_camera(nullptr) +{ + if (camera_index == -1) { + camera = new raspicam::RaspiCam_Cv(); + if (not camera->open()) + throw std::runtime_error("Error opening the raspberry camera"); + } else { + usb_camera = new cv::VideoCapture(camera_index); + if (not usb_camera->isOpened()) + throw std::runtime_error("Error opening the usb camera at index " + std::to_string(camera_index)); + } +} + +AngleToTargetDetector::~AngleToTargetDetector() +{ + delete camera; + delete usb_camera; +} + +void AngleToTargetDetector::get_image(cv::Mat &raw_image) +{ + bool result = false; + + if (camera) { + result = camera->grab(); + camera->retrieve(raw_image); + } else if (usb_camera) { + result = usb_camera->read(raw_image); + } else { + throw std::runtime_error("Camera device not found"); + } + + if (not result or raw_image.empty()) { + throw std::runtime_error("Error! could not capture image from the camera"); + } +} + +float AngleToTargetDetector::detect_angle() +{ + float angle = ::revolve::AngleToTargetDetector::detect_angle(); + std::cout << "Detected angle: " << angle << std::endl; + return angle; +} diff --git a/cpprevolve/revolve/raspberry/AngleToTargetDetector.h b/cpprevolve/revolve/raspberry/AngleToTargetDetector.h new file mode 100644 index 0000000000..cac8d20835 --- /dev/null +++ b/cpprevolve/revolve/raspberry/AngleToTargetDetector.h @@ -0,0 +1,38 @@ +// +// Created by matteo on 2/28/20. 
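// Usage sketch (not part of the patch) for the raspberry-side detector implemented in
// AngleToTargetDetector.cpp above and declared in the header below. The function name is
// illustrative; the constructor arguments follow the declaration shown in the header.
#include <iostream>
#include "AngleToTargetDetector.h"

void example_angle_poll()
{
    // camera_index == -1 selects the on-board Raspberry Pi camera (raspicam);
    // any other value is treated as an OpenCV VideoCapture index (USB camera).
    revolve::raspberry::AngleToTargetDetector detector(/*camera_index=*/-1,
                                                       /*shrink_factor=*/4,
                                                       /*show_image=*/false);

    // detect_angle() grabs one frame via get_image() and logs the detected angle
    // before returning it (units as defined by the base class implementation).
    const float angle = detector.detect_angle();
    std::cout << "angle to target: " << angle << std::endl;
}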
+// + +#ifndef REVOLVE_RASPBERRY_ANGLETOTARGETDETECTOR_H +#define REVOLVE_RASPBERRY_ANGLETOTARGETDETECTOR_H + +#include +#include +#include "../brains/controller/sensors/AngleToTargetDetector.h" + +namespace revolve { +namespace raspberry { + +class AngleToTargetDetector : public ::revolve::AngleToTargetDetector { +public: + /** + * @param camera_index pass -1 to load the raspberry camera + */ + explicit AngleToTargetDetector(int camera_index, unsigned int shrink_factor = 4, bool show_image = false); + virtual ~AngleToTargetDetector(); + +public: + float detect_angle() override; + +private: + void get_image(cv::Mat &image) override; + +protected: + raspicam::RaspiCam_Cv *camera; + cv::VideoCapture *usb_camera; +}; + +} +} + + +#endif // REVOLVE_RASPBERRY_ANGLETOTARGETDETECTOR_H diff --git a/cpprevolve/revolve/raspberry/CMakeLists.txt b/cpprevolve/revolve/raspberry/CMakeLists.txt index 20101930cf..ecc2d7c2a3 100644 --- a/cpprevolve/revolve/raspberry/CMakeLists.txt +++ b/cpprevolve/revolve/raspberry/CMakeLists.txt @@ -2,6 +2,8 @@ message(WARNING "Building Raspberry code") # Find Yaml-cpp find_package(yaml-cpp REQUIRED) +find_package(raspicam REQUIRED) + include_directories(${YAML_CPP_INCLUDE_DIR}) file(GLOB_RECURSE @@ -15,9 +17,11 @@ target_link_libraries(revolve-raspberry PUBLIC revolve-controllers PUBLIC pigpio_if2 ${YAML_CPP_LIBRARIES} + ${raspicam_CV_LIBS} ) -include_directories(${PIGPIO_HEADER_DIR}) +target_include_directories(revolve-raspberry + PUBLIC ${PIGPIO_HEADER_DIR}) install(TARGETS revolve-raspberry RUNTIME DESTINATION bin diff --git a/cpprevolve/revolve/raspberry/PIGPIOConnection.h b/cpprevolve/revolve/raspberry/PIGPIOConnection.h index f0239d0272..474c0724ff 100644 --- a/cpprevolve/revolve/raspberry/PIGPIOConnection.h +++ b/cpprevolve/revolve/raspberry/PIGPIOConnection.h @@ -2,8 +2,8 @@ // Created by matteo on 14/06/19. 
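// Hypothetical test double (not in this patch): it feeds a stored frame to the angle
// detector so the vision pipeline can be exercised without a Pi or USB camera attached.
// It assumes only what the header above shows: the base constructor
// ::revolve::AngleToTargetDetector(shrink_factor, show_image) and the virtual
// get_image(cv::Mat&) hook. The class and member names here are made up.
#include <opencv2/imgcodecs.hpp>
#include <stdexcept>
#include <string>
#include "../brains/controller/sensors/AngleToTargetDetector.h"

class FileAngleToTargetDetector : public ::revolve::AngleToTargetDetector {
public:
    explicit FileAngleToTargetDetector(std::string image_path,
                                       unsigned int shrink_factor = 4)
        : ::revolve::AngleToTargetDetector(shrink_factor, /*show_image=*/false)
        , image_path(std::move(image_path))
    {}

private:
    void get_image(cv::Mat &raw_image) override
    {
        // Load a pre-recorded frame instead of grabbing one from hardware
        raw_image = cv::imread(image_path);
        if (raw_image.empty())
            throw std::runtime_error("Could not read test image " + image_path);
    }

    std::string image_path;
};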
// -#ifndef REVOLVE_PIGPIOCONNECTION_H -#define REVOLVE_PIGPIOCONNECTION_H +#ifndef REVOLVE_RASPBERRY_PIGPIOCONNECTION_H +#define REVOLVE_RASPBERRY_PIGPIOCONNECTION_H extern "C" { #include "pigpiod_if2.h" @@ -47,4 +47,4 @@ class PIGPIOConnection { }; -#endif //REVOLVE_PIGPIOCONNECTION_H +#endif //REVOLVE_RASPBERRY_PIGPIOCONNECTION_H diff --git a/cpprevolve/revolve/raspberry/RaspController.cpp b/cpprevolve/revolve/raspberry/RaspController.cpp index 1a424b2fc0..f98bb16097 100644 --- a/cpprevolve/revolve/raspberry/RaspController.cpp +++ b/cpprevolve/revolve/raspberry/RaspController.cpp @@ -3,31 +3,36 @@ // #include "RaspController.h" +#include +#include +#include #include "../brains/controller/DifferentialCPG.h" +#include "../brains/controller/BrokenDifferentialCPG.h" +#include "AngleToTargetDetector.h" #include -using namespace revolve; +using namespace revolve::raspberry; RaspController::RaspController( - std::vector > actuators, - std::vector > sensors, + std::vector > actuators, + std::vector > sensors, const YAML::Node &conf) : revolve_controller(nullptr) + , camera(nullptr) , actuators(std::move(actuators)) , sensors(std::move(sensors)) { + // camera can be nullptr + this->load_camera(conf["angle_to_target_detector"]); this->set_new_controller(conf); } RaspController::~RaspController() = default; -#include -#include - void RaspController::update() { - double step = this->timer.step(); - double time = this->timer.elapsed(); + const double step = this->timer.step_double(); + const double time = this->timer.elapsed_double(); if (step == 0) return; this->revolve_controller->update( @@ -36,33 +41,89 @@ void RaspController::update() time, step ); -// std::this_thread::sleep_for(std::chrono::milliseconds(125)); + + // negative update rate means run as fast as possible + if (this->update_rate <= std::chrono::milliseconds::zero()) return; + + const Timer::Seconds cpu_time_spent = this->timer.step_elapsed(); + const Timer::Seconds remaining_wait_time = this->update_rate - cpu_time_spent; + if (remaining_wait_time > Timer::Seconds::zero()) { + std::this_thread::sleep_for(remaining_wait_time); + } else { + std::clog << "CPU too slow, we missed the cycle deadline of " << (remaining_wait_time).count() / -1000 << " ms " << std::endl; + } +} + +void RaspController::load_camera(const YAML::Node &conf) +{ + if (not conf) { + std::cout << "Camera not found, the camera will be deactivated." << std::endl; + return; + } + + int camera_index; + std::string camera_type = conf["type"].as(); + if (camera_type == "raspberry-camera") { + camera_index = -1; + std::cout << "Loading Raspberry camera" << std::endl; + } else if (camera_type == "usb") { + camera_index = conf["index"].as(); + std::cout << "Loading usb camera at index " << camera_index << std::endl; + } else { + throw std::runtime_error("Camera type " + camera_type + "not recognized"); + } + + const unsigned int camera_shrink_factor = conf["shrink_factor"].as(4); + const bool show_camera = conf["show_camera"].as(false); + + std::cout << "Camera shrink_factor( " << camera_shrink_factor << " ) show_camera( " << (show_camera ? 
"true" : "false") << " )" << std::endl; + + camera.reset(new ::revolve::raspberry::AngleToTargetDetector(camera_index, camera_shrink_factor, show_camera)); } void RaspController::set_new_controller(const YAML::Node &conf) { + // Update rate in ms + int64_t _update_rate = conf["update_rate"].as(-1); + this->update_rate = std::chrono::duration_cast(std::chrono::milliseconds(_update_rate)); + std::string type = conf["type"].as(""); if (type.empty()) { throw std::runtime_error("Controller type not set"); + } else if (type == "broken-differential-cpg") { + BrokenDifferentialCPG::ControllerParams params; + params.reset_neuron_random = conf["reset_neuron_random"].as(false); + params.use_frame_of_reference = conf["use_frame_of_reference"].as(false); + params.init_neuron_state = conf["init_neuron_state"].as(0.707); + params.range_ub = conf["range_ub"].as(1.0); + params.signal_factor_all = conf["output_signal_factor"].as(1.0); + params.abs_output_bound = conf["abs_output_bound"].as(1.0); + + YAML::Node yaml_weights = conf["weights"]; + for(const YAML::Node &weight: yaml_weights) { + params.weights.emplace_back(weight.as()); + } + + revolve_controller = std::make_unique( + params, this->actuators, camera + ); } else if (type == "differential-cpg") { DifferentialCPG::ControllerParams params; - params.reset_neuron_random = false; - params.use_frame_of_reference = false; - params.init_neuron_state = 0.707; - params.range_ub = 1.0; - params.signal_factor_all = 1.0; - params.signal_factor_mid = 2.5; - params.signal_factor_left_right = 2.5; - params.abs_output_bound = 1.0; + params.reset_neuron_random = conf["reset_neuron_random"].as(false); + params.use_frame_of_reference = conf["use_frame_of_reference"].as(false); + params.init_neuron_state = conf["init_neuron_state"].as(0.707); + params.range_ub = conf["range_ub"].as(1.0); + params.output_signal_factor = conf["output_signal_factor"].as(1.0); + params.abs_output_bound = conf["abs_output_bound"].as(1.0); YAML::Node yaml_weights = conf["weights"]; for(const YAML::Node &weight: yaml_weights) { params.weights.emplace_back(weight.as()); } - revolve_controller.reset( - new DifferentialCPG(params,this->actuators) + revolve_controller = std::make_unique( + params, this->actuators, camera ); } else { throw std::runtime_error("Controller " + type + " not supported (yet)"); diff --git a/cpprevolve/revolve/raspberry/RaspController.h b/cpprevolve/revolve/raspberry/RaspController.h index 00ba47996e..c327b8abca 100644 --- a/cpprevolve/revolve/raspberry/RaspController.h +++ b/cpprevolve/revolve/raspberry/RaspController.h @@ -2,38 +2,43 @@ // Created by matteo on 14/06/19. 
// -#ifndef REVOLVE_RASPCONTROLLER_H -#define REVOLVE_RASPCONTROLLER_H +#ifndef REVOLVE_RASPBERRY_RASPCONTROLLER_H +#define REVOLVE_RASPBERRY_RASPCONTROLLER_H #include #include #include "../brains/controller/Controller.h" +#include "../brains/controller/sensors/AngleToTargetDetector.h" #include "Timer.h" namespace revolve { +namespace raspberry { -class RaspController -{ +class RaspController { public: explicit RaspController( - std::vector > actuators, - std::vector > sensors, + std::vector > actuators, + std::vector > sensors, const YAML::Node &conf); ~RaspController(); void update(); + void load_camera(const YAML::Node &conf); void set_new_controller(const YAML::Node &conf); private: - std::unique_ptr revolve_controller; + std::unique_ptr<::revolve::Controller> revolve_controller; + std::shared_ptr<::revolve::AngleToTargetDetector> camera; Timer timer; - std::vector< std::unique_ptr< revolve::Actuator > > actuators; - std::vector< std::unique_ptr< revolve::Sensor > > sensors; + std::vector > actuators; + std::vector > sensors; + + /// Update rate in seconds + Timer::Seconds update_rate; }; } - - -#endif //REVOLVE_RASPCONTROLLER_H +} +#endif //REVOLVE_RASPBERRY_RASPCONTROLLER_H diff --git a/cpprevolve/revolve/raspberry/Servo.cpp b/cpprevolve/revolve/raspberry/Servo.cpp index 2f221c0c23..23e4f9ef2c 100644 --- a/cpprevolve/revolve/raspberry/Servo.cpp +++ b/cpprevolve/revolve/raspberry/Servo.cpp @@ -5,7 +5,7 @@ #include "Servo.h" #include "PIGPIOConnection.h" -using namespace revolve; +using namespace revolve::raspberry; #define POSITION_OFF 0 #define POSITION_BEGIN 40 diff --git a/cpprevolve/revolve/raspberry/Servo.h b/cpprevolve/revolve/raspberry/Servo.h index 6c2ee1ef3c..33fdb6efa7 100644 --- a/cpprevolve/revolve/raspberry/Servo.h +++ b/cpprevolve/revolve/raspberry/Servo.h @@ -2,17 +2,17 @@ // Created by matteo on 14/06/19. 
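// Standalone sketch (not part of this patch) of the pacing scheme used by
// RaspController::update() earlier in RaspController.cpp: measure how long one controller
// step took, then sleep only for the remainder of the cycle budget and warn when the
// deadline was missed. The function name and std::function parameter are illustrative.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

inline void paced_step(const std::chrono::milliseconds cycle_budget,
                       const std::function<void()> &step)
{
    const auto start = std::chrono::steady_clock::now();
    step();  // one controller update
    const auto spent = std::chrono::steady_clock::now() - start;
    const auto remaining = cycle_budget - spent;
    if (remaining > std::chrono::steady_clock::duration::zero()) {
        std::this_thread::sleep_for(remaining);  // keep the loop at the requested rate
    } else {
        std::clog << "CPU too slow, missed the cycle deadline by "
                  << std::chrono::duration_cast<std::chrono::milliseconds>(-remaining).count()
                  << " ms" << std::endl;
    }
}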
// -#ifndef REVOLVE_SERVO_H -#define REVOLVE_SERVO_H +#ifndef REVOLVE_RASPBERRY_SERVO_H +#define REVOLVE_RASPBERRY_SERVO_H #include "PIGPIOConnection.h" #include "../brains/controller/actuators/Actuator.h" #include namespace revolve { +namespace raspberry { -class Servo: public revolve::Actuator -{ +class Servo : public ::revolve::Actuator { public: explicit Servo( double coordinate_x, @@ -20,12 +20,13 @@ class Servo: public revolve::Actuator double coordinate_z, PIGPIOConnection *connection, unsigned short pin, - unsigned int frequency=50, - int range=1000, - bool inverse=false + unsigned int frequency = 50, + int range = 1000, + bool inverse = false ); - ~Servo() { + ~Servo() + { this->off(); } @@ -36,7 +37,8 @@ class Servo: public revolve::Actuator */ void move_to_position(double position); - void center() { move_to_position(0); } + void center() + { move_to_position(0); } void off(); @@ -45,7 +47,8 @@ class Servo: public revolve::Actuator std::ostream &print(std::ostream &os) const { os << "Servo pin:\t" << this->pin << std::endl; - os << "\tcoordinates [" << this->coordinate_x() << ',' << this->coordinate_y() << ',' << this->coordinate_z() << ']' << std::endl; + os << "\tcoordinates [" << this->coordinate_x() << ',' << this->coordinate_y() << ',' + << this->coordinate_z() << ']' << std::endl; os << "\tfrequency:\t" << this->frequency << std::endl; os << "\trange: \t" << this->range << std::endl; os << "\tinverse: \t" << this->inverse; @@ -63,8 +66,9 @@ class Servo: public revolve::Actuator float maxPWM; }; +} } -std::ostream &operator<<(std::ostream &os, revolve::Servo const &s); +std::ostream &operator<<(std::ostream &os, revolve::raspberry::Servo const &s); -#endif //REVOLVE_SERVO_H +#endif //REVOLVE_RASPBERRY_SERVO_H diff --git a/cpprevolve/revolve/raspberry/Timer.h b/cpprevolve/revolve/raspberry/Timer.h index 16d0c17cfc..fec49c7fd3 100644 --- a/cpprevolve/revolve/raspberry/Timer.h +++ b/cpprevolve/revolve/raspberry/Timer.h @@ -2,38 +2,48 @@ // Created by matteo on 17/06/19. 
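// Sketch only: exercising the Servo API declared above, assuming a PIGPIOConnection has
// already been set up elsewhere (as done in rasp_main.cpp). The pin and geometry values
// are made up for illustration.
#include <iostream>
#include "Servo.h"

inline void demo_servo(PIGPIOConnection *connection)
{
    revolve::raspberry::Servo servo(/*coordinate_x=*/0.0, /*coordinate_y=*/0.0, /*coordinate_z=*/0.0,
                                    connection,
                                    /*pin=*/17,
                                    /*frequency=*/50,
                                    /*range=*/1000,
                                    /*inverse=*/false);
    std::cout << servo << std::endl;  // uses the operator<< declared after the class
    servo.center();                   // move to the neutral position
    servo.off();                      // also invoked by the destructor
}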
// -#ifndef REVOLVE_TIMER_H -#define REVOLVE_TIMER_H +#ifndef REVOLVE_RASPBERRY_TIMER_H +#define REVOLVE_RASPBERRY_TIMER_H + #include #include class Timer { public: + typedef std::chrono::high_resolution_clock Clock; + typedef std::chrono::duration > Seconds; + Timer() - : beg_(clock_::now()) + : beg_(Clock::now()) , last_step_(beg_) {} - void reset() { beg_ = clock_::now(); } - double step() { - std::chrono::time_point prev = last_step_; - last_step_ = clock_::now(); - return time_difference(prev, last_step_); + void reset() { beg_ = Clock::now(); } + + Seconds step() { + std::chrono::time_point prev = last_step_; + last_step_ = Clock::now(); + return last_step_ - prev; } - double elapsed() const { return time_difference(beg_, last_step_); } - double elapsed_now() const { return time_difference(beg_, clock_::now()); } + + Seconds step_elapsed() const { return Clock::now() - last_step_; } + Seconds elapsed() const { return last_step_ - beg_; } + Seconds elapsed_now() const { return Clock::now() - beg_; } + + double step_double() { return into_double(step()); } + double step_elapsed_double() const { return into_double(step_elapsed()); } + double elapsed_double() const { return into_double(elapsed()); } + double elapsed_now_double() const { return into_double(elapsed_now()); } private: - typedef std::chrono::high_resolution_clock clock_; - typedef std::chrono::duration > second_; - std::chrono::time_point beg_; - std::chrono::time_point last_step_; + std::chrono::time_point beg_; + std::chrono::time_point last_step_; - static double time_difference(const std::chrono::time_point start, const std::chrono::time_point end) + template + static double into_double(const T &duration) { - return std::chrono::duration_cast - (end - start).count(); + return std::chrono::duration_cast(duration).count(); } }; -#endif //REVOLVE_TIMER_H +#endif //REVOLVE_RASPBERRY_TIMER_H diff --git a/cpprevolve/revolve/raspberry/rasp_main.cpp b/cpprevolve/revolve/raspberry/rasp_main.cpp index 5f7681b4a3..c26ccaf6b2 100644 --- a/cpprevolve/revolve/raspberry/rasp_main.cpp +++ b/cpprevolve/revolve/raspberry/rasp_main.cpp @@ -8,7 +8,7 @@ #include #include -typedef std::unique_ptr Servo_p; +typedef std::shared_ptr Servo_p; std::vector read_conf(PIGPIOConnection &pigpio, const YAML::Node &yaml_servos); void reset(std::vector &servos); @@ -93,7 +93,7 @@ std::vector read_conf(PIGPIOConnection &pigpio, const YAML::Node &yaml_ auto frequency = yaml_servo["frequency"].as(50); auto range = yaml_servo["range"] .as(1000); auto inverse = yaml_servo["inverse"] .as(false); - servos.emplace_back(new revolve::Servo( + servos.emplace_back(new revolve::raspberry::Servo( x, y, z, @@ -112,14 +112,14 @@ std::vector read_conf(PIGPIOConnection &pigpio, const YAML::Node &yaml_ void control(std::vector &servos, const YAML::Node &controller_conf) { std::cout << "Staring controller" << std::endl; - std::vector> sensors; - std::vector> actuators; + std::vector> sensors; + std::vector> actuators; actuators.reserve(servos.size()); for (Servo_p &servo: servos) { actuators.emplace_back(std::move(servo)); } - revolve::RaspController controller( + revolve::raspberry::RaspController controller( std::move(actuators), std::move(sensors), controller_conf @@ -133,14 +133,14 @@ void control(std::vector &servos, const YAML::Node &controller_conf) void learner(std::vector &servos, const YAML::Node &controllers_conf) { std::cout << "Staring controller" << std::endl; - std::vector> sensors; - std::vector> actuators; + std::vector> sensors; + std::vector> actuators; 
actuators.reserve(servos.size()); for (Servo_p &servo: servos) { actuators.emplace_back(std::move(servo)); } - revolve::RaspController controller( + revolve::raspberry::RaspController controller( std::move(actuators), std::move(sensors), controllers_conf[0] @@ -152,7 +152,7 @@ void learner(std::vector &servos, const YAML::Node &controllers_conf) std::cout << "Loading controller[" << ++counter << "] fitness: " << controller_conf["fitness"].as(-1) << std::endl; controller.set_new_controller(controller_conf); timer.reset(); - while (timer.elapsed_now() < 30.0) { + while (timer.elapsed_now() < Timer::Seconds(30.0)) { controller.update(); } } diff --git a/cpprevolve/revolve/raspberry/robot_conf.yaml b/cpprevolve/revolve/raspberry/robot_conf.yaml index e139ac18e2..59b25a5402 100644 --- a/cpprevolve/revolve/raspberry/robot_conf.yaml +++ b/cpprevolve/revolve/raspberry/robot_conf.yaml @@ -1,7 +1,7 @@ robot_name: "spider9" robot_id: 1, robot_address: - ip: "192.168.1.25" + #ip: "192.168.1.25" #port: 8888 #"servo_pins": [17,18,27,22,23,10,9,25], @@ -34,31 +34,39 @@ servos: rgb_pins: [15,14,4] -#controller: + + +controller: # spider weights - #type: "differential-cpg" - #weights: [0.545275, 0.48118, 0.677335, 0.834078, 0.331732, 0.479091, 0.87384, 0.527239, 0.0148421, 0.131508, 0.711216, 0.672872, 0.648163, 0.204883, 0.788699, 0.38614, 0.483561, 0.0777244] + type: "differential-cpg" + weights: [0.545275, 0.48118, 0.677335, 0.834078, 0.331732, 0.479091, 0.87384, 0.527239, 0.0148421, 0.131508, 0.711216, 0.672872, 0.648163, 0.204883, 0.788699, 0.38614, 0.483561, 0.0777244] + angle_to_target_detector: + type: "raspberry-camera" +# type: "usb" +# index: 0 + shrink_factor: 4 + show_camera: False -controllers: - # baby+1 learning weights, crescendo - - type: "differential-cpg" - weights: [0.395516, 0.83811, 0.463123, 0.702373, 0.936804, 0.166412, 0.631632, 0.287821, 0.799552, 0.218267, 0.0623519, 0.799128] - fitness: 0.000503405 - - type: "differential-cpg" - weights: [0.871967, 0.441235, 0.684449, 0.58511, 0.899357, 0.497721, 0.342563, 0.164466, 0.676871, 0.475722, 0.405172, 0.370701] - fitness: 0.202879 - - type: "differential-cpg" - weights: [0.223283, 0.912134, 0.374909, 0.931379, 0.733706, 0.283053, 0.0923915, 0.0706944, 0.0915673, 0.53245, 0.962511, 0.512361] - fitness: 0.403299 - - type: "differential-cpg" - weights: [0.302763, 0.913937, 0.376248, 0.911346, 0.712692, 0.298535, 0.0272732, 0.0706944, 0.095008, 0.46318, 0.990895, 0.564726] - fitness: 0.600739 - - type: "differential-cpg" - weights: [0.319842, 0.848368, 0.415024, 0.946114, 0.665586, 0.26537, 0.0609814, 0.0757356, 0.100649, 0.461241, 0.972517, 0.485218] - fitness: 0.801518 - - type: "differential-cpg" - weights: [0.698179, 0.828084, 0.381213, 0.974549, 0.663302, 0.22419, 0.163161, 0.0362292, 0.0558966, 0.357249, 0.960544, 0.512658] - fitness: 1.00555 - - type: "differential-cpg" - weights: [0.65835, 0.723842, 0.431859, 0.919208, 0.721147, 0.160043, 0.138266, 0.0751595, 0.0335994, 0.395186, 0.88612, 0.601392] - fitness: 1.27441 +#controllers: +# # baby+1 learning weights, crescendo +# - type: "differential-cpg" +# weights: [0.395516, 0.83811, 0.463123, 0.702373, 0.936804, 0.166412, 0.631632, 0.287821, 0.799552, 0.218267, 0.0623519, 0.799128] +# fitness: 0.000503405 +# - type: "differential-cpg" +# weights: [0.871967, 0.441235, 0.684449, 0.58511, 0.899357, 0.497721, 0.342563, 0.164466, 0.676871, 0.475722, 0.405172, 0.370701] +# fitness: 0.202879 +# - type: "differential-cpg" +# weights: [0.223283, 0.912134, 0.374909, 0.931379, 0.733706, 
0.283053, 0.0923915, 0.0706944, 0.0915673, 0.53245, 0.962511, 0.512361] +# fitness: 0.403299 +# - type: "differential-cpg" +# weights: [0.302763, 0.913937, 0.376248, 0.911346, 0.712692, 0.298535, 0.0272732, 0.0706944, 0.095008, 0.46318, 0.990895, 0.564726] +# fitness: 0.600739 +# - type: "differential-cpg" +# weights: [0.319842, 0.848368, 0.415024, 0.946114, 0.665586, 0.26537, 0.0609814, 0.0757356, 0.100649, 0.461241, 0.972517, 0.485218] +# fitness: 0.801518 +# - type: "differential-cpg" +# weights: [0.698179, 0.828084, 0.381213, 0.974549, 0.663302, 0.22419, 0.163161, 0.0362292, 0.0558966, 0.357249, 0.960544, 0.512658] +# fitness: 1.00555 +# - type: "differential-cpg" +# weights: [0.65835, 0.723842, 0.431859, 0.919208, 0.721147, 0.160043, 0.138266, 0.0751595, 0.0335994, 0.395186, 0.88612, 0.601392] +# fitness: 1.27441 diff --git a/experiments/IMC/IMC-experiment b/experiments/IMC/IMC-experiment new file mode 100644 index 0000000000..262829331c --- /dev/null +++ b/experiments/IMC/IMC-experiment @@ -0,0 +1,25 @@ +#!/bin/bash + +set -e +set -x +runs=5 +runs_start=0 +start_port=16000 +exp_name=IMC_spider +log_suffix='' +project_folder=experiments/IMC/ +manager="${project_folder}"manager.py + +for i in $(seq $runs) + do + run=$((i+runs_start)) + if ! [ -d "${project_folder}/output${exp_name}${run}" ]; then + mkdir "${project_folder}/output${exp_name}${run}" + fi + if ! [ -d "${project_folder}/output${exp_name}${run}/act_info" ]; then + mkdir "${project_folder}/output${exp_name}${run}/act_info" + fi + screen -d -m -S "${exp_name}_${run}" -L -Logfile "${project_folder}${exp_name}${log_suffix}_${run}.log" nice -n19 ./revolve.py --test-robot "${project_folder}/yaml/${exp_name}${run}.yaml" --simulator-cmd=/home/fuda/Projects/gazebo/build/gazebo/gzserver --experiment-name $exp_name --n-cores 1 --pose-update-frequency 5 --port-start $((${start_port} + ($run*10))) --run $run + done + +date +"%H:%M:%S" diff --git a/experiments/IMC/manager.py b/experiments/IMC/manager.py new file mode 100755 index 0000000000..5b2d8ba656 --- /dev/null +++ b/experiments/IMC/manager.py @@ -0,0 +1,108 @@ +import asyncio +import logging +import sys +import os + +from pygazebo.connection import DisconnectError +from pyrevolve import parser +from pyrevolve.custom_logging import logger +from pyrevolve.revolve_bot import RevolveBot +from pyrevolve.SDF.math import Vector3 +# from pyrevolve.tol.manage import World +from pyrevolve.tol.manage.single_robot_world import SingleRobotWorld as World +from pyrevolve.util.supervisor.supervisor_multi import DynamicSimSupervisor + + + + + +async def run(): + """ + The main coroutine, which is started below + """ + log = logger.create_logger('experiment', handlers=[ + logging.StreamHandler(sys.stdout), + ]) + + # Set debug level to DEBUG + log.setLevel(logging.DEBUG) + + # Parse command line / file input arguments + settings = parser.parse_args() + + + # Start Simulator + if settings.simulator_cmd != 'debug': + simulator_supervisor = DynamicSimSupervisor( + world_file=settings.world, + simulator_cmd=settings.simulator_cmd, + simulator_args=["--verbose"], + # simulator_args=[""] + plugins_dir_path=os.path.join('.', 'build', 'lib'), + models_dir_path=os.path.join('.', 'models'), + simulator_name='/home/fuda/Projects/gazebo/build/gazebo/gzserver' # /home/fuda/Projects/gazebo/build/gazebo/gzserver + ) + await simulator_supervisor.launch_simulator(port=settings.port_start) + await asyncio.sleep(0.1) + + + # Connect to the simulator and pause + connection = await World.create(settings, 
world_address=('127.0.0.1', settings.port_start)) + await asyncio.sleep(1) + await connection.pause(True) + await connection.reset(True) + + # initialization finished + + + # load robot file + robot = RevolveBot() + + # robot_file_path = "experiments/IMC/yaml/Single_link.yaml" #single link testing + # robot_file_path = "experiments/IMC/yaml/IMC_667710.yaml" #sven + # robot_file_path = "experiments/IMC/yaml/IMC_babyA4.yaml" + # robot_file_path = "experiments/IMC/yaml/IMC_babyB9.yaml" + # robot_file_path = "experiments/IMC/yaml/IMC_gecko5.yaml" #sven8 + robot_file_path = "experiments/IMC/yaml/IMC_snake1.yaml" #sven + # robot_file_path = "experiments/IMC/yaml/IMC_spider9.yaml" #spider9 + + + robot.load_file(robot_file_path, conf_type='yaml') + robot.update_substrate() + robot.save_file(f'{robot_file_path}.sdf', conf_type='sdf') + + + # insert robot into the simulator + robot_manager = await connection.insert_robot(robot, Vector3(0, 0, 0.05), life_timeout=None) + await connection.pause(False) + + + + # Start the main life loop + while True: + status = 'dead' if robot_manager.dead else 'alive' + best_fitness = None if robot_manager.best_evaluation is None else robot_manager.best_evaluation.fitness + log.info(f"status: {status} - Robot fitness: {best_fitness}") + await asyncio.sleep(5.0) + +def main(): + def handler(loop, context): + exc = context['exception'] + if isinstance(exc, DisconnectError) \ + or isinstance(exc, ConnectionResetError): + print("Got disconnect / connection reset - shutting down.") + sys.exit(0) + raise context['exception'] + + try: + loop = asyncio.get_event_loop() + loop.set_exception_handler(handler) + loop.run_until_complete(run()) + except KeyboardInterrupt: + print("Got CtrlC, shutting down.") + + +if __name__ == '__main__': + print("STARTING") + main() + print("FINISHED") diff --git a/experiments/IMC/yaml/Double_link.yaml b/experiments/IMC/yaml/Double_link.yaml new file mode 100644 index 0000000000..c086acedc6 --- /dev/null +++ b/experiments/IMC/yaml/Double_link.yaml @@ -0,0 +1,63 @@ +--- +id: Double_link +body: + id : Core + type : Core + params: + red: 0.04 + green: 0.26 + blue: 0.72 + children : + 0: + id : Leg00Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation: 0 + children : + 1: + id : Leg00 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children: + 1: + id : Leg01Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg01 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + +brain: + type: cpg + learner: + type : hyperneat + params: + PopulationSize: 350 + OldAgeTreshold: 20 + MaxSpecies: 15 + controller: + reset_neuron_random : false; + use_frame_of_reference : false; + init_neuron_state : 0.707; + range_ub : 1.0; + signal_factor_all : 1.0; + signal_factor_mid : 1.0; + signal_factor_left_right : 1.0; + abs_output_bound : 1.0; + weights: [ 0.86315, 0.26597, 0.941] diff --git a/experiments/IMC/yaml/IMCspider.yaml b/experiments/IMC/yaml/IMCspider.yaml new file mode 100644 index 0000000000..f7de62a0cb --- /dev/null +++ b/experiments/IMC/yaml/IMCspider.yaml @@ -0,0 +1,182 @@ +--- +id: IMC_spider3 +body: + id : Core + type : Core + params: + red: 0.04 + green: 0.26 + blue: 0.72 + children : + 0: + id : Leg00Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg00 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + 
orientation : -90 + children : + 1: + id : Leg01Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg01 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 1: + id : Leg10Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg10 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg11Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg11 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 2: + id : Leg20Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg20 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg21Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg21 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 3: + id : Leg30Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg30 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg31Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg31 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 +brain: + type: cpg + learner: + type : bo + n_init_samples: 50 #1450 + init_method: "LHS" + kernel_noise: 0.00000001 + kernel_optimize_noise: "false" + kernel_sigma_sq: 1.0 + kernel_l: 0.2 + kernel_squared_exp_ard_k: 4 + acqui_gpucb_delta: 0.5 + acqui_ucb_alpha: 3.0 + acqui_ei_jitter: 0 + acquisition_function: "UCB" + meta: + robot_size: 18 + run_analytics: "true" + n_learning_iterations: 1500 + n_cooldown_iterations: 1 + reset_robot_position: "false" + evaluation_rate: 60 + output_directory: "/home/fuda/Projects/revolve/experiments/IMC/output" + verbose: 0 + startup_time: 0 + controller: + reset_neuron_random : false; + use_frame_of_reference : false; + init_neuron_state : 0.707; + range_ub : 1.0; + signal_factor_all : 1.0; + signal_factor_mid : 1.0; + signal_factor_left_right : 1.0; + abs_output_bound : 1.0; + weights: [0.482167, 0.560357, 0.753772, 0.221536, 0.44513, 0.667353, 0.580933, 0.246228, 0.111797, + 0.110425, 0.667353, 0.519204, 0.11134, 0.667353, 0.70439, 0.000228624, 0.444673, 0.287837] diff --git a/experiments/IMC/yaml/Single_link.yaml b/experiments/IMC/yaml/Single_link.yaml new file mode 100644 index 0000000000..2347de1527 --- /dev/null +++ b/experiments/IMC/yaml/Single_link.yaml @@ -0,0 +1,47 @@ +--- +id: Single_link +body: + id : Core + type : Core + params: + red: 0.04 + green: 0.26 + blue: 0.72 + children : + 0: + id : Leg00Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation: 0 + children : + 1: + id : Leg00 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + + +brain: + type: cpg + learner: + type : bo +# params: +# PopulationSize: 350 +# OldAgeTreshold: 20 +# MaxSpecies: 15 + controller: + reset_neuron_random : false; + use_frame_of_reference : false; + init_neuron_state : 0.707; + range_ub : 1.0; + signal_factor_all : 1.0; + signal_factor_mid : 1.0; + signal_factor_left_right : 1.0; + abs_output_bound : 1.0; + 
weights: [ 1.482167] diff --git a/experiments/IMC/yaml/model.config b/experiments/IMC/yaml/model.config new file mode 100644 index 0000000000..5a29819ddf --- /dev/null +++ b/experiments/IMC/yaml/model.config @@ -0,0 +1,16 @@ + + + + IMCspider + 1.0 + IMCspider.yaml.sdf + + + Fuda + f.van.diggelen@student.vu.nl + + + + this is a robot + + diff --git a/experiments/examples/yaml/spider_cpg_bo.yaml b/experiments/examples/yaml/spider_cpg_bo.yaml new file mode 100644 index 0000000000..969bda227d --- /dev/null +++ b/experiments/examples/yaml/spider_cpg_bo.yaml @@ -0,0 +1,161 @@ +--- +id: example_spider +body: + id : Core + type : Core + params: + red: 0.04 + green: 0.26 + blue: 0.72 + children : + 0: + id : Leg00Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg00 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg01Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg01 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 1: + id : Leg10Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg10 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg11Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg11 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 2: + id : Leg20Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg20 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg21Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg21 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 + 3: + id : Leg30Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + orientation : 90 + children : + 1: + id : Leg30 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : -90 + children : + 1: + id : Leg31Joint + type : ActiveHinge + params: + red: 0.98 + green: 0.98 + blue: 0.98 + children : + 1: + id : Leg31 + type : FixedBrick + params: + red: 0.04 + green: 0.26 + blue: 0.72 + orientation : 0 +brain: + type: cpg + learner: + type : bo + controller: + reset_neuron_random : false; + use_frame_of_reference : false; + init_neuron_state : 0.707; + range_ub : 1.0; + signal_factor_all : 4.0; + signal_factor_mid : 2.5; + signal_factor_left_right : 2.5; + abs_output_bound : 1.0; + weights: [0.482167, 0.560357, 0.753772, 0.221536, 0.44513, 0.667353, 0.580933, 0.246228, 0.111797, + 0.110425, 0.667353, 0.519204, 0.11134, 0.667353, 0.70439, 0.000228624, 0.444673, 0.287837] diff --git a/models/rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae b/models/rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae index bb5147bdfd..67b9b00e02 100644 --- a/models/rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae +++ b/models/rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae @@ -1,63 +1,110 @@ - - - - - VCGLab - VCGLib | MeshLab - - Y_UP - do sep. 17 13:41:27 2015 - do sep. 
17 13:41:27 2015
[mesh data omitted: the remainder of this hunk modifies the MeshLab/VCGLib-exported COLLADA markup and vertex coordinate arrays of ActiveCardanHinge_Servo_Holder.dae; the raw numeric payload is not reproduced here]
0.00191012 -0.00385444 0.014155 0.00178115 -0.0037491 0.0139174 0.001947 -0.0040378 0.014001 0.00173037 -0.00387793 0.014081 0.00181519 -0.00390212 0.0139174 0.00185023 -0.00363419 0.014375 0.0014297 -0.00426171 0.014277 0.00122991 -0.00404323 0.014277 0.00147653 -0.00412423 0.014155 0.00156087 -0.00396008 0.014221 0.00163518 -0.00417245 0.014001 0.00161107 -0.00401993 0.014081 0.00170848 -0.00377194 0.014371 0.00140476 -0.00393869 0.0143531 0.0013677 -0.00388126 0.014371 0.00130791 -0.00410338 0.0143215 0.00130974 -0.00404448 0.0143531 0.00125756 -0.00398243 0.014371 0.00120258 -0.00420509 0.014221 0.00139984 -0.00449418 0.014221 0.000981022 -0.00435537 0.014277 0.00109422 -0.0043552 0.014155 0.00130016 -0.00437387 0.0143215 0.000917878 -0.00429434 0.0143215 0.00105563 -0.00448916 0.014081 0.00117883 -0.00457797 0.014081 0.001025 -0.00429339 0.014371 0.000710837 -0.00430419 0.0143531 0.000881308 -0.0044663 0.0143531 -0.000453839 -0.00442382 0.0143531 -0.000600531 -0.00435537 0.014277 -0.00109422 -0.00449418 0.014221 -0.000981019 -0.00422783 0.0143531 -0.00101357 -0.00431261 0.014221 -0.00126815 -0.0043552 0.014155 -0.00130016 -0.00445421 0.014155 -0.00115672 -0.00424497 0.014155 -0.00143516 -0.00438826 0.014081 -0.001325 -0.00415744 0.014277 -0.00135762 -0.0039932 0.0143215 -0.00142445 -0.00404323 0.014277 -0.00147653 -0.00404448 0.0143531 -0.00125755 -0.00415288 0.014081 -0.00159069 -0.00417245 0.014001 -0.00161107 -0.00382438 0.0143531 -0.00146897 -0.00393869 0.0143531 -0.00136769 -0.00408733 0.014221 -0.00152244 -0.00396008 0.014221 -0.00163517 -0.00387414 0.0143215 -0.00152992 -0.0040378 0.014001 -0.00173036 -0.00365519 0.014371 -0.00149249 -0.00378801 0.014277 -0.00168491 -0.00387793 0.014081 -0.00181518 -0.00401993 0.014081 -0.00170848 -0.00364866 0.014277 -0.00177303 -0.00372781 0.014081 -0.00191012 -0.00385444 0.014155 -0.00178115 -0.00332538 0.0143215 -0.00184675 -0.00350267 0.014277 -0.00184965 -0.00339245 0.014155 -0.00202361 -0.00357053 0.014081 -0.00199266 -0.00340712 0.014081 -0.00206228 -0.00374193 0.014001 -0.00193458 -0.00358879 0.0139174 -0.00203113 -0.00323864 0.014081 -0.00211853 -0.00322713 0.014155 -0.0020788 -0.00341714 0.014001 -0.0020887 -0.00317451 0.0143215 -0.00189712 -0.00303452 0.014277 -0.00200594 -0.00306618 0.014081 -0.00216104 -0.00305791 0.014155 -0.00212052 -0.00307183 0.014001 -0.00218872 -0.00302007 0.0143215 -0.00193518 -0.00304725 0.014221 -0.00206831 -0.00270136 0.0143531 -0.00189487 -0.00284359 0.014371 -0.00180028 -0.00287178 0.014277 -0.00203239 -0.00286307 0.0143215 -0.0019607 -0.00289086 0.014081 -0.00218953 -0.00289426 0.014001 -0.00221758 -0.00254864 0.0143531 -0.00189487 -0.00254547 0.0143215 -0.0019735 -0.00254 0.014221 -0.00210926 -0.00270453 0.0143215 -0.0019735 -0.00271381 0.014081 -0.00220382 -0.00253619 0.014081 -0.00220382 -0.00237055 0.014221 -0.00209558 -0.00235401 0.0139174 -0.0022318 -0.00235574 0.014001 -0.00221758 -0.00239641 0.0143531 -0.00188258 -0.00221548 0.014277 -0.00200594 -0.00235914 0.014081 -0.00218953 -0.00236413 0.014155 -0.00214847 -0.00219209 0.014155 -0.00212052 -0.00195252 0.0143531 -0.00177317 -0.0020554 0.014277 -0.00196648 -0.00189901 0.014277 -0.00191427 -0.00207549 0.0143215 -0.00189712 -0.00187644 0.014221 -0.0019738 -0.00218382 0.014081 -0.00216104 -0.00183286 0.014001 -0.0020887 -0.00201136 0.014081 -0.00211853 -0.0021753 0.0139174 -0.00220276 -0.00217817 0.014001 -0.00218872 -0.00182778 0.0139174 -0.0021021 -0.0020035 0.014001 -0.00214567 -0.00174733 0.014277 -0.00184965 -0.0016972 0.014155 -0.0019553 
-0.00167947 0.014081 -0.00199266 -0.00154772 0.0143531 -0.00156071 -0.00171825 0.014371 -0.00157054 -0.00142562 0.0143531 -0.00146897 -0.00146199 0.014277 -0.00168491 -0.00137586 0.0143215 -0.00152992 -0.0012122 0.014001 -0.00173036 -0.00135602 0.014001 -0.00183843 -0.00128992 0.014221 -0.00163517 -0.00123007 0.014081 -0.00170848 -0.00126757 0.014371 -0.00120258 -0.00114662 0.0143215 -0.00130973 -0.0012568 0.0143215 -0.00142445 -0.00116267 0.014221 -0.00152244 -0.000942199 0.0139174 -0.00149083 -0.00107755 0.014001 -0.00161107 -0.00120552 0.0143531 -0.00125755 -0.000974074 0.014081 -0.00146259 -0.00100503 0.014155 -0.00143516 -0.00104491 0.014221 -0.00139984 -0.000952927 0.014001 -0.00148133 -0.000894629 0.014277 -0.00109422 -0.00102217 0.0143531 -0.00101357 -0.000988288 0.014277 -0.00122991 -0.000839151 0.014001 -0.00134198 -0.000861737 0.014081 -0.001325 -0.000760837 0.014081 -0.00117882 -0.000945811 0.0143531 -0.000881304 -0.000880341 0.0143531 -0.000743328 -0.000807947 0.0143215 -0.000774172 -0.000751544 0.0143215 -0.00062545 -0.000682943 0.014221 -0.000827431 -0.00059588 0.014081 -0.000864525 -0.000672026 0.014081 -0.001025 -0.000826184 0.0143531 -0.000600531 -0.000683045 0.014277 -0.000648318 -0.00074151 0.014277 -0.000802478 -0.000622661 0.014221 -0.000668477 -0.000572125 0.014155 -0.000685349 -0.000783695 0.0143531 -0.000453839 -0.000707291 0.0143215 -0.000472671 -0.000637175 0.014277 -0.000489953 -0.000575363 0.014221 -0.000505188 -0.000523635 0.014155 -0.000517938 -0.00044213 0.0139174 -0.000538027 -0.000506096 0.014001 -0.000707392 -0.000834991 0.014371 -0.000291004 -0.000541477 0.014221 -0.000339352 -0.000456045 0.014001 -0.000534597 -0.000734748 0.0143531 -0.000152718 -0.000604286 0.014277 -0.000328969 -0.00038415 0.0139174 -0.000181625 -0.00688225 0.015125 -0.000159134 -0.00693989 0.01575 -0.000472142 -0.00693989 0.017875 -0.000472142 -0.00690394 0.01575 -0.000316951 -0.00690394 0.017875 -0.000316951 -0.00688225 0.01575 -0.000159134 -0.00693989 0.01575 0.000472146 -0.00698981 0.01575 0.000623425 -0.00705328 0.017875 0.000769538 -0.00721864 0.017875 0.00104148 -0.00731918 0.017875 0.00116505 -0.00743053 0.01575 0.00127897 -0.00755178 0.01575 0.00138228 -0.00768193 0.017875 0.00147415 -0.00781988 0.017875 0.0015538 -0.00811462 0.01575 0.00167392 -0.00842624 0.01575 0.00173868 -0.00858516 0.017875 0.00174955 -0.00874442 0.01575 0.00174592 -0.00890269 0.01575 0.00172783 -0.00905866 0.017875 0.00169542 -0.00963419 0.017875 0.0014297 -0.0101601 0.01575 0.000840277 -0.0103732 0.01575 7.96525e-05 -0.0103732 0.01575 -7.96483e-05 -0.0103732 0.017875 7.96527e-05 -0.0103298 0.01575 -0.000394956 -0.0103298 0.017875 -0.000394956 -0.0102301 0.01575 -0.0006972 -0.00998249 0.017875 -0.0011044 -0.00963419 0.01575 -0.0014297 -0.00976002 0.017875 -0.001332 -0.0095 0.017875 -0.00151554 -0.00921104 0.017875 -0.00164895 -0.00905866 0.017875 -0.00169541 -0.00890269 0.01575 -0.00172783 -0.00890269 0.017875 -0.00172782 -0.00874442 0.01575 -0.00174592 -0.00874442 0.017875 -0.00174592 -0.00858516 0.017875 -0.00174954 -0.00826895 0.01575 -0.0017134 -0.00826895 0.017875 -0.00171339 -0.00796452 0.01575 -0.00162057 -0.00796452 0.017875 -0.00162057 -0.00768193 0.01575 -0.00147415 -0.00731918 0.017875 -0.00116504 -0.00712977 0.01575 -0.00090927 -0.00698981 0.015125 -0.000623421 -0.00698981 0.01575 -0.000623421 -0.00705328 0.01575 -0.000769534 -0.00705328 0.015125 -0.000769534 -0.00712977 0.015125 -0.00090927 -0.00721864 0.01575 -0.00104147 -0.00731918 0.015125 -0.00116504 -0.00731918 0.01575 -0.00116504 
-0.00743053 0.01575 -0.00127896 -0.00755178 0.01575 -0.00138228 -0.00768193 0.015125 -0.00147415 -0.00781988 0.01575 -0.0015538 -0.00781988 0.015125 -0.0015538 -0.00796452 0.015125 -0.00162057 -0.00811462 0.01575 -0.00167392 -0.00811462 0.015125 -0.00167392 -0.00826895 0.015125 -0.0017134 -0.00842624 0.01575 -0.00173867 -0.00842624 0.017875 -0.00173867 -0.00858516 0.01575 -0.00174954 -0.00858516 0.015125 -0.00174954 -0.00874442 0.015125 -0.00174592 -0.00905866 0.015125 -0.00169541 -0.00921104 0.01575 -0.00164895 -0.00905866 0.01575 -0.00169541 -0.00935856 0.01575 -0.00158883 -0.0095 0.01575 -0.00151554 -0.0095 0.015125 -0.00151554 -0.00976002 0.01575 -0.001332 -0.00976002 0.015125 -0.001332 -0.00987644 0.01575 -0.00122327 -0.00987644 0.015125 -0.00122327 -0.0100773 0.017875 -0.000976382 -0.00998249 0.01575 -0.0011044 -0.0101601 0.017875 -0.000840273 -0.0100773 0.01575 -0.000976383 -0.0101601 0.01575 -0.000840273 -0.0102301 0.017875 -0.000697199 -0.0101601 0.015125 -0.000840273 -0.0102301 0.015125 -0.0006972 -0.0102869 0.015125 -0.00054835 -0.0102869 0.01575 -0.00054835 -0.0103587 0.01575 -0.00023829 -0.0103732 0.015125 7.96524e-05 -0.0103587 0.01575 0.000238294 -0.0103298 0.017875 0.00039496 -0.0102869 0.01575 0.000548354 -0.0103298 0.01575 0.00039496 -0.0102869 0.015125 0.000548354 -0.0102301 0.017875 0.000697204 -0.0102301 0.01575 0.000697204 -0.0101601 0.017875 0.000840277 -0.0100773 0.01575 0.000976387 -0.0100773 0.015125 0.000976387 -0.00998249 0.01575 0.00110441 -0.00987644 0.017875 0.00122327 -0.00987644 0.01575 0.00122327 -0.00976002 0.01575 0.00133201 -0.0095 0.017875 0.00151555 -0.00963419 0.01575 0.0014297 -0.0095 0.01575 0.00151555 -0.00935856 0.017875 0.00158884 -0.00935856 0.01575 0.00158884 -0.00935856 0.015125 0.00158884 -0.00921104 0.01575 0.00164896 -0.00921104 0.015125 0.00164896 -0.00905866 0.01575 0.00169542 -0.00905866 0.015125 0.00169542 -0.00890269 0.017875 0.00172783 -0.00890269 0.015125 0.00172783 -0.00874442 0.015125 0.00174592 -0.00858516 0.015125 0.00174955 -0.00858516 0.01575 0.00174955 -0.00842624 0.015125 0.00173868 -0.00826895 0.01575 0.0017134 -0.00796452 0.01575 0.00162058 -0.00811462 0.015125 0.00167392 -0.00781988 0.01575 0.0015538 -0.00768193 0.01575 0.00147415 -0.00743053 0.015125 0.00127897 -0.00731918 0.01575 0.00116505 -0.00721864 0.01575 0.00104148 -0.00705328 0.01575 0.000769538 -0.00712977 0.01575 0.000909274 -0.00705328 0.015125 0.000769538 -0.00698981 0.015125 0.000623425 -0.00690394 0.01575 0.000316955 -0.00690394 0.015125 0.000316955 -0.00688225 0.01575 0.000159138 -0.006875 0.01575 2.06536e-09 -0.00688225 0.015125 0.000159138 -0.006875 0.015125 1.9834e-09 -0.00688225 0.017875 -0.000159133 -0.0103587 0.017875 -0.000238289 -0.00705328 0.017875 -0.000769533 -0.00712977 0.017875 -0.000909269 -0.00743053 0.017875 -0.00127896 -0.00963419 0.017875 -0.0014297 -0.0102869 0.017875 -0.000548349 -0.00698981 0.017875 -0.000623421 -0.00721864 0.017875 -0.00104147 -0.00987644 0.017875 -0.00122327 -0.00755178 0.017875 -0.00138228 -0.00768193 0.017875 -0.00147415 -0.00781988 0.017875 -0.0015538 -0.00935856 0.017875 -0.00158883 -0.00811462 0.017875 -0.00167392 -0.0103732 0.017875 -7.96481e-05 -0.006875 0.017875 2.34402e-09 -0.0103587 0.017875 0.000238294 -0.00712977 0.017875 0.000909274 -0.00743053 0.017875 0.00127897 -0.00976002 0.017875 0.00133201 -0.00755178 0.017875 0.00138229 -0.00826895 0.017875 0.0017134 -0.00842624 0.017875 0.00173868 -0.00874442 0.017875 0.00174592 -0.00688225 0.017875 0.000159138 -0.00690394 0.017875 0.000316956 -0.00693989 
0.017875 0.000472146 -0.0102869 0.017875 0.000548354 -0.00698981 0.017875 0.000623425 -0.0100773 0.017875 0.000976387 -0.00998249 0.017875 0.00110441 -0.00796452 0.017875 0.00162058 -0.00921104 0.017875 0.00164896 -0.00811462 0.017875 0.00167392 -0.00454508 0.013625 0.00117294 -0.00462891 0.013875 0.00102316 -0.00434448 0.013625 0.00145117 -0.00419959 0.013875 0.00160723 -0.00410391 0.013625 0.00169568 -0.00382897 0.013625 0.00190078 -0.00348089 0.0136226 0.00208086 -0.00339117 0.0136034 0.00211554 -0.0033474 0.0135869 0.00213088 -0.00326791 0.013875 0.0021562 -0.00330519 0.0135659 0.00214473 -0.00326489 0.0135407 0.00215709 -0.00322685 0.0135115 0.00216801 -0.0031915 0.0134785 0.00217752 -0.00313067 0.013403 0.00219244 -0.00309157 0.013875 0.0022011 -0.00310574 0.0133609 0.00219804 -0.00306844 0.0132704 0.00220587 -0.00308489 0.0133165 0.0022025 -0.00304687 0.009875 0.0022101 -0.00218772 0.013875 0.0022071 -0.00215321 0.009875 0.00219998 -0.00164396 0.009875 0.00202486 -0.0011909 0.009875 0.00173374 -0.00105698 0.009875 0.00161363 -0.000945434 0.013875 0.00149719 -0.000726195 0.013875 0.00120708 -0.000440932 0.013875 0.000540695 -0.000375 0.013875 1.81949e-09 -0.00040372 0.009875 -0.000358346 -0.000439446 0.009875 -0.000534655 -0.000552496 0.009875 -0.000875913 -0.000718452 0.009875 -0.00119481 -0.000933081 0.009875 -0.00148321 -0.00107191 0.013875 -0.001628 -0.0011909 0.009875 -0.00173374 -0.00133399 0.009875 -0.00184277 -0.00150864 0.013875 -0.00195352 -0.0023675 0.013875 -0.00223521 -0.00250962 0.009875 -0.00224704 -0.0026895 0.009875 -0.00224907 -0.00286897 0.009875 -0.00223673 -0.00304929 0.0131741 -0.00220963 -0.00305649 0.0132226 -0.00220824 -0.00309157 0.013875 -0.00220109 -0.00348074 0.0136226 -0.00208091 -0.00382897 0.013625 -0.00190078 -0.00443792 0.013875 -0.0013326 -0.00434448 0.013625 -0.00145117 -0.00453968 0.013875 -0.00118174 -0.00443792 0.013875 0.0013326 -0.00392013 0.013875 0.00183988 -0.00376722 0.013875 0.00193852 -0.00368057 0.013625 0.00198703 -0.00344004 0.013875 0.00209719 -0.0030493 0.0131742 0.00220963 -0.00291218 0.013875 0.0022316 -0.0026895 0.009875 0.00224908 -0.0023675 0.013875 0.00223522 -0.00233047 0.009875 0.00223064 -0.0020108 0.013875 0.00216455 -0.00148533 0.009875 0.00194002 -0.00120854 0.013875 0.00174818 -0.000933081 0.009875 0.00148321 -0.000718452 0.009875 0.00119481 -0.000634866 0.013875 0.0010497 -0.000629095 0.009875 0.00103868 -0.000552496 0.009875 0.000875916 -0.000491766 0.013875 0.000715413 -0.000489144 0.009875 0.000707548 -0.000404384 0.013875 0.000362441 -0.000491766 0.013875 -0.000715409 -0.000829945 0.013875 -0.00135657 -0.000945434 0.013875 -0.00149718 -0.00105698 0.009875 -0.00161363 -0.00135444 0.013875 -0.00185692 -0.00148533 0.009875 -0.00194001 -0.00164396 0.009875 -0.00202486 -0.00233047 0.009875 -0.00223064 -0.00291218 0.013875 -0.0022316 -0.00319149 0.0134785 -0.00217752 -0.00334739 0.0135869 -0.00213088 -0.00360684 0.013875 -0.00202447 -0.00376722 0.013875 -0.00193851 -0.00406456 0.013875 -0.00172921 -0.00419959 0.013875 -0.00160722 -0.00422886 0.013625 -0.00157802 -0.000903942 0.014375 0.000316955 -0.00105328 0.014375 0.000769538 -0.00168998 0.014375 0.000354607 -0.00173954 0.014375 0.000464725 -0.00131918 0.014375 0.00116505 -0.00155178 0.014375 0.00138228 -0.00196188 0.014375 0.000748513 -0.00205694 0.014375 0.000822986 -0.00216028 0.014375 0.000885458 -0.00196452 0.014375 0.00162058 -0.0022704 0.014375 0.000935018 -0.00238568 0.014375 0.000970944 -0.00258516 0.014375 0.00174955 -0.00226895 0.014375 0.0017134 -0.00250446 
0.014375 0.000992711 -0.002625 0.014375 0.001 -0.00305866 0.014375 0.00169542 -0.00286432 0.014375 0.000970944 -0.00290269 0.014375 0.00172783 -0.0029796 0.014375 0.000935018 -0.00319306 0.014375 0.000822986 -0.00308972 0.014375 0.000885458 -0.00344798 0.014375 0.000568067 -0.0040773 0.014375 0.000976387 -0.00416007 0.014375 0.000840277 -0.00337351 0.014375 0.000663125 -0.00398249 0.014375 0.00110441 -0.00432985 0.014375 0.00039496 -0.00359594 0.014375 0.000239317 -0.00361771 0.014375 0.000120539 -0.00432985 0.014375 -0.000394956 -0.00428687 0.014375 -0.00054835 -0.00423012 0.014375 -0.0006972 -0.00416007 0.014375 -0.000840273 -0.00351046 0.014375 -0.000464721 -0.00344798 0.014375 -0.000568063 -0.00337351 0.014375 -0.000663121 -0.00387644 0.014375 -0.00122327 -0.00328812 0.014375 -0.000748509 -0.0035 0.014375 -0.00151554 -0.00376002 0.014375 -0.001332 -0.00319306 0.014375 -0.000822982 -0.00321104 0.014375 -0.00164896 -0.00274442 0.014375 -0.00174592 -0.00242623 0.014375 -0.00173867 -0.00216028 0.014375 -0.000885454 -0.00181989 0.014375 -0.0015538 -0.00196452 0.014375 -0.00162057 -0.00168193 0.014375 -0.00147415 -0.00143053 0.014375 -0.00127896 -0.00187649 0.014375 -0.000663121 -0.00131918 0.014375 -0.00116504 -0.00196188 0.014375 -0.000748509 -0.00180202 0.014375 -0.000568063 -0.00105328 0.014375 -0.000769534 -0.00173954 0.014375 -0.000464721 -0.000989811 0.014375 -0.000623421 -0.000939895 0.014375 -0.000472142 -0.000903942 0.014375 -0.000316951 -0.00165406 0.014375 -0.000239314 -0.000882251 0.014375 -0.000159134 -0.00163229 0.014375 -0.000120535 -0.000882251 0.014375 0.000159138 -0.0031298 0.013331 0.00229612 -0.00327586 0.013527 0.00223509 -0.00343582 0.0136154 0.00209883 -0.00360395 0.013621 0.00239619 -0.0038197 0.013527 0.00324314 -0.00462615 0.0131674 0.00447075 -0.00432649 0.013125 0.00418603 -0.00440994 0.0131674 0.00426752 -0.00420406 0.0131674 0.00405385 -0.00421462 0.013251 0.00404416 -0.00402002 0.013251 0.0038211 -0.00385929 0.013331 0.00357175 -0.00376693 0.013471 0.00327875 -0.00372277 0.013405 0.00330856 -0.00352913 0.0136031 0.0024319 -0.0034151 0.0136031 0.00217691 -0.00309051 0.0131674 0.00231254 -0.00305653 0.0132228 0.00220823 -0.00334249 0.0135715 0.00220725 -0.00315935 0.0134423 0.00218563 -0.00321712 0.013471 0.00225963 -0.0034916 0.013621 0.00214495 -0.00376525 0.013625 0.00257543 -0.0037943 0.0136031 0.0029234 -0.00407359 0.013405 0.00377665 -0.00485213 0.0131674 0.00466303 -0.0037286 0.013621 0.00264157 -0.00404375 0.0135715 0.0034335 -0.00416358 0.013527 0.00370198 -0.00443987 0.013331 0.00423722 -0.00446893 0.013405 0.00420779 -0.0046357 0.013251 0.00446006 -0.00508735 0.0131674 0.00484391 -0.00386522 0.013621 0.00288048 -0.00487892 0.013331 0.00462993 -0.00509581 0.013251 0.00483234 -0.00533123 0.0131674 0.00501293 -0.00421984 0.013625 0.00329312 -0.00410672 0.0136031 0.00338631 -0.00421915 0.0135715 0.00365586 -0.00471763 0.013471 0.00436847 -0.00547948 0.013125 0.00510938 -0.00417306 0.013621 0.00333659 -0.00427971 0.0136031 0.00360561 -0.00455111 0.013527 0.0041246 -0.00513686 0.013405 0.00477612 -0.00535462 0.013331 0.00497734 -0.00559044 0.013251 0.00515734 -0.00573543 0.013125 0.00525837 -0.00599842 0.013125 0.00539454 -0.00558317 0.0131674 0.00516969 -0.00584255 0.0131674 0.00531379 -0.00452444 0.013621 0.00376006 -0.00537733 0.013405 0.00494278 -0.00560477 0.013331 0.00513299 -0.0058492 0.013251 0.0053011 -0.00610874 0.0131674 0.00544489 -0.0062678 0.013125 0.00551757 -0.00471541 0.013621 0.00395825 -0.00477954 0.013625 0.00393224 -0.00491594 0.013621 
0.00414675 -0.00512555 0.013621 0.0043251 -0.00544154 0.013527 0.00484505 -0.00568504 0.013527 0.00499655 -0.00588149 0.013405 0.00523943 -0.00639702 0.0133309 0.00552318 -0.00665893 0.0131674 0.00566683 -0.00666363 0.013251 0.00565329 -0.00694158 0.0131674 0.00575713 -0.00638109 0.0131674 0.00556267 -0.00611476 0.013251 0.00543188 -0.00612661 0.0133309 0.00540624 -0.00586231 0.013331 0.00527607 -0.00520583 0.013527 0.00468169 -0.00520457 0.013625 0.00430705 -0.00552441 0.0136031 0.00471893 -0.00616631 0.013471 0.00532033 -0.0064125 0.013405 0.00548482 -0.00739601 0.013125 0.00587278 -0.00543099 0.013625 0.00447754 -0.00534373 0.013621 0.00449287 -0.00556993 0.013621 0.00464965 -0.00576157 0.0136031 0.00486649 -0.00643243 0.013471 0.00543541 -0.00695354 0.0133309 0.00571626 -0.00694561 0.013251 0.00574337 -0.00768729 0.013125 0.00592627 -0.00722836 0.0131674 0.00583333 -0.00751855 0.0131674 0.00589527 -0.00600574 0.0136031 0.00500215 -0.00596924 0.0135715 0.00507186 -0.00619301 0.013527 0.00526254 -0.00668645 0.013405 0.00558753 -0.00696514 0.013405 0.00567656 -0.00580361 0.013621 0.00479504 -0.0060442 0.013621 0.0049287 -0.00622331 0.0135715 0.00519699 -0.00781144 0.0131674 0.00594277 -0.00827599 0.013125 0.00598984 -0.00615865 0.013625 0.00491601 -0.00648326 0.0135715 0.0053094 -0.00781722 0.0133309 0.00590058 -0.00810757 0.013251 0.00596145 -0.00754386 0.013471 0.0057604 -0.00782283 0.013405 0.0058596 -0.00840248 0.0131674 0.00599407 -0.00667758 0.013625 0.00514369 -0.00654371 0.013621 0.00515954 -0.00704031 0.0136031 0.00541948 -0.00729194 0.0135715 0.00556774 -0.0075556 0.013527 0.00569783 -0.00869901 0.013251 0.00598341 -0.00869919 0.0131674 0.00599774 -0.00886809 0.013125 0.00599508 -0.00680141 0.013621 0.00525616 -0.00694515 0.013625 0.00523719 -0.00756892 0.0135715 0.00562686 -0.00783869 0.013527 0.00574375 -0.00811359 0.013405 0.0058921 -0.00758344 0.0136031 0.00554952 -0.00784848 0.0135715 0.0056722 -0.00869866 0.0133309 0.00595516 -0.00899482 0.013251 0.00597243 -0.00899307 0.0133309 0.00594423 -0.00928973 0.013251 0.00594683 -0.00945782 0.013125 0.00594192 -0.0095853 0.0131674 0.00592083 -0.00732957 0.013621 0.00541059 -0.0081237 0.013527 0.0057756 -0.00869749 0.013471 0.00586053 -0.00987693 0.0131674 0.0058661 -0.0078704 0.013621 0.0055121 -0.00813675 0.0136031 0.00562526 -0.00841262 0.0135715 0.00572116 -0.00957848 0.0133309 0.0058788 -0.0101655 0.0131674 0.005797 -0.0100394 0.013125 0.0058309 -0.00814392 0.013621 0.00554267 -0.00869581 0.0135715 0.00572467 -0.00898723 0.013471 0.00584978 -0.00986805 0.0133309 0.00582445 -0.00805277 0.013625 0.00547015 -0.00841553 0.0136031 0.00564253 -0.00869484 0.0136031 0.00564598 -0.00897883 0.0135715 0.00571416 -0.009269 0.013527 0.00576144 -0.0104459 0.013251 0.00570008 -0.00841861 0.013621 0.00555968 -0.00897396 0.0136031 0.00563562 -0.0104373 0.0133309 0.00567316 -0.0108843 0.013125 0.00555838 -0.0107306 0.0131674 0.00561647 -0.00925224 0.0136031 0.00561147 -0.00955314 0.013527 0.00572254 -0.0101439 0.013405 0.00571588 -0.0110001 0.013251 0.00549231 -0.0112752 0.0131674 0.00538099 -0.00924303 0.013621 0.00552908 -0.0101139 0.013527 0.00560286 -0.0104248 0.013405 0.00563377 -0.0104085 0.013471 0.00558302 -0.0107157 0.0133309 0.0055766 -0.0116797 0.013125 0.00516418 -0.0114212 0.013125 0.00530862 -0.011538 0.0131674 0.00524334 -0.00946575 0.013625 0.00543536 -0.0100954 0.0135715 0.00553307 -0.0106825 0.013471 0.00548798 -0.0109725 0.013405 0.00542842 -0.0115311 0.013251 0.00523082 -0.0100752 0.0136031 0.00545702 -0.0106601 0.013527 
0.00542837 -0.0117862 0.013251 0.0050807 -0.0119308 0.013125 0.00500716 -0.0120418 0.0131674 0.00492993 -0.0097862 0.013621 0.00544098 -0.0109261 0.013527 0.00532109 -0.0109513 0.013471 0.00537952 -0.0112145 0.013471 0.00525789 -0.0114973 0.013405 0.00516996 -0.0115173 0.013331 0.00520612 -0.0100206 0.013625 0.00532 -0.0100539 0.013621 0.0053769 -0.0106348 0.0135715 0.00536075 -0.0120175 0.013331 0.00489493 -0.0122814 0.0131674 0.00475493 -0.010578 0.013621 0.00520944 -0.0114405 0.013527 0.00506774 -0.0122554 0.013331 0.00472117 -0.0125028 0.013251 0.00455737 -0.0127332 0.0131674 0.00437047 -0.0111197 0.0136031 0.0050654 -0.0114054 0.0135715 0.00500462 -0.0116876 0.013527 0.00492231 -0.0122302 0.013405 0.00468838 -0.0124845 0.013331 0.00453585 -0.0127234 0.013251 0.00436003 -0.0129443 0.0131674 0.00416195 -0.0113303 0.013625 0.00478867 -0.0113269 0.013621 0.00486336 -0.0115641 0.013621 0.00472379 -0.0121149 0.0135715 0.00453844 -0.0121589 0.013527 0.00459568 -0.0124231 0.013471 0.00446378 -0.0123819 0.013527 0.00441529 -0.0133231 0.013251 0.00370603 -0.0134314 0.013125 0.00359148 -0.0135123 0.0131674 0.00347747 -0.0133343 0.0131674 0.00371491 -0.012704 0.013331 0.00433944 -0.0119273 0.013527 0.00476483 -0.0116495 0.0135715 0.00486099 -0.0113672 0.0136031 0.00493583 -0.0116079 0.0136031 0.00479418 -0.0120669 0.0136031 0.00447606 -0.0123351 0.0135715 0.00436029 -0.0125956 0.013527 0.0042241 -0.0128455 0.013471 0.00406674 -0.0133009 0.013331 0.00368853 -0.0136783 0.0131674 0.00323152 -0.0115734 0.013625 0.00464294 -0.0117941 0.013621 0.00457266 -0.0122841 0.0136031 0.00430036 -0.0135006 0.013251 0.00346916 -0.013762 0.013125 0.00310024 -0.0120355 0.013625 0.00431492 -0.0120164 0.013621 0.00441034 -0.0125462 0.0135715 0.00417148 -0.0124923 0.0136031 0.00411415 -0.0127477 0.0135715 0.00397246 -0.0134439 0.013405 0.0034288 -0.0136424 0.013331 0.00320858 -0.0139087 0.013125 0.00284299 -0.0139728 0.0131674 0.00271651 -0.0122304 0.013621 0.00423722 -0.0122533 0.013625 0.00413349 -0.0141006 0.0131674 0.00244872 -0.0140425 0.013125 0.00257881 -0.0124355 0.013621 0.00405374 -0.0136076 0.013405 0.00318629 -0.0139348 0.013331 0.00269723 -0.0126313 0.013621 0.00386034 -0.0130581 0.0136031 0.00349703 -0.0132898 0.0135715 0.00331914 -0.013509 0.013527 0.00312329 -0.0137128 0.013471 0.00290954 -0.0142703 0.013125 0.00203226 -0.014215 0.0131674 0.00217494 -0.0126593 0.013625 0.00373822 -0.0132257 0.0136031 0.00327352 -0.0134482 0.0135715 0.00308439 -0.0138505 0.013471 0.00265437 -0.014024 0.013405 0.00241445 -0.0140617 0.013331 0.00243134 -0.0144432 0.013125 0.00146592 -0.0130226 0.013625 0.00330326 -0.0129931 0.013621 0.00344569 -0.0133819 0.0136031 0.00304199 -0.0137937 0.013527 0.00262554 -0.0143021 0.013251 0.0018913 -0.0144025 0.0131674 0.00161208 -0.0135265 0.0136031 0.00280302 -0.0139753 0.013471 0.0023927 -0.0142361 0.013405 0.0018693 -0.0142753 0.013331 0.00188237 -0.0144752 0.0131674 0.00132439 -0.0133121 0.013621 0.00299733 -0.0139172 0.013527 0.00236671 -0.0141855 0.013471 0.00185246 -0.0144612 0.013251 0.00132123 -0.0145335 0.0131674 0.00103346 -0.0134546 0.013621 0.00276187 -0.0136592 0.0136031 0.00255719 -0.0143217 0.013405 0.00158952 -0.0145194 0.013251 0.00103099 -0.0135852 0.013621 0.00251965 -0.0137795 0.0136031 0.00230511 -0.0142703 0.013471 0.0015752 -0.0143413 0.013471 0.00129409 -0.0145924 0.013251 0.000443662 -0.0146067 0.0131674 0.000444725 -0.013982 0.0136031 0.00178464 -0.014209 0.013527 0.00155809 -0.0142792 0.013527 0.00128004 -0.014625 0.013125 1.72114e-09 -0.0137192 0.013625 
0.00207345 -0.0139033 0.013621 0.00175844 -0.0140637 0.0136031 0.00151754 -0.0141395 0.0135715 0.00153869 -0.0142088 0.0135715 0.00126409 -0.0144941 0.013405 0.000729641 -0.0144412 0.013471 0.000723069 -0.014523 0.013405 0.000438501 -0.0145642 0.013331 0.000441567 -0.014607 0.013251 0.000148009 -0.0143356 0.013527 0.000998848 -0.0145788 0.013331 0.000147311 -0.0145788 0.013331 -0.000147307 -0.0146067 0.0131674 -0.000444721 -0.0145774 0.0131674 -0.000739994 -0.0141321 0.0136031 0.00124672 -0.0142645 0.0135715 0.000986406 -0.014187 0.0136031 0.000972848 -0.0144842 0.013471 0.00014497 -0.0145642 0.013331 -0.000441564 -0.0145632 0.013251 -0.000738226 -0.0145335 0.0131674 -0.00103346 -0.0141053 0.013621 0.000958565 -0.0143343 0.0135715 0.000424477 -0.0145351 0.013331 -0.00073474 -0.0145194 0.013251 -0.00103099 -0.0144752 0.0131674 -0.00132439 -0.0145084 0.013125 -0.00117704 -0.014146 0.013621 0.00068637 -0.0142559 0.0136031 0.000418642 -0.0144205 0.013527 0.000143395 -0.0143484 0.0135715 0.000141609 -0.0144941 0.013405 -0.000729638 -0.0144612 0.013251 -0.00132122 -0.0144025 0.0131674 -0.00161208 -0.0144432 0.013125 -0.00146592 -0.0141086 0.013625 0.000424777 -0.0144412 0.013471 -0.000723065 -0.0144336 0.013331 -0.00131499 -0.0143637 0.013125 -0.00175122 -0.0143157 0.0131674 -0.00189583 -0.0142703 0.013125 -0.00203226 -0.0141232 0.013625 0.000141719 -0.0142697 0.0136031 0.000139663 -0.0143983 0.013471 -0.00100981 -0.0143615 0.013331 -0.00160063 -0.0143887 0.013251 -0.00160823 -0.014215 0.0131674 -0.00217493 -0.0141868 0.013621 -0.000137609 -0.0142016 0.013251 -0.00216974 -0.0141006 0.0131674 -0.00244872 -0.0141232 0.013625 -0.000141716 -0.0142559 0.0136031 -0.000418639 -0.0143356 0.013527 -0.000998845 -0.0140794 0.013625 -0.000706704 -0.0140512 0.013621 -0.00122841 -0.0140637 0.0136031 -0.00151753 -0.0141251 0.013527 -0.00183233 -0.0140278 0.013527 -0.00210209 -0.013898 0.013405 -0.00267849 -0.0135123 0.0131674 -0.00347746 -0.0136027 0.013125 -0.00334994 -0.0140617 0.013331 -0.00243133 -0.0141368 0.013405 -0.0021445 -0.0141053 0.013621 -0.000958561 -0.0141321 0.0136031 -0.00124671 -0.0139838 0.013621 -0.00149525 -0.0136424 0.013331 -0.00320857 -0.0134314 0.013125 -0.00359147 -0.013982 0.0136031 -0.00178464 -0.0139605 0.0135715 -0.00207591 -0.0138871 0.0136031 -0.00204738 -0.0138505 0.013471 -0.00265437 -0.0137128 0.013471 -0.00290954 -0.0136076 0.013405 -0.00318629 -0.0133231 0.013251 -0.00370603 -0.0131449 0.0131674 -0.00394325 -0.0139033 0.013621 -0.00175844 -0.0131341 0.013251 -0.00393383 -0.0138099 0.013621 -0.00201732 -0.0137192 0.013625 -0.00207344 -0.0135948 0.0135715 -0.00284208 -0.012934 0.013251 -0.00415201 -0.0129443 0.0131674 -0.00416195 -0.0137038 0.013621 -0.00227126 -0.0135852 0.013621 -0.00251964 -0.0135265 0.0136031 -0.00280302 -0.0134482 0.0135715 -0.00308438 -0.0134005 0.013471 -0.00339791 -0.0132266 0.013471 -0.00362992 -0.0130816 0.013405 -0.00388807 -0.0127332 0.0131674 -0.00437047 -0.0134788 0.013625 -0.00258661 -0.0133486 0.013527 -0.003361 -0.0132898 0.0135715 -0.00331913 -0.0128839 0.013405 -0.0041037 -0.0125121 0.0131674 -0.00456828 -0.0124083 0.013125 -0.00465693 -0.0122814 0.0131674 -0.00475492 -0.0129935 0.013527 -0.00381119 -0.0128455 0.013471 -0.00406674 -0.012704 0.013331 -0.00433944 -0.0120418 0.0131674 -0.00492993 -0.0119308 0.013125 -0.00500715 -0.0121739 0.013125 -0.00483793 -0.0131869 0.013625 -0.00307232 -0.0131581 0.013621 -0.00322545 -0.0130581 0.0136031 -0.00349703 -0.0127997 0.013527 -0.00402257 -0.0126757 0.013405 -0.0043093 -0.0120336 0.013251 
-0.00491815 -0.0117938 0.0131674 -0.00509287 -0.0129391 0.0135715 -0.00376372 -0.0122302 0.013405 -0.00468838 -0.0120175 0.013331 -0.00489493 -0.011538 0.0131674 -0.00524334 -0.0130226 0.013625 -0.00330325 -0.0129931 0.013621 -0.00344568 -0.0128465 0.013625 -0.00352541 -0.0119939 0.013405 -0.00486093 -0.0112752 0.0131674 -0.00538099 -0.012691 0.0136031 -0.00391786 -0.0119636 0.013471 -0.00481714 -0.0117494 0.013405 -0.00502159 -0.0110058 0.0131674 -0.00550546 -0.0119273 0.013527 -0.00476482 -0.0112563 0.013331 -0.00534278 -0.0106073 0.013125 -0.00566308 -0.0108843 0.013125 -0.00555837 -0.0124614 0.013625 -0.00394108 -0.0122841 0.0136031 -0.00430036 -0.0122304 0.013621 -0.00423722 -0.0118862 0.0135715 -0.00470547 -0.0117213 0.013471 -0.00497636 -0.0116876 0.013527 -0.0049223 -0.0114973 0.013405 -0.00516996 -0.0109889 0.013331 -0.00546638 -0.0110001 0.013251 -0.00549231 -0.0104503 0.0131674 -0.00571372 -0.0112145 0.013471 -0.00525789 -0.0112381 0.013405 -0.00530568 -0.0107157 0.013331 -0.00557659 -0.0101655 0.0131674 -0.005797 -0.0103254 0.013125 -0.005754 -0.0118087 0.013625 -0.00448488 -0.0117941 0.013621 -0.00457265 -0.0116495 0.0135715 -0.00486099 -0.0114054 0.0135715 -0.00500461 -0.00987693 0.0131674 -0.00586609 -0.00975 0.013125 -0.00589359 -0.0100394 0.013125 -0.0058309 -0.0115734 0.013625 -0.00464294 -0.0106825 0.013471 -0.00548798 -0.0095853 0.0131674 -0.00592083 -0.00945782 0.013125 -0.00594192 -0.0115641 0.013621 -0.00472378 -0.0113303 0.013625 -0.00478866 -0.0111197 0.0136031 -0.0050654 -0.0104085 0.013471 -0.00558301 -0.0104248 0.013405 -0.00563376 -0.0101546 0.013331 -0.00575585 -0.00986805 0.013331 -0.00582445 -0.0113269 0.013621 -0.00486336 -0.0106348 0.0135715 -0.00536075 -0.0103892 0.013527 -0.00552237 -0.0101439 0.013405 -0.00571587 -0.0101303 0.013471 -0.00566438 -0.00957848 0.013331 -0.00587879 -0.00928973 0.013251 -0.00594683 -0.0108232 0.013625 -0.00504161 -0.0103672 0.0135715 -0.00545358 -0.0101139 0.013527 -0.00560286 -0.00928659 0.013331 -0.00591875 -0.00869919 0.0131674 -0.00599774 -0.00857197 0.013125 -0.00599976 -0.0108333 0.013621 -0.00510648 -0.0100752 0.0136031 -0.00545701 -0.0100954 0.0135715 -0.00553307 -0.00981993 0.0135715 -0.00559901 -0.009835 0.013527 -0.00566963 -0.00928199 0.013405 -0.00587765 -0.00899307 0.013331 -0.00594423 -0.00869866 0.013331 -0.00595516 -0.00869901 0.013251 -0.00598341 -0.00810633 0.0131674 -0.00597573 -0.0102928 0.013625 -0.00524104 -0.0097862 0.013621 -0.00544098 -0.00974465 0.013625 -0.00538483 -0.00951571 0.013621 -0.00549174 -0.00925224 0.0136031 -0.00561146 -0.00926098 0.0135715 -0.00568967 -0.00869749 0.013471 -0.00586053 -0.0086967 0.013527 -0.00579687 -0.00840758 0.013471 -0.00585694 -0.00811359 0.013405 -0.0058921 -0.00811001 0.013331 -0.00593331 -0.00710772 0.013125 -0.00580498 -0.00739601 0.013125 -0.00587278 -0.00722836 0.0131674 -0.00583333 -0.00781144 0.0131674 -0.00594277 -0.00840406 0.013331 -0.00595151 -0.00898723 0.013471 -0.00584977 -0.009269 0.013527 -0.00576143 -0.00952898 0.0136031 -0.00557358 -0.00897396 0.0136031 -0.00563562 -0.00781722 0.013331 -0.00590058 -0.0075264 0.013331 -0.00585341 -0.00694158 0.0131674 -0.00575712 -0.00924303 0.013621 -0.00552907 -0.00896884 0.013621 -0.00555287 -0.00840994 0.013527 -0.00579332 -0.00811819 0.013471 -0.00583902 -0.00694561 0.013251 -0.00574337 -0.00869484 0.0136031 -0.00564598 -0.00812994 0.0135715 -0.00570366 -0.00695354 0.013331 -0.00571625 -0.00666363 0.013251 -0.00565329 -0.00665893 0.0131674 -0.00566683 -0.00638109 0.0131674 -0.00556267 -0.0062678 0.013125 
-0.00551757 -0.00869381 0.013621 -0.00556308 -0.00841861 0.013621 -0.00555968 -0.00726031 0.013471 -0.00569988 -0.00610874 0.0131674 -0.00544489 -0.00861866 0.013625 -0.00549999 -0.00756892 0.0135715 -0.00562685 -0.0075556 0.013527 -0.00569783 -0.00668645 0.013405 -0.00558752 -0.00667289 0.013331 -0.0056266 -0.00611476 0.013251 -0.00543188 -0.00599842 0.013125 -0.00539454 -0.00584255 0.0131674 -0.00531379 -0.00805277 0.013625 -0.00547015 -0.0078704 0.013621 -0.00551209 -0.00758344 0.0136031 -0.00554951 -0.00729194 0.0135715 -0.00556774 -0.0058492 0.013251 -0.00530109 -0.00759873 0.013621 -0.00546803 -0.00731027 0.0136031 -0.00549121 -0.00701823 0.0135715 -0.005495 -0.00670391 0.013471 -0.00553719 -0.00672478 0.013527 -0.00547704 -0.00558317 0.0131674 -0.00516968 -0.00547948 0.013125 -0.00510938 -0.00533123 0.0131674 -0.00501293 -0.00643243 0.013471 -0.00543541 -0.00616631 0.013471 -0.00532033 -0.00614396 0.013405 -0.00536869 -0.00586231 0.013331 -0.00527606 -0.00588149 0.013405 -0.00523942 -0.00559044 0.013251 -0.00515733 -0.00560477 0.013331 -0.00513298 -0.0052312 0.013125 -0.00494794 -0.00508735 0.0131674 -0.00484391 -0.00732957 0.013621 -0.00541059 -0.00721718 0.013625 -0.00531677 -0.00677424 0.0136031 -0.00533448 -0.00648326 0.0135715 -0.0053094 -0.00619301 0.013527 -0.00526254 -0.00535462 0.013331 -0.00497734 -0.0053391 0.013251 -0.00500095 -0.00509581 0.013251 -0.00483233 -0.00485213 0.0131674 -0.00466303 -0.00680141 0.013621 -0.00525615 -0.00622331 0.0135715 -0.00519698 -0.00593574 0.013527 -0.00513583 -0.00562574 0.013405 -0.00509734 -0.00537733 0.013405 -0.00494277 -0.00476002 0.013125 -0.00458933 -0.00462615 0.0131674 -0.00447074 -0.00654371 0.013621 -0.00515954 -0.00596924 0.0135715 -0.00507185 -0.00568504 0.013527 -0.00499655 -0.00432649 0.013125 -0.00418602 -0.00600574 0.0136031 -0.00500214 -0.00572166 0.0135715 -0.00493431 -0.00513686 0.013405 -0.00477612 -0.0046357 0.013251 -0.00446006 -0.00465454 0.013331 -0.004439 -0.00420406 0.0131674 -0.00405384 -0.0062911 0.013621 -0.0050503 -0.00615865 0.013625 -0.004916 -0.00544154 0.013527 -0.00484504 -0.00520583 0.013527 -0.00468168 -0.00516828 0.013471 -0.00473309 -0.00493845 0.013471 -0.00455635 -0.00468211 0.013405 -0.00440817 -0.00442002 0.013251 -0.00425732 -0.00421462 0.013251 -0.00404416 -0.00400899 0.0131674 -0.00383025 -0.0060442 0.013621 -0.0049287 -0.00552441 0.0136031 -0.00471893 -0.00402002 0.013251 -0.0038211 -0.00393482 0.013125 -0.00374195 -0.00590867 0.013625 -0.00478242 -0.0056659 0.013625 -0.00463613 -0.00471763 0.013471 -0.00436846 -0.00404176 0.013331 -0.00380306 -0.00365319 0.0131674 -0.00335551 -0.00382522 0.0131674 -0.00359728 -0.00543099 0.013625 -0.00447753 -0.00529483 0.0136031 -0.00455982 -0.00507341 0.0136031 -0.00438955 -0.00430519 0.013471 -0.0039611 -0.00407359 0.013405 -0.00377665 -0.00383669 0.013251 -0.00358869 -0.00512555 0.013621 -0.0043251 -0.00460185 0.0135715 -0.00407321 -0.00385929 0.013331 -0.00357175 -0.00368849 0.013331 -0.00333169 -0.00349333 0.0131674 -0.00310553 -0.00334603 0.0131674 -0.00284796 -0.00343403 0.013125 -0.00300896 -0.00329188 0.013125 -0.00274915 -0.00520457 0.013625 -0.00430704 -0.00498722 0.013625 -0.00412512 -0.00491594 0.013621 -0.00414675 -0.00465715 0.0136031 -0.00401723 -0.00435212 0.013527 -0.00391808 -0.00389239 0.013405 -0.00354694 -0.00393502 0.013471 -0.00351499 -0.00372277 0.013405 -0.00330855 -0.00321165 0.0131674 -0.00258341 -0.00421915 0.0135715 -0.00365586 -0.00338351 0.013331 -0.00282774 -0.00309051 0.0131674 -0.00231253 -0.00304687 0.013125 -0.00221009 
-0.00458207 0.013625 -0.00372891 -0.00446334 0.0136031 -0.00381609 -0.00427971 0.0136031 -0.00360561 -0.00356515 0.013405 -0.00306207 -0.00361073 0.013471 -0.00303449 -0.00325008 0.013331 -0.00256506 -0.00322458 0.013251 -0.00257723 -0.00310373 0.013251 -0.00230701 -0.00452444 0.013621 -0.00376006 -0.00341991 0.013405 -0.0028081 -0.00306839 0.0132702 -0.00220588 -0.00434351 0.013621 -0.00355267 -0.00366519 0.013527 -0.00300153 -0.00328741 0.013405 -0.00254725 -0.00333549 0.013471 -0.0025243 -0.0031298 0.013331 -0.00229612 -0.00308488 0.0133165 -0.0022025 -0.0040135 0.013621 -0.00311234 -0.00334249 0.0135715 -0.00220724 -0.00326474 0.0135406 -0.00215713 -0.00322677 0.0135114 -0.00216803 -0.00315926 0.0134422 -0.00218565 -0.0031057 0.0133608 -0.00219805 -0.00313057 0.0134028 -0.00219246 -0.0037286 0.013621 -0.00264156 -0.00376525 0.013625 -0.00257542 -0.00360395 0.013621 -0.00239619 -0.00330507 0.0135658 -0.00214476 -0.00363902 0.013625 -0.00232164 -0.0034916 0.013621 -0.00214494 -0.0034151 0.0136031 -0.00217691 -0.00339104 0.0136034 -0.00211558 -0.0034356 0.0136153 -0.00209891 -0.00352282 0.013527 -0.00275258 -0.00345812 0.0135715 -0.00246578 -0.00798085 0.013125 -0.00596532 -0.00840302 0.013251 -0.00597975 -0.00869815 0.013405 -0.0059138 -0.0100539 0.013621 -0.00537689 -0.010318 0.013621 -0.00529965 -0.0105606 0.013625 -0.00514816 -0.0139087 0.013125 -0.00284298 -0.0138319 0.0131674 -0.00297766 -0.0139728 0.0131674 -0.00271651 -0.0141753 0.013331 -0.00215949 -0.0142361 0.013405 -0.00186929 -0.0141855 0.013471 -0.00185246 -0.0142703 0.013471 -0.0015752 -0.014187 0.0136031 -0.000972845 -0.014146 0.013621 -0.000686366 -0.0142283 0.0136031 -0.000696594 -0.0141732 0.013621 -0.000412492 -0.0119636 0.013471 0.00481715 -0.0110831 0.013621 0.00499103 -0.0108232 0.013625 0.00504162 -0.0108333 0.013621 0.00510648 -0.00540658 0.013471 0.00489825 -0.00516828 0.013471 0.0047331 -0.00486067 0.0136031 0.00420854 -0.00480821 0.0135715 0.00426719 -0.00439534 0.013625 0.00351568 -0.00352282 0.013527 0.00275258 -0.00345812 0.0135715 0.00246579 -0.00341991 0.013405 0.0028081 -0.00339294 0.013527 0.00249689 -0.0034668 0.013471 0.00278281 -0.00382522 0.0131674 0.00359729 -0.00366507 0.013251 0.0033475 -0.00352976 0.013331 0.00308349 -0.00328741 0.013405 0.00254725 -0.00333549 0.013471 0.00252431 -0.00350559 0.013251 0.00309812 -0.00325008 0.013331 0.00256507 -0.00316797 0.013405 0.00228017 -0.00349333 0.0131674 0.00310554 -0.00335864 0.013251 0.00284115 -0.00322458 0.013251 0.00257724 -0.00334603 0.0131674 0.00284796 -0.00321165 0.0131674 0.00258341 -0.00310373 0.013251 0.00230701 -0.00316272 0.013125 0.00248265 -0.0142697 0.0136031 -0.000139659 -0.0141868 0.013621 0.000137612 -0.00417306 0.013621 -0.00333659 -0.00365563 0.0136031 0.00268093 -0.00366519 0.013527 0.00300153 -0.00358638 0.0135715 0.00271829 -0.00372697 0.0135715 0.00296414 -0.00338351 0.013331 0.00282774 -0.00394478 0.0136031 0.00315872 -0.00361073 0.013471 0.00303449 -0.00387956 0.0135715 0.00320274 -0.00356515 0.013405 0.00306208 -0.00365319 0.0131674 0.00335552 -0.0040135 0.013621 0.00311234 -0.00389239 0.013405 0.00354694 -0.00398596 0.013527 0.00347681 -0.00368849 0.013331 0.0033317 -0.00393502 0.013471 0.00351499 -0.00411459 0.013471 0.00374263 -0.00383669 0.013251 0.00358869 -0.00434351 0.013621 0.00355267 -0.00440534 0.0135715 0.00386927 -0.00460185 0.0135715 0.00407322 -0.00446334 0.0136031 0.00381609 -0.00465715 0.0136031 0.00401723 -0.00430519 0.013471 0.00396111 -0.00435212 0.013527 0.00391808 -0.00450637 0.013471 0.00416989 
-0.00423544 0.013331 0.00402506 -0.00426593 0.013405 0.00399711 -0.00404176 0.013331 0.00380306 -0.00442002 0.013251 0.00425732 -0.00400899 0.0131674 0.00383025 -0.00476007 0.013527 0.00432102 -0.00468211 0.013405 0.00440818 -0.00465454 0.013331 0.00443901 -0.00490493 0.013405 0.00459778 -0.00507341 0.0136031 0.00438955 -0.00529483 0.0136031 0.00455982 -0.00497849 0.013527 0.00450687 -0.00502391 0.0135715 0.00445073 -0.00493845 0.013471 0.00455636 -0.00511247 0.013331 0.00480952 -0.00486115 0.013251 0.00465189 -0.00524842 0.0135715 0.00462337 -0.0054812 0.0135715 0.0047847 -0.00572166 0.0135715 0.00493431 -0.0053391 0.013251 0.00500095 -0.00593574 0.013527 0.00513583 -0.00562574 0.013405 0.00509734 -0.00565276 0.013471 0.00505142 -0.0062911 0.013621 0.0050503 -0.00590621 0.013471 0.00519223 -0.00651269 0.0136031 0.00523643 -0.00625632 0.0136031 0.00512556 -0.00638645 0.013251 0.00554938 -0.00614396 0.013405 0.00536869 -0.00674845 0.0135715 0.00540882 -0.00672478 0.013527 0.00547705 -0.00645624 0.013527 0.00537637 -0.00670391 0.013471 0.00553719 -0.00706358 0.013621 0.00533991 -0.00677424 0.0136031 0.00533448 -0.0069801 0.013471 0.00562542 -0.00667289 0.0133309 0.0056266 -0.00731027 0.0136031 0.00549122 -0.00727513 0.013527 0.00563797 -0.00701823 0.0135715 0.00549501 -0.00699796 0.013527 0.00556432 -0.0072479 0.013405 0.0057517 -0.00723827 0.0133309 0.00579192 -0.00723169 0.013251 0.0058194 -0.00759873 0.013621 0.00546804 -0.00753403 0.013405 0.00581276 -0.00726031 0.013471 0.00569989 -0.0075264 0.0133309 0.00585341 -0.00752119 0.013251 0.00588118 -0.00785916 0.0136031 0.00559424 -0.00783005 0.013471 0.00580682 -0.00781338 0.013251 0.00592857 -0.00811819 0.013471 0.00583903 -0.00811001 0.0133309 0.00593331 -0.00810633 0.0131674 0.00597573 -0.00840994 0.013527 0.00579333 -0.00812994 0.0135715 0.00570366 -0.0084056 0.013405 0.00591019 -0.00840302 0.013251 0.00597975 -0.00869381 0.013621 0.00556309 -0.00869815 0.013405 0.0059138 -0.00840758 0.013471 0.00585695 -0.00840406 0.0133309 0.00595152 -0.00896884 0.013621 0.00555288 -0.0086967 0.013527 0.00579688 -0.00899052 0.013405 0.00590295 -0.00899571 0.0131674 0.00598673 -0.00926098 0.0135715 0.00568967 -0.00898329 0.013527 0.00578624 -0.00928659 0.0133309 0.00591876 -0.00929132 0.0131674 0.00596108 -0.00952898 0.0136031 0.00557358 -0.00954158 0.0135715 0.00565126 -0.00927608 0.013471 0.0058247 -0.00957186 0.013405 0.00583797 -0.00928199 0.013405 0.00587765 -0.00951571 0.013621 0.00549175 -0.00980351 0.0136031 0.00552206 -0.00981993 0.0135715 0.00559901 -0.009835 0.013527 0.00566964 -0.00985941 0.013405 0.005784 -0.00956333 0.013471 0.00578538 -0.00958301 0.013251 0.00590668 -0.0101303 0.013471 0.00566439 -0.00984829 0.013471 0.0057319 -0.00987394 0.013251 0.00585208 -0.010318 0.013621 0.00529965 -0.0103672 0.0135715 0.00545358 -0.0103892 0.013527 0.00552237 -0.0101546 0.0133309 0.00575585 -0.0104503 0.0131674 0.00571373 -0.0101618 0.013251 0.00578315 -0.0103432 0.0136031 0.00537863 -0.0107012 0.013405 0.00553787 -0.0107256 0.013251 0.00560305 -0.0108974 0.0135715 0.0052548 -0.0106071 0.0136031 0.00528707 -0.0111545 0.0135715 0.00513599 -0.0108662 0.0136031 0.00518258 -0.0112381 0.013405 0.00530568 -0.0112563 0.0133309 0.00534279 -0.0109889 0.0133309 0.00546638 -0.0112688 0.013251 0.00536813 -0.0110058 0.0131674 0.00550547 -0.0111864 0.013527 0.00520078 -0.0117213 0.013471 0.00497636 -0.0114714 0.013471 0.00512339 -0.0117494 0.013405 0.0050216 -0.0117938 0.0131674 0.00509287 -0.0118862 0.0135715 0.00470547 -0.0117713 0.013331 0.00505671 
-0.0120336 0.013251 0.00491815 -0.0118414 0.0136031 0.0046408 -0.0121977 0.013471 0.00464615 -0.0119939 0.013405 0.00486094 -0.0122726 0.013251 0.00474356 -0.0124577 0.013405 0.00450436 -0.0125121 0.0131674 0.00456829 -0.0126757 0.013405 0.0043093 -0.0128839 0.013405 0.00410371 -0.0126392 0.013471 0.00427049 -0.0128173 0.013621 0.00365749 -0.0128798 0.0136031 0.00371199 -0.012691 0.0136031 0.00391786 -0.0129935 0.013527 0.00381119 -0.0130415 0.013471 0.00385305 -0.0127997 0.013527 0.00402257 -0.0130816 0.013405 0.00388807 -0.0129137 0.013331 0.00413241 -0.012934 0.013251 0.00415201 -0.0131199 0.0135715 0.00354577 -0.0129391 0.0135715 0.00376372 -0.0131766 0.013527 0.00359049 -0.0132266 0.013471 0.00362992 -0.0131449 0.0131674 0.00394326 -0.0131341 0.013251 0.00393383 -0.0131128 0.013331 0.00391526 -0.0133486 0.013527 0.00336101 -0.0132684 0.013405 0.00366292 -0.0131581 0.013621 0.00322545 -0.0134005 0.013471 0.00339791 -0.0134776 0.013331 0.00345278 -0.0136575 0.013527 0.00287794 -0.013759 0.013405 0.00293599 -0.0135627 0.013471 0.00315759 -0.0138319 0.0131674 0.00297766 -0.0136662 0.013251 0.0032238 -0.0135948 0.0135715 0.00284209 -0.01396 0.013251 0.00271002 -0.0137949 0.013331 0.00295652 -0.0138195 0.013251 0.00297054 -0.0138513 0.0135715 0.00233723 -0.0137293 0.0135715 0.00259283 -0.013898 0.013405 0.0026785 -0.0138871 0.0136031 0.00204738 -0.0137038 0.013621 0.00227126 -0.0139605 0.0135715 0.00207591 -0.0140278 0.013527 0.0021021 -0.0140871 0.013471 0.00212518 -0.0140875 0.013251 0.00244287 -0.0138099 0.013621 0.00201732 -0.0141368 0.013405 0.0021445 -0.0141753 0.013331 0.0021595 -0.0143157 0.0131674 0.00189583 -0.0142016 0.013251 0.00216974 -0.0139838 0.013621 0.00149526 -0.0140566 0.0135715 0.00180951 -0.0141251 0.013527 0.00183234 -0.0143615 0.013331 0.00160064 -0.0143887 0.013251 0.00160823 -0.0140512 0.013621 0.00122841 -0.0144508 0.013405 0.001019 -0.0143933 0.013405 0.00130586 -0.0144916 0.013331 0.00102612 -0.0144336 0.013331 0.00131499 -0.0142283 0.0136031 0.000696597 -0.0143064 0.0135715 0.000706305 -0.0143983 0.013471 0.00100982 -0.0145351 0.013331 0.000734744 -0.0145632 0.013251 0.000738229 -0.0145774 0.0131674 0.000739997 -0.0144698 0.013471 0.000434551 -0.014378 0.013527 0.000715215 -0.0141732 0.013621 0.000412495 -0.0144064 0.013527 0.000429831 -0.0145374 0.013405 0.000146287 -0.0146214 0.0131674 0.000148364 -0.0146214 0.0131674 -0.00014836 -0.0143343 0.0135715 -0.000424473 -0.0143484 0.0135715 -0.000141606 -0.0144064 0.013527 -0.000429827 -0.0144698 0.013471 -0.000434547 -0.0144205 0.013527 -0.000143392 -0.014378 0.013527 -0.000715211 -0.0144842 0.013471 -0.000144966 -0.0145374 0.013405 -0.000146284 -0.014523 0.013405 -0.000438497 -0.0145924 0.013251 -0.000443659 -0.014607 0.013251 -0.000148006 -0.0143064 0.0135715 -0.000706302 -0.0142645 0.0135715 -0.000986403 -0.0144916 0.013331 -0.00102612 -0.0142088 0.0135715 -0.00126409 -0.0144508 0.013405 -0.00101899 -0.0143933 0.013405 -0.00130585 -0.014209 0.013527 -0.00155809 -0.0142792 0.013527 -0.00128003 -0.0143413 0.013471 -0.00129409 -0.0140566 0.0135715 -0.00180951 -0.0141395 0.0135715 -0.00153868 -0.0143217 0.013405 -0.00158952 -0.0143021 0.013251 -0.0018913 -0.0142753 0.013331 -0.00188237 -0.0137795 0.0136031 -0.0023051 -0.0138513 0.0135715 -0.00233723 -0.0139172 0.013527 -0.00236671 -0.0139753 0.013471 -0.0023927 -0.0140871 0.013471 -0.00212518 -0.0140875 0.013251 -0.00244287 -0.0136592 0.0136031 -0.00255719 -0.0137293 0.0135715 -0.00259283 -0.0139348 0.013331 -0.00269722 -0.014024 0.013405 -0.00241445 -0.01396 
0.013251 -0.00271002 -0.0134546 0.013621 -0.00276186 -0.0136575 0.013527 -0.00287793 -0.0137937 0.013527 -0.00262553 -0.013759 0.013405 -0.00293598 -0.0137949 0.013331 -0.00295652 -0.0133819 0.0136031 -0.00304199 -0.013509 0.013527 -0.00312329 -0.0136783 0.0131674 -0.00323151 -0.0136662 0.013251 -0.00322379 -0.0138195 0.013251 -0.00297054 -0.0132257 0.0136031 -0.00327351 -0.0133121 0.013621 -0.00299733 -0.0134439 0.013405 -0.0034288 -0.0135627 0.013471 -0.00315759 -0.0132684 0.013405 -0.00366291 -0.0133009 0.013331 -0.00368853 -0.0134776 0.013331 -0.00345278 -0.0133343 0.0131674 -0.0037149 -0.0135006 0.013251 -0.00346915 -0.0128173 0.013621 -0.00365748 -0.0128798 0.0136031 -0.00371198 -0.0131199 0.0135715 -0.00354576 -0.0131766 0.013527 -0.00359049 -0.0130415 0.013471 -0.00385304 -0.0126313 0.013621 -0.00386033 -0.0131128 0.013331 -0.00391526 -0.0124355 0.013621 -0.00405374 -0.0127477 0.0135715 -0.00397246 -0.0125956 0.013527 -0.0042241 -0.0127234 0.013251 -0.00436002 -0.0129137 0.013331 -0.0041324 -0.0123351 0.0135715 -0.00436029 -0.0124923 0.0136031 -0.00411414 -0.0123819 0.013527 -0.00441529 -0.0125462 0.0135715 -0.00417148 -0.0124231 0.013471 -0.00446378 -0.0124577 0.013405 -0.00450435 -0.0126392 0.013471 -0.00427048 -0.0124845 0.013331 -0.00453585 -0.0125028 0.013251 -0.00455737 -0.0120669 0.0136031 -0.00447605 -0.0121149 0.0135715 -0.00453843 -0.0122554 0.013331 -0.00472117 -0.0120164 0.013621 -0.00441033 -0.0121589 0.013527 -0.00459568 -0.0121977 0.013471 -0.00464615 -0.0122726 0.013251 -0.00474356 -0.0116079 0.0136031 -0.00479417 -0.0118414 0.0136031 -0.00464079 -0.0117862 0.013251 -0.0050807 -0.0113672 0.0136031 -0.00493583 -0.0114405 0.013527 -0.00506774 -0.0115173 0.013331 -0.00520612 -0.0117713 0.013331 -0.00505671 -0.0115311 0.013251 -0.00523081 -0.0110831 0.013621 -0.00499103 -0.0111864 0.013527 -0.00520077 -0.0114714 0.013471 -0.00512339 -0.0112688 0.013251 -0.00536813 -0.0108662 0.0136031 -0.00518257 -0.0108974 0.0135715 -0.0052548 -0.0111545 0.0135715 -0.00513599 -0.0109513 0.013471 -0.00537951 -0.0106071 0.0136031 -0.00528707 -0.0109261 0.013527 -0.00532108 -0.0107012 0.013405 -0.00553786 -0.0109725 0.013405 -0.00542841 -0.0107256 0.013251 -0.00560305 -0.0107306 0.0131674 -0.00561647 -0.010578 0.013621 -0.00520944 -0.0106601 0.013527 -0.00542837 -0.0104373 0.013331 -0.00567316 -0.00980351 0.0136031 -0.00552205 -0.0103432 0.0136031 -0.00537862 -0.0104459 0.013251 -0.00570007 -0.0101618 0.013251 -0.00578315 -0.00956333 0.013471 -0.00578538 -0.00955314 0.013527 -0.00572254 -0.00984829 0.013471 -0.00573189 -0.00985941 0.013405 -0.005784 -0.00957186 0.013405 -0.00583797 -0.00958301 0.013251 -0.00590668 -0.00987394 0.013251 -0.00585208 -0.00954158 0.0135715 -0.00565125 -0.00927608 0.013471 -0.0058247 -0.00929132 0.0131674 -0.00596107 -0.00898329 0.013527 -0.00578623 -0.00897883 0.0135715 -0.00571416 -0.00899482 0.013251 -0.00597243 -0.00899571 0.0131674 -0.00598673 -0.00869581 0.0135715 -0.00572466 -0.00899052 0.013405 -0.00590295 -0.00841553 0.0136031 -0.00564252 -0.00841262 0.0135715 -0.00572116 -0.00840248 0.0131674 -0.00599407 -0.00813675 0.0136031 -0.00562526 -0.0084056 0.013405 -0.00591018 -0.00814392 0.013621 -0.00554267 -0.00785916 0.0136031 -0.00559423 -0.00784848 0.0135715 -0.00567219 -0.00783869 0.013527 -0.00574374 -0.0081237 0.013527 -0.0057756 -0.00781338 0.013251 -0.00592857 -0.00810757 0.013251 -0.00596145 -0.00754386 0.013471 -0.0057604 -0.00753403 0.013405 -0.00581276 -0.00783005 0.013471 -0.00580682 -0.00782283 0.013405 -0.0058596 -0.00752119 0.013251 
-0.00588118 -0.00751855 0.0131674 -0.00589526 -0.00727513 0.013527 -0.00563797 -0.00723827 0.013331 -0.00579192 -0.00723169 0.013251 -0.00581939 -0.00704031 0.0136031 -0.00541948 -0.0069801 0.013471 -0.00562542 -0.00696514 0.013405 -0.00567655 -0.0072479 0.013405 -0.0057517 -0.00706358 0.013621 -0.00533991 -0.00674845 0.0135715 -0.00540882 -0.00699796 0.013527 -0.00556432 -0.00651269 0.0136031 -0.00523642 -0.00645624 0.013527 -0.00537637 -0.0064125 0.013405 -0.00548482 -0.00638645 0.013251 -0.00554937 -0.00612661 0.013331 -0.00540623 -0.00639702 0.013331 -0.00552317 -0.00625632 0.0136031 -0.00512555 -0.00590621 0.013471 -0.00519223 -0.00580361 0.013621 -0.00479504 -0.0054812 0.0135715 -0.00478469 -0.00576157 0.0136031 -0.00486649 -0.00565276 0.013471 -0.00505142 -0.00534373 0.013621 -0.00449287 -0.00556993 0.013621 -0.00464964 -0.00524842 0.0135715 -0.00462336 -0.00540658 0.013471 -0.00489825 -0.00502391 0.0135715 -0.00445072 -0.00497849 0.013527 -0.00450686 -0.00487892 0.013331 -0.00462992 -0.00511247 0.013331 -0.00480952 -0.00486115 0.013251 -0.00465189 -0.00490493 0.013405 -0.00459777 -0.00486067 0.0136031 -0.00420854 -0.00455111 0.013527 -0.00412459 -0.00480821 0.0135715 -0.00426719 -0.00476007 0.013527 -0.00432102 -0.00450637 0.013471 -0.00416989 -0.00446893 0.013405 -0.00420779 -0.00443987 0.013331 -0.00423722 -0.00440994 0.0131674 -0.00426751 -0.00471541 0.013621 -0.00395825 -0.00426593 0.013405 -0.00399711 -0.00423544 0.013331 -0.00402506 -0.00440534 0.0135715 -0.00386927 -0.00416358 0.013527 -0.00370197 -0.00404375 0.0135715 -0.0034335 -0.00398596 0.013527 -0.00347681 -0.00411459 0.013471 -0.00374262 -0.00387956 0.0135715 -0.00320274 -0.00410672 0.0136031 -0.00338631 -0.0038197 0.013527 -0.00324314 -0.00394478 0.0136031 -0.00315872 -0.00376693 0.013471 -0.00327875 -0.00372697 0.0135715 -0.00296414 -0.00366507 0.013251 -0.0033475 -0.00352976 0.013331 -0.00308349 -0.00365563 0.0136031 -0.00268093 -0.0037943 0.0136031 -0.0029234 -0.00386522 0.013621 -0.00288048 -0.0034668 0.013471 -0.0027828 -0.00350559 0.013251 -0.00309811 -0.00352913 0.0136031 -0.00243189 -0.00358638 0.0135715 -0.00271829 -0.00327586 0.013527 -0.00223509 -0.00339294 0.013527 -0.00249689 -0.00321712 0.013471 -0.00225963 -0.00335864 0.013251 -0.00284115 -0.00316797 0.013405 -0.00228017 0.00860795 -0.0109977 -0.00554111 0.00854207 -0.0111485 -0.0055355 0.00836212 -0.0113147 -0.00552018 0.00858124 -0.0110758 -0.00553883 0.008431 -0.0112696 -0.00552605 0.00841889 -0.0112795 -0.0055 0.00835199 -0.0113205 -0.0055 0.00828676 -0.0113479 -0.00551377 0.00827951 -0.0113505 -0.0055 0.00820322 -0.0113688 -0.0055 0.00849154 -0.0112136 -0.0055312 0.00816205 -0.0113682 -0.00557349 0.00817394 -0.0113682 -0.00556617 0.00819318 -0.0113682 -0.00554609 0.008125 -0.0113505 -0.00565451 0.00815523 -0.0113147 -0.00573605 0.00816401 -0.0112696 -0.00580462 0.00821493 -0.0112696 -0.00579365 0.00823214 -0.0113147 -0.00571249 0.00826326 -0.0112696 -0.00577423 0.0084222 -0.0112696 -0.00557739 0.00823272 -0.0112136 -0.00585174 0.00834374 -0.0112136 -0.00579576 0.00837944 -0.0112696 -0.00567197 0.008125 -0.0112286 -0.00585356 0.008125 -0.0111689 -0.00590451 0.00817173 -0.0112136 -0.00586488 0.00831345 -0.0111485 -0.00587377 0.00839058 -0.0112136 -0.00575454 0.00850642 -0.0111485 -0.00567242 0.00853008 -0.0111485 -0.00560548 0.008125 -0.011102 -0.00594551 0.00825908 -0.0110758 -0.00593782 0.0083739 -0.0111485 -0.00583654 0.00850436 -0.0110758 -0.00575641 0.00856811 -0.0110758 -0.00561538 0.00818316 -0.0110758 -0.00595418 0.00826693 -0.0109977 
-0.00596346 0.00856667 -0.0109977 -0.00569965 0.00834321 -0.0109977 -0.0059328 0.00834933 -0.0109163 -0.00594494 0.0084213 -0.0109163 -0.00590063 0.00847493 -0.0109977 -0.00583538 0.00848474 -0.0109163 -0.00584479 0.00859406 -0.0109977 -0.00562214 0.00827951 -0.010875 -0.00597553 0.00835199 -0.010875 -0.00594551 0.00841889 -0.010875 -0.00590451 0.00852951 -0.010875 -0.00579389 0.00860721 -0.0109163 -0.00562556 0.00861884 -0.010875 -0.00557822 0.00827091 -0.0109163 -0.00597645 0.00828211 -0.0113479 -0.00554091 0.008207 -0.0113682 -0.00550698 0.00820464 -0.0113682 -0.00552074 0.00819999 -0.0113682 -0.0055339 0.00825951 -0.0113479 -0.00559091 0.00818441 -0.0113682 -0.00555695 0.00814562 -0.0113479 -0.00566104 0.00813545 -0.0113682 -0.00558163 0.00827294 -0.0113479 -0.00556687 0.00824221 -0.0113479 -0.00561234 0.00822154 -0.0113479 -0.00563053 0.00819468 -0.0113147 -0.00572754 0.00819809 -0.0113479 -0.00564497 0.00861884 -0.0109532 -0.0055 0.0086215 -0.0109163 -0.00554226 0.00818829 -0.0109163 -0.00599426 0.00818656 -0.0109977 -0.00598078 0.00817817 -0.0111485 -0.00591519 0.0081491 -0.0113682 -0.00557869 0.00817254 -0.0113479 -0.00565524 0.00833114 -0.0110758 -0.00590886 0.00824757 -0.0111485 -0.00590024 0.00841322 -0.0109977 -0.0058897 0.00829061 -0.0112136 -0.00582847 0.00839727 -0.0110758 -0.00586814 0.00834672 -0.0112696 -0.0057125 0.00830761 -0.0112696 -0.00574691 0.00829681 -0.0113147 -0.00566466 0.00826651 -0.0113147 -0.00569133 0.00852658 -0.0109977 -0.00577142 0.00845557 -0.0110758 -0.00581683 0.0084718 -0.0111485 -0.0057344 0.0084272 -0.0111485 -0.00578964 0.00853784 -0.0109163 -0.00577903 0.00854224 -0.0110758 -0.00568861 0.00842978 -0.0112136 -0.005706 0.00840484 -0.0112696 -0.0056265 0.00834185 -0.0113147 -0.00559802 0.00832216 -0.0113147 -0.00563326 0.00857906 -0.0109163 -0.00570525 0.00848099 -0.0112136 -0.00559269 0.00846021 -0.0112136 -0.00565152 0.00835529 -0.0113147 -0.00555997 -0.002375 -0.0108856 -0.00599989 0.008125 -0.010875 -0.006 0.002875 -0.0108856 -0.00599989 0.006375 -0.0108856 -0.00599989 0.002875 -0.0111281 -0.00593123 -0.000625 -0.0112573 -0.0058223 -0.002375 -0.0113062 -0.00575306 -0.012875 -0.0113062 -0.00575306 -0.012875 -0.0112573 -0.0058223 -0.012875 -0.0111973 -0.00588227 -0.012875 -0.0109699 -0.00599091 -0.009375 -0.0108856 -0.00599989 -0.005875 -0.0108856 -0.00599989 -0.014125 -0.010875 -0.006 -0.005875 -0.0109699 -0.00599091 -0.004125 -0.0109699 -0.00599091 -0.007625 -0.0110515 -0.0059678 -0.009375 -0.0111281 -0.00593123 -0.007625 -0.0111281 -0.00593123 -0.011125 -0.0111973 -0.00588227 0.008125 -0.0109532 -0.00599385 0.006375 -0.0109699 -0.00599091 0.008125 -0.0110295 -0.00597553 0.001125 -0.0112573 -0.0058223 -0.004125 -0.0113428 -0.00567654 -0.005875 -0.0113659 -0.00559494 -0.000625 -0.0113428 -0.00567654 -0.004125 -0.0113749 -0.00551061 -0.005875 -0.0113749 -0.00551061 0.006375 -0.0111281 -0.00593123 0.006375 -0.0111973 -0.00588227 0.004625 -0.0111973 -0.00588227 0.002875 -0.0112573 -0.0058223 0.001125 -0.0113062 -0.00575306 0.002875 -0.0113062 -0.00575306 -0.000625 -0.0113659 -0.00559494 -0.002375 -0.0113659 -0.00559494 0.004625 -0.0112573 -0.0058223 0.001125 -0.0113749 -0.00551061 0.008125 -0.0112795 -0.00579389 0.008125 -0.0113205 -0.005727 0.004625 -0.0113659 -0.00559494 0.006375 -0.0113659 -0.00559494 0.002875 -0.0113749 -0.00551061 0.004625 -0.0113749 -0.00551061 0.006375 -0.0113749 -0.00551061 0.008125 -0.0113688 -0.00557822 -0.012875 -0.0113749 -0.00551061 -0.0146226 -0.0113726 -0.00554901 -0.0146035 -0.0113535 -0.00564514 -0.012875 -0.0113659 
-0.00559494 -0.014566 -0.011316 -0.0057357 -0.0145407 -0.0112907 -0.00577779 -0.012875 -0.0111281 -0.00593123 -0.0144028 -0.0111528 -0.00591574 -0.0143163 -0.0110663 -0.00596194 -0.012875 -0.0110515 -0.0059678 -0.0142701 -0.0110201 -0.00597847 -0.014174 -0.010924 -0.00599759 -0.012875 -0.0108856 -0.00599989 -0.012875 -0.0113428 -0.00567654 0.004625 -0.0109699 -0.00599091 -0.002375 -0.0109699 -0.00599091 -0.000625 -0.0108856 -0.00599989 0.001125 -0.0109699 -0.00599091 0.004625 -0.0108856 -0.00599989 0.001125 -0.0108856 -0.00599989 -0.004125 -0.0108856 -0.00599989 -0.007625 -0.0109699 -0.00599091 -0.009375 -0.0109699 -0.00599091 -0.011125 -0.0108856 -0.00599989 -0.011125 -0.0109699 -0.00599091 -0.007625 -0.0108856 -0.00599989 0.006375 -0.0110515 -0.0059678 0.004625 -0.0110515 -0.0059678 0.002875 -0.0110515 -0.0059678 0.002875 -0.0109699 -0.00599091 -0.002375 -0.0110515 -0.0059678 -0.000625 -0.0109699 -0.00599091 -0.005875 -0.0110515 -0.0059678 -0.009375 -0.0110515 -0.0059678 -0.011125 -0.0110515 -0.0059678 0.004625 -0.0111281 -0.00593123 0.001125 -0.0110515 -0.0059678 0.001125 -0.0111281 -0.00593123 -0.005875 -0.0111281 -0.00593123 -0.004125 -0.0110515 -0.0059678 -0.002375 -0.0111281 -0.00593123 -0.000625 -0.0110515 -0.0059678 -0.011125 -0.0111281 -0.00593123 0.002875 -0.0111973 -0.00588227 -0.000625 -0.0111281 -0.00593123 -0.002375 -0.0111973 -0.00588227 -0.004125 -0.0111281 -0.00593123 -0.009375 -0.0111973 -0.00588227 0.001125 -0.0111973 -0.00588227 -0.002375 -0.0112573 -0.0058223 -0.000625 -0.0111973 -0.00588227 -0.004125 -0.0112573 -0.0058223 -0.007625 -0.0111973 -0.00588227 -0.004125 -0.0111973 -0.00588227 -0.005875 -0.0111973 -0.00588227 0.006375 -0.0112573 -0.0058223 -0.005875 -0.0112573 -0.0058223 -0.005875 -0.0113062 -0.00575306 -0.007625 -0.0112573 -0.0058223 -0.011125 -0.0112573 -0.0058223 -0.009375 -0.0112573 -0.0058223 -0.007625 -0.0113062 -0.00575306 -0.009375 -0.0113062 -0.00575306 0.006375 -0.0113062 -0.00575306 0.004625 -0.0113428 -0.00567654 0.002875 -0.0113428 -0.00567654 0.004625 -0.0113062 -0.00575306 0.001125 -0.0113428 -0.00567654 -0.002375 -0.0113428 -0.00567654 -0.000625 -0.0113062 -0.00575306 -0.004125 -0.0113062 -0.00575306 -0.005875 -0.0113428 -0.00567654 -0.011125 -0.0113428 -0.00567654 -0.011125 -0.0113062 -0.00575306 0.006375 -0.0113428 -0.00567654 0.002875 -0.0113659 -0.00559494 0.001125 -0.0113659 -0.00559494 -0.004125 -0.0113659 -0.00559494 -0.009375 -0.0113659 -0.00559494 -0.009375 -0.0113428 -0.00567654 -0.007625 -0.0113428 -0.00567654 -0.002375 -0.0113749 -0.00551061 -0.000625 -0.0113749 -0.00551061 -0.007625 -0.0113749 -0.00551061 -0.007625 -0.0113659 -0.00559494 -0.011125 -0.0113659 -0.00559494 -0.011125 -0.0113749 -0.00551061 -0.009375 -0.0113749 -0.00551061 0.00827951 0.005375 -0.00597553 0.00821994 0.0037 -0.0059909 0.00830154 0.0037 -0.0059678 0.0084473 -0.001325 -0.00588226 0.0084473 0.000350001 -0.00588226 0.00850726 -0.003 -0.0058223 0.00850726 -0.001325 -0.0058223 0.0085928 -0.00635 -0.00567654 0.0086159 -0.0097 -0.00559494 0.00860053 -0.010875 -0.00565451 0.00855623 -0.00635 -0.00575306 0.00837806 -0.003 -0.00593123 0.00821994 0.000350001 -0.0059909 0.00821994 0.002025 -0.0059909 0.00813561 0.002025 -0.00599989 0.00813561 0.0037 -0.00599989 0.00830154 0.002025 -0.0059678 0.00837806 0.000350001 -0.00593123 0.00855623 -0.004675 -0.00575306 0.00862489 -0.0097 -0.00551061 0.0086159 -0.00635 -0.00559494 0.0086159 -0.004675 -0.00559494 0.00862489 -0.00635 -0.00551061 0.00837806 0.0037 -0.00593123 0.00855623 -0.001325 -0.00575306 0.00855623 
0.000350001 -0.00575306 0.0086159 -0.003 -0.00559494 0.00841889 0.005375 -0.00590451 0.0084473 0.0037 -0.00588226 0.00847855 0.005375 -0.00585355 0.00850726 0.0037 -0.0058223 0.00850726 0.002025 -0.0058223 0.0085928 0.000350001 -0.00567654 0.0085928 -0.001325 -0.00567654 0.0086159 -0.001325 -0.00559494 0.00862489 -0.004675 -0.00551061 0.00862489 -0.003 -0.00551061 0.0085928 0.002025 -0.00567654 0.00862489 -0.001325 -0.00551061 0.008625 0.005375 -0.0055 0.008625 -0.010875 -0.0055 0.00852951 0.005375 -0.00579389 0.00855623 0.0037 -0.00575306 0.0086159 0.002025 -0.00559494 0.00862489 0.000350001 -0.00551061 0.0085928 0.0037 -0.00567654 0.00862489 0.002025 -0.00551061 0.0086159 0.0037 -0.00559494 0.00862489 0.0037 -0.00551061 0.00850726 -0.00635 -0.0058223 0.00850726 -0.008025 -0.0058223 0.0084473 -0.004675 -0.00588226 0.00821994 -0.001325 -0.0059909 0.00813561 0.000350001 -0.00599989 0.0085705 -0.010875 -0.005727 0.00855623 -0.0097 -0.00575306 0.00830154 -0.004675 -0.0059678 0.00850726 -0.0097 -0.0058223 0.00847855 -0.010875 -0.00585356 0.00830154 -0.00635 -0.0059678 0.00813561 -0.001325 -0.00599989 0.0084473 -0.0097 -0.00588227 0.00821994 -0.004675 -0.0059909 0.00837806 -0.0097 -0.00593123 0.00821994 -0.008025 -0.00599091 0.00813561 -0.008025 -0.00599989 0.00813561 -0.00635 -0.00599989 0.00821994 -0.0097 -0.00599091 0.00820322 -0.010875 -0.00599385 0.00813561 -0.0097 -0.00599989 0.00813561 -0.004675 -0.00599989 0.00813561 -0.003 -0.00599989 0.00821994 -0.003 -0.0059909 0.00821994 -0.00635 -0.00599091 0.00830154 -0.0097 -0.0059678 0.00837806 -0.001325 -0.00593123 0.00830154 0.000350001 -0.0059678 0.00837806 0.002025 -0.00593123 0.00830154 -0.003 -0.0059678 0.00830154 -0.001325 -0.0059678 0.00837806 -0.00635 -0.00593123 0.00830154 -0.008025 -0.0059678 0.0084473 0.002025 -0.00588226 0.00837806 -0.004675 -0.00593123 0.00837806 -0.008025 -0.00593123 0.00850726 0.000350001 -0.0058223 0.00850726 -0.004675 -0.0058223 0.0084473 -0.003 -0.00588226 0.0084473 -0.00635 -0.00588226 0.0084473 -0.008025 -0.00588227 0.00855623 -0.003 -0.00575306 0.00855623 0.002025 -0.00575306 0.0085928 -0.003 -0.00567654 0.0085928 -0.004675 -0.00567654 0.0085928 -0.008025 -0.00567654 0.0085928 -0.0097 -0.00567654 0.00855623 -0.008025 -0.00575306 0.0086159 0.000350001 -0.00559494 0.0086159 -0.008025 -0.00559494 0.00862489 -0.008025 -0.00551061 0.00861884 -0.0109532 0.0055 0.00854224 -0.0110758 0.0056886 0.00850642 -0.0111485 0.00567241 0.0084718 -0.0111485 0.00573439 0.00823214 -0.0113147 0.00571249 0.00819809 -0.0113479 0.00564496 0.0085705 -0.010875 0.00572699 0.00847493 -0.0109977 0.00583538 0.00823272 -0.0112136 0.00585174 0.00816401 -0.0112696 0.00580462 0.00815523 -0.0113147 0.00573605 0.008125 -0.0112795 0.00579389 0.00813545 -0.0113682 0.00558163 0.00819468 -0.0113147 0.00572754 0.00817254 -0.0113479 0.00565523 0.00821493 -0.0112696 0.00579364 0.00852951 -0.010875 0.00579389 0.00848474 -0.0109163 0.00584479 0.00839727 -0.0110758 0.00586814 0.00841322 -0.0109977 0.0058897 0.00831345 -0.0111485 0.00587376 0.00824757 -0.0111485 0.00590023 0.00817173 -0.0112136 0.00586488 0.00847855 -0.010875 0.00585355 0.00833114 -0.0110758 0.00590886 0.0084213 -0.0109163 0.00590062 0.00834933 -0.0109163 0.00594494 0.00826693 -0.0109977 0.00596345 0.008125 -0.011102 0.0059455 0.00817817 -0.0111485 0.00591519 0.00827951 -0.010875 0.00597553 0.00820322 -0.010875 0.00599384 0.00818656 -0.0109977 0.00598077 0.00818316 -0.0110758 0.00595418 0.00827091 -0.0109163 0.00597645 0.00818829 -0.0109163 0.00599426 0.00820464 -0.0113682 0.00552074 
0.00819318 -0.0113682 0.00554608 0.008125 -0.011375 0.0055 0.00818441 -0.0113682 0.00555694 0.00816205 -0.0113682 0.00557348 0.0081491 -0.0113682 0.00557869 0.00828676 -0.0113479 0.00551377 0.00836212 -0.0113147 0.00552018 0.008431 -0.0112696 0.00552604 0.00837944 -0.0112696 0.00567197 0.00834672 -0.0112696 0.0057125 0.00822154 -0.0113479 0.00563053 0.00826651 -0.0113147 0.00569133 0.00841889 -0.0112795 0.0055 0.00847855 -0.0112286 0.0055 0.00842978 -0.0112136 0.00570599 0.00849154 -0.0112136 0.0055312 0.00848099 -0.0112136 0.00559269 0.00846021 -0.0112136 0.00565152 0.00858124 -0.0110758 0.00553883 0.00856811 -0.0110758 0.00561538 0.00853008 -0.0111485 0.00560547 0.0085705 -0.011102 0.0055 0.00860053 -0.0110295 0.0055 0.00860795 -0.0109977 0.0055411 0.00854207 -0.0111485 0.0055355 0.0084222 -0.0112696 0.00557738 0.00835529 -0.0113147 0.00555996 0.008207 -0.0113682 0.00550698 0.00828211 -0.0113479 0.00554091 0.0086215 -0.0109163 0.00554226 0.00859406 -0.0109977 0.00562213 0.00860721 -0.0109163 0.00562556 0.00856667 -0.0109977 0.00569965 0.00850436 -0.0110758 0.0057564 0.00839058 -0.0112136 0.00575454 0.00826326 -0.0112696 0.00577422 0.00830761 -0.0112696 0.00574691 0.00819999 -0.0113682 0.0055339 0.00857906 -0.0109163 0.00570525 0.0083739 -0.0111485 0.00583654 0.00829061 -0.0112136 0.00582847 0.00840484 -0.0112696 0.0056265 0.00834185 -0.0113147 0.00559802 0.00825951 -0.0113479 0.00559091 0.00827294 -0.0113479 0.00556687 0.00853784 -0.0109163 0.00577903 0.00852658 -0.0109977 0.00577142 0.00845557 -0.0110758 0.00581683 0.0084272 -0.0111485 0.00578963 0.00832216 -0.0113147 0.00563326 0.00829681 -0.0113147 0.00566466 0.00834374 -0.0112136 0.00579576 0.00824221 -0.0113479 0.00561234 0.00817394 -0.0113682 0.00556617 0.00834321 -0.0109977 0.0059328 0.00814562 -0.0113479 0.00566103 0.00825908 -0.0110758 0.00593782 -0.002375 -0.0113749 0.00551061 0.001125 -0.0113749 0.00551061 0.008125 -0.0113688 0.00557822 0.004625 -0.0113659 0.00559494 0.002875 -0.0113062 0.00575306 -0.002375 -0.0111281 0.00593123 -0.004125 -0.0110515 0.0059678 -0.012875 -0.0109699 0.0059909 -0.012875 -0.0112573 0.00582229 -0.012875 -0.0113062 0.00575306 -0.012875 -0.0113428 0.00567654 -0.014625 -0.011375 0.0055 -0.0146226 -0.0113726 0.00554901 -0.011125 -0.0113749 0.00551061 -0.009375 -0.0113749 0.00551061 -0.005875 -0.0113749 0.00551061 -0.007625 -0.0113749 0.00551061 -0.005875 -0.0113659 0.00559494 -0.007625 -0.0113659 0.00559494 -0.007625 -0.0113428 0.00567654 -0.009375 -0.0113428 0.00567654 -0.011125 -0.0113062 0.00575306 0.008125 -0.0113505 0.00565451 0.004625 -0.0113428 0.00567654 -0.002375 -0.0110515 0.0059678 -0.014125 -0.010875 0.006 -0.007625 -0.0108856 0.00599989 0.006375 -0.0113428 0.00567654 0.008125 -0.0113205 0.00572699 0.004625 -0.0113062 0.00575306 0.004625 -0.0112573 0.00582229 0.001125 -0.0111281 0.00593123 -0.005875 -0.0108856 0.00599989 0.006375 -0.0113062 0.00575306 0.006375 -0.0112573 0.00582229 0.004625 -0.0111973 0.00588226 0.002875 -0.0111973 0.00588226 -0.000625 -0.0110515 0.0059678 0.001125 -0.0110515 0.0059678 -0.000625 -0.0109699 0.0059909 -0.002375 -0.0108856 0.00599989 -0.004125 -0.0108856 0.00599989 -0.000625 -0.0108856 0.00599989 0.008125 -0.0112286 0.00585355 0.008125 -0.0111689 0.00590451 0.006375 -0.0111973 0.00588226 0.004625 -0.0110515 0.0059678 0.002875 -0.0110515 0.0059678 0.001125 -0.0108856 0.00599989 0.004625 -0.0109699 0.0059909 0.002875 -0.0108856 0.00599989 0.002875 -0.0109699 0.0059909 0.006375 -0.0110515 0.0059678 0.008125 -0.0110295 0.00597553 0.008125 -0.0109532 0.00599384 
0.004625 -0.0108856 0.00599989 0.006375 -0.0109699 0.0059909 0.008125 -0.010875 0.006 0.006375 -0.0108856 0.00599989 -0.0144028 -0.0111528 0.00591573 -0.0144422 -0.0111922 0.0058865 -0.0144786 -0.0112286 0.00585355 -0.014566 -0.011316 0.0057357 -0.0146035 -0.0113535 0.00564514 -0.012875 -0.0113749 0.00551061 -0.012875 -0.0113659 0.00559494 0.006375 -0.0113659 0.00559494 0.006375 -0.0113749 0.00551061 0.004625 -0.0113749 0.00551061 -0.000625 -0.0113749 0.00551061 -0.004125 -0.0113749 0.00551061 0.002875 -0.0113749 0.00551061 -0.009375 -0.0113659 0.00559494 -0.011125 -0.0113659 0.00559494 0.002875 -0.0113428 0.00567654 0.001125 -0.0113428 0.00567654 -0.000625 -0.0113659 0.00559494 -0.000625 -0.0113428 0.00567654 -0.002375 -0.0113659 0.00559494 -0.002375 -0.0113428 0.00567654 -0.004125 -0.0113659 0.00559494 0.001125 -0.0113659 0.00559494 0.002875 -0.0113659 0.00559494 -0.004125 -0.0113428 0.00567654 -0.007625 -0.0113062 0.00575306 -0.009375 -0.0113062 0.00575306 -0.002375 -0.0113062 0.00575306 -0.000625 -0.0113062 0.00575306 -0.005875 -0.0113428 0.00567654 -0.011125 -0.0113428 0.00567654 0.002875 -0.0112573 0.00582229 0.001125 -0.0113062 0.00575306 -0.000625 -0.0112573 0.00582229 -0.004125 -0.0112573 0.00582229 -0.005875 -0.0112573 0.00582229 -0.005875 -0.0113062 0.00575306 -0.004125 -0.0113062 0.00575306 0.001125 -0.0111973 0.00588226 0.001125 -0.0112573 0.00582229 -0.002375 -0.0111973 0.00588226 -0.004125 -0.0111973 0.00588226 -0.011125 -0.0112573 0.00582229 -0.011125 -0.0111973 0.00588226 -0.012875 -0.0111973 0.00588226 -0.009375 -0.0112573 0.00582229 -0.005875 -0.0111973 0.00588226 -0.002375 -0.0112573 0.00582229 -0.007625 -0.0111973 0.00588226 -0.007625 -0.0112573 0.00582229 -0.009375 -0.0111973 0.00588226 -0.000625 -0.0111973 0.00588226 -0.004125 -0.0111281 0.00593123 -0.009375 -0.0111281 0.00593123 -0.012875 -0.0111281 0.00593123 0.006375 -0.0111281 0.00593123 0.004625 -0.0111281 0.00593123 0.002875 -0.0111281 0.00593123 -0.000625 -0.0111281 0.00593123 -0.011125 -0.0110515 0.0059678 -0.011125 -0.0111281 0.00593123 -0.012875 -0.0110515 0.0059678 -0.009375 -0.0110515 0.0059678 -0.007625 -0.0111281 0.00593123 -0.005875 -0.0111281 0.00593123 0.001125 -0.0109699 0.0059909 -0.002375 -0.0109699 0.0059909 -0.004125 -0.0109699 0.0059909 -0.005875 -0.0109699 0.0059909 -0.007625 -0.0109699 0.0059909 -0.005875 -0.0110515 0.0059678 -0.007625 -0.0110515 0.0059678 -0.011125 -0.0109699 0.0059909 -0.009375 -0.0108856 0.00599989 -0.012875 -0.0108856 0.00599989 -0.009375 -0.0109699 0.0059909 -0.011125 -0.0108856 0.00599989 0.00855623 -0.0111281 -1.45927e-09 0.00855623 -0.0111281 0.00183333 0.0084473 -0.0112573 -0.00366667 0.0086159 -0.0109699 0.00366667 0.0085928 -0.0110515 0.00183333 0.0086159 -0.0109699 0.00183333 0.0085928 -0.0110515 0.00366667 0.00855623 -0.0111281 0.00366667 0.00850726 -0.0111973 -1.46835e-09 0.00837806 -0.0113062 -0.00366667 0.00830154 -0.0113428 -0.00366667 0.00852951 -0.0111689 0.0055 0.00821994 -0.0113659 -0.00366667 0.00850726 -0.0111973 0.00366667 0.0084473 -0.0112573 0.00183333 0.00837806 -0.0113062 0.00183333 0.00837806 -0.0113062 -1.48263e-09 0.00830154 -0.0113428 -0.00183334 0.00830154 -0.0113428 -1.48743e-09 0.00837806 -0.0113062 0.00366667 0.00830154 -0.0113428 0.00183333 0.008125 -0.011375 -0.0055 0.00813561 -0.0113749 -0.00366667 0.00835199 -0.0113205 0.0055 0.00830154 -0.0113428 0.00366667 0.00821994 -0.0113659 0.00183333 0.00813561 -0.0113749 -0.00183334 0.00813561 -0.0113749 -1.49164e-09 0.00827951 -0.0113505 0.0055 0.00821994 -0.0113659 0.00366667 0.00813561 
-0.0113749 0.00366667 0.00813561 -0.0113749 0.00183333 0.00820322 -0.0113688 0.0055 0.00847855 -0.0112286 -0.0055 0.00852951 -0.0111689 -0.0055 0.00855623 -0.0111281 -0.00183334 0.00862489 -0.0108856 0.00366667 0.00862489 -0.0108856 0.00183333 0.0085705 -0.011102 -0.0055 0.0086159 -0.0109699 -1.43853e-09 0.00860053 -0.0110295 -0.0055 0.0086159 -0.0109699 -0.00366667 0.00862489 -0.0108856 -1.42748e-09 0.00862489 -0.0108856 -0.00183334 0.00862489 -0.0108856 -0.00366667 0.0086159 -0.0109699 -0.00183334 0.0085928 -0.0110515 -1.44923e-09 0.0085928 -0.0110515 -0.00183334 0.0085928 -0.0110515 -0.00366667 0.00850726 -0.0111973 0.00183333 0.00850726 -0.0111973 -0.00366667 0.00855623 -0.0111281 -0.00366667 0.0084473 -0.0112573 0.00366667 0.0084473 -0.0112573 -1.47621e-09 0.00850726 -0.0111973 -0.00183334 0.0084473 -0.0112573 -0.00183334 0.00837806 -0.0113062 -0.00183334 0.00821994 -0.0113659 -1.49046e-09 0.00821994 -0.0113659 -0.00183334 0.00861884 0.005375 0.00557822 0.00862489 0.0037 0.00551061 0.00860053 0.005375 0.00565451 0.0086159 0.0037 0.00559494 0.0085928 0.0037 0.00567654 0.0085928 0.002025 0.00567654 0.00850726 0.00035 0.0058223 0.00837806 -0.004675 0.00593123 0.00821994 -0.00635 0.0059909 0.00813561 -0.008025 0.00599989 0.00813561 -0.0097 0.00599989 0.00821994 -0.0097 0.0059909 0.0084473 -0.004675 0.00588226 0.00850726 -0.004675 0.0058223 0.00855623 -0.001325 0.00575306 0.0085928 0.00035 0.00567654 0.00862489 0.002025 0.00551061 0.00837806 -0.00635 0.00593123 0.00821994 -0.008025 0.0059909 0.00855623 0.0037 0.00575306 0.00850726 0.002025 0.0058223 0.0084473 0.00035 0.00588226 0.00852951 0.005375 0.00579389 0.00837806 -0.001325 0.00593123 0.00813561 -0.00635 0.00599989 0.00821994 -0.004675 0.0059909 0.00813561 -0.004675 0.00599989 0.00850726 0.0037 0.0058223 0.00847855 0.005375 0.00585355 0.00837806 0.00035 0.00593123 0.00821994 -0.001325 0.0059909 0.00813561 -0.003 0.00599989 0.0084473 0.0037 0.00588226 0.00837806 0.002025 0.00593123 0.00830154 0.00035 0.0059678 0.00813561 -0.001325 0.00599989 0.00841889 0.005375 0.00590451 0.00837806 0.0037 0.00593123 0.00821994 0.00035 0.0059909 0.00821994 0.002025 0.0059909 0.00813561 0.002025 0.00599989 0.00813561 0.00035 0.00599989 0.00821994 0.0037 0.0059909 0.00820322 0.005375 0.00599385 0.00813561 0.0037 0.00599989 0.00830154 -0.0097 0.0059678 0.00837806 -0.008025 0.00593123 0.00855623 -0.004675 0.00575306 0.0086159 -0.001325 0.00559494 0.00862489 0.00035 0.00551061 0.0086159 0.00035 0.00559494 0.00835199 -0.010875 0.0059455 0.00841889 -0.010875 0.00590451 0.00837806 -0.0097 0.00593123 0.0084473 -0.0097 0.00588226 0.00850726 -0.008025 0.00582229 0.00855623 -0.00635 0.00575306 0.00862489 -0.001325 0.00551061 0.008625 0.005375 0.0055 0.00850726 -0.0097 0.00582229 0.0085928 -0.004675 0.00567654 0.00862489 -0.003 0.00551061 0.008625 -0.010875 0.0055 0.0085928 -0.0097 0.00567654 0.00862489 -0.004675 0.00551061 0.00862489 -0.00635 0.00551061 0.0086159 -0.0097 0.00559494 0.0086159 -0.008025 0.00559494 0.00862489 -0.008025 0.00551061 0.00860053 -0.010875 0.00565451 0.00861884 -0.010875 0.00557822 0.00862489 -0.0097 0.00551061 0.0086159 0.002025 0.00559494 0.0085928 -0.001325 0.00567654 0.0085928 -0.003 0.00567654 0.0086159 -0.003 0.00559494 0.0086159 -0.00635 0.00559494 0.0086159 -0.004675 0.00559494 0.0085928 -0.008025 0.00567654 0.00855623 -0.003 0.00575306 0.00855623 -0.008025 0.00575306 0.0085928 -0.00635 0.00567654 0.00855623 -0.0097 0.00575306 0.00855623 0.002025 0.00575306 0.00850726 -0.001325 0.0058223 0.00855623 0.00035 0.00575306 0.0084473 
-0.001325 0.00588226 0.00850726 -0.003 0.0058223 0.00850726 -0.00635 0.0058223 0.0084473 0.002025 0.00588226 0.00837806 -0.003 0.00593123 0.0084473 -0.00635 0.00588226 0.0084473 -0.003 0.00588226 0.0084473 -0.008025 0.00588226 0.00830154 -0.003 0.0059678 0.00830154 -0.008025 0.0059678 0.00830154 -0.004675 0.0059678 0.00830154 0.0037 0.0059678 0.00830154 0.002025 0.0059678 0.00830154 -0.001325 0.0059678 0.00821994 -0.003 0.0059909 0.00830154 -0.00635 0.0059678 -0.0146249 -0.001 0.00551061 -0.0146249 -0.00272917 0.00551061 -0.0146249 -0.00964583 0.00551061 -0.0146159 -0.00964583 0.00559494 -0.0145073 -0.00272917 0.0058223 -0.0143015 0.00245833 0.0059678 -0.0142199 0.00245833 0.0059909 -0.0141356 0.00591667 0.00599989 -0.0142032 0.009375 0.00599385 -0.014352 0.009375 0.00594551 -0.0144189 0.009375 0.00590451 -0.0144786 0.009375 0.00585356 -0.0145562 0.00764583 0.00575306 -0.0146005 0.009375 0.00565451 -0.0146159 0.00764583 0.00559494 -0.014625 0.009375 0.0055 -0.0146159 0.00591667 0.00559494 -0.0146249 0.0041875 0.00551061 -0.0146159 0.0041875 0.00559494 -0.0145562 0.00591667 0.00575306 -0.0143781 0.0041875 0.00593123 -0.0146154 -0.0113654 0.00559754 -0.0145869 -0.0113369 0.00569134 -0.0145928 -0.00964583 0.00567654 -0.0145562 -0.00964583 0.00575306 -0.0145407 -0.0112907 0.00577778 -0.0145073 -0.00964583 0.00582229 -0.0144473 -0.00791667 0.00588226 -0.0143015 -0.00445833 0.0059678 -0.0142199 -0.00272917 0.0059909 -0.0141356 0.0041875 0.00599989 -0.0143015 0.000729166 0.0059678 -0.0145928 -0.00791667 0.00567654 -0.0145073 -0.0061875 0.0058223 -0.0143781 -0.00272917 0.00593123 -0.0143781 -0.001 0.00593123 -0.0143015 -0.001 0.0059678 -0.0145115 -0.0112615 0.0058172 -0.0143607 -0.0111107 0.00594096 -0.0142701 -0.0110201 0.00597847 -0.0142225 -0.0109725 0.00599039 -0.0142199 -0.00964583 0.0059909 -0.014174 -0.010924 0.00599759 -0.0141356 -0.00791667 0.00599989 -0.0142199 -0.00791667 0.0059909 -0.0143781 -0.00791667 0.00593123 -0.0143163 -0.0110663 0.00596194 -0.0143015 -0.00964583 0.0059678 -0.0141356 -0.00272917 0.00599989 -0.0142199 -0.00445833 0.0059909 -0.0142199 -0.0061875 0.0059909 -0.0143015 -0.00791667 0.0059678 -0.0141356 -0.00445833 0.00599989 -0.0144473 0.0041875 0.00588226 -0.0144473 0.00591667 0.00588226 -0.0144473 0.00764583 0.00588226 -0.0145073 0.00591667 0.0058223 -0.0145295 0.009375 0.00579389 -0.0145928 0.00591667 0.00567654 -0.0145928 0.00764583 0.00567654 -0.0146249 0.00591667 0.00551061 -0.0146249 0.00764583 0.00551061 -0.0146159 -0.00791667 0.00559494 -0.0146159 -0.0061875 0.00559494 -0.0146159 -0.00272917 0.00559494 -0.0146249 0.000729166 0.00551061 -0.0146159 0.00245833 0.00559494 -0.0146249 0.00245833 0.00551061 -0.0146249 -0.0061875 0.00551061 -0.0146249 -0.00791667 0.00551061 -0.0146249 -0.00445833 0.00551061 -0.0146159 -0.00445833 0.00559494 -0.0145928 -0.00272917 0.00567654 -0.0145928 0.00245833 0.00567654 -0.0145928 0.0041875 0.00567654 -0.0145928 -0.001 0.00567654 -0.0146159 -0.001 0.00559494 -0.0146159 0.000729166 0.00559494 -0.0145928 -0.0061875 0.00567654 -0.0145562 0.0041875 0.00575306 -0.0145928 -0.00445833 0.00567654 -0.0145562 -0.00272917 0.00575306 -0.0145928 0.000729166 0.00567654 -0.0145562 0.00245833 0.00575306 -0.0144473 -0.0061875 0.00588226 -0.0145562 -0.00791667 0.00575306 -0.0145073 -0.00445833 0.0058223 -0.0145562 -0.0061875 0.00575306 -0.0145562 -0.00445833 0.00575306 -0.0145562 0.000729166 0.00575306 -0.0145073 0.0041875 0.0058223 -0.0145562 -0.001 0.00575306 -0.0145073 -0.00791667 0.0058223 -0.0144473 -0.00445833 0.00588226 -0.0144473 -0.00272917 
0.00588226 -0.0144473 -0.001 0.00588226 -0.0145073 -0.001 0.0058223 -0.0144473 0.000729166 0.00588226 -0.0145073 0.00245833 0.0058223 -0.0145073 0.000729166 0.0058223 -0.0145073 0.00764583 0.0058223 -0.0143781 -0.00964583 0.00593123 -0.0144473 -0.00964583 0.00588226 -0.0143781 0.000729166 0.00593123 -0.0144473 0.00245833 0.00588226 -0.0143781 -0.0061875 0.00593123 -0.0143015 -0.0061875 0.0059678 -0.0143015 -0.00272917 0.0059678 -0.0143781 -0.00445833 0.00593123 -0.0143015 0.0041875 0.0059678 -0.0143781 0.00245833 0.00593123 -0.0142199 0.0041875 0.0059909 -0.0143781 0.00764583 0.00593123 -0.0143781 0.00591667 0.00593123 -0.0142199 0.000729166 0.0059909 -0.0141356 0.000729166 0.00599989 -0.0141356 -0.001 0.00599989 -0.0142199 -0.001 0.0059909 -0.0141356 0.00245833 0.00599989 -0.0142199 0.00764583 0.0059909 -0.0143015 0.00764583 0.0059678 -0.0143015 0.00591667 0.0059678 -0.0141356 -0.00964583 0.00599989 -0.0141356 0.00764583 0.00599989 -0.0142199 0.00591667 0.0059909 -0.0141356 -0.0061875 0.00599989 -0.01087 0.00937855 -0.006 -0.0107176 0.00938849 -0.006 -0.0105657 0.00940394 -0.006 0.008125 0.005375 -0.006 -0.00683572 0.00942419 -0.006 -0.0104143 0.0094242 -0.006 -0.00803465 0.00970009 -0.006 -0.00981222 0.00954239 -0.006 -0.00773688 0.00961777 -0.006 -0.014625 -0.011375 -0.0055 -0.012625 -0.011375 0.002 -0.012625 -0.011375 -0.002 -0.014625 0.009375 -0.00239791 -0.014625 0.00937855 -0.00224503 -0.014625 -0.011375 -0.002 -0.014625 -0.009375 -0.002 -0.014625 0.0094768 0.00148758 -0.014625 0.00938849 0.00209264 -0.014625 0.00937855 0.00224504 -0.014625 -0.011375 0.002 -0.014625 0.00970009 0.000590338 -0.014625 0.00942419 -0.00178928 -0.014625 0.00940394 -0.00194073 0.008125 0.00986884 0.00557822 0.00819999 0.00986818 0.0055339 0.00820322 0.00986884 0.0055 0.00815523 0.00981474 0.00573605 0.00814562 0.00984791 0.00566104 0.008125 0.0098205 0.005727 0.00821493 0.00976957 0.00579365 0.00826651 0.00981474 0.00569133 0.00826326 0.00976957 0.00577423 0.00837944 0.00976957 0.00567197 0.00840484 0.00976957 0.0056265 0.008431 0.00976957 0.00552605 0.00841889 0.00977951 0.0055 0.00836212 0.00981474 0.00552018 0.00823214 0.00981474 0.00571249 0.00822154 0.00984791 0.00563053 0.00817254 0.00984791 0.00565524 0.008125 0.00977951 0.00579389 0.00823272 0.00971364 0.00585174 0.00834672 0.00976957 0.0057125 0.00847855 0.00972855 0.0055 0.008125 0.00972855 0.00585356 0.00817173 0.00971364 0.00586488 0.008125 0.00966889 0.00590451 0.00829061 0.00971364 0.00582848 0.00824757 0.00964847 0.00590024 0.00831345 0.00964847 0.00587377 0.0084718 0.00964847 0.0057344 0.00817817 0.00964847 0.00591519 0.008125 0.009602 0.00594551 0.0084272 0.00964847 0.00578964 0.00818316 0.00957585 0.00595418 0.00825908 0.00957585 0.00593782 0.00826693 0.00949774 0.00596346 0.00839727 0.00957585 0.00586814 0.00845557 0.00957585 0.00581683 0.00856811 0.00957585 0.00561538 0.008125 0.00952951 0.00597553 0.00841322 0.00949774 0.0058897 0.00859406 0.00949774 0.00562214 0.00857906 0.00941629 0.00570525 0.00860795 0.00949774 0.00554111 0.00861884 0.00945322 0.0055 0.008125 0.00945322 0.00599385 0.00834933 0.00941629 0.00594494 0.00841889 0.009375 0.00590451 0.0084213 0.00941629 0.00590063 0.00848474 0.00941629 0.00584479 0.00852951 0.009375 0.00579389 0.0086215 0.00941629 0.00554226 0.00827091 0.00941629 0.00597645 0.00827951 0.009375 0.00597553 0.00827951 0.00985053 0.0055 0.00820464 0.00986818 0.00552074 0.00816205 0.00986818 0.00557349 0.0081491 0.00986818 0.00557869 0.00813545 0.00986818 0.00558163 0.00818829 0.00941629 0.00599426 0.00818656 
0.00949774 0.00598078 0.00816401 0.00976957 0.00580462 0.00834321 0.00949774 0.0059328 0.00819809 0.00984791 0.00564497 0.00819468 0.00981474 0.00572754 0.00833114 0.00957585 0.00590886 0.0083739 0.00964847 0.00583654 0.00834374 0.00971364 0.00579576 0.00817394 0.00986818 0.00556617 0.00830761 0.00976957 0.00574691 0.00824221 0.00984791 0.00561234 0.00818441 0.00986818 0.00555695 0.00847493 0.00949774 0.00583538 0.00839058 0.00971364 0.00575454 0.00829681 0.00981474 0.00566466 0.00832216 0.00981474 0.00563326 0.00819318 0.00986818 0.00554609 0.00852658 0.00949774 0.00577142 0.00853784 0.00941629 0.00577903 0.00854224 0.00957585 0.00568861 0.00850642 0.00964847 0.00567242 0.00850436 0.00957585 0.00575641 0.00842978 0.00971364 0.005706 0.00834185 0.00981474 0.00559802 0.00825951 0.00984791 0.00559091 0.00827294 0.00984791 0.00556687 0.00856667 0.00949774 0.00569965 0.00848099 0.00971364 0.00559269 0.0084222 0.00976957 0.00557739 0.00846021 0.00971364 0.00565152 0.00828211 0.00984791 0.00554091 0.00835529 0.00981474 0.00555997 0.00860721 0.00941629 0.00562556 0.00858124 0.00957585 0.00553883 0.00854207 0.00964847 0.0055355 0.00849154 0.00971364 0.0055312 0.00853008 0.00964847 0.00560548 0.008207 0.00986818 0.00550698 0.00828676 0.00984791 0.00551377 0.00820322 0.009375 0.00599385 0.00837806 0.00771428 0.00593123 0.00860053 0.009375 0.00565451 0.00862489 0.00771428 0.00551061 0.00827951 0.006875 0.00597553 0.00821994 0.00771428 0.0059909 0.00841889 0.006875 0.00590451 0.00847855 0.006875 0.00585355 0.0085705 0.006875 0.005727 0.0086159 0.00771428 0.00559494 0.00861884 0.006875 0.00557822 0.0084473 0.00771428 0.00588226 0.0085928 0.00771428 0.00567654 0.00860053 0.006875 0.00565451 0.00861884 0.009375 0.00557822 0.0085705 0.009375 0.005727 0.00855623 0.00771428 0.00575306 0.00847855 0.009375 0.00585356 0.00850726 0.00771428 0.0058223 0.00835199 0.009375 0.00594551 0.00830154 0.00771428 0.0059678 0.00813561 0.00771428 0.00599989 0.00819318 0.00986818 -0.00554608 0.00818441 0.00986818 -0.00555694 0.00816205 0.00986818 -0.00557348 0.00828676 0.00984791 -0.00551377 0.00835199 0.00982051 -0.0055 0.00841889 0.00977951 -0.0055 0.00836212 0.00981474 -0.00552018 0.00832216 0.00981474 -0.00563326 0.00823214 0.00981474 -0.00571249 0.00819468 0.00981474 -0.00572754 0.008125 0.00977951 -0.00579389 0.00815523 0.00981474 -0.00573604 0.00819809 0.00984791 -0.00564496 0.00822154 0.00984791 -0.00563053 0.00824221 0.00984791 -0.00561234 0.00828211 0.00984791 -0.00554091 0.008431 0.00976957 -0.00552604 0.00848099 0.00971364 -0.00559269 0.00846021 0.00971364 -0.00565152 0.00837944 0.00976957 -0.00567197 0.00842978 0.00971364 -0.00570599 0.00834672 0.00976957 -0.0057125 0.00830761 0.00976957 -0.00574691 0.00829061 0.00971364 -0.00582847 0.00816401 0.00976957 -0.00580462 0.00817173 0.00971364 -0.00586488 0.008125 0.00972856 -0.00585355 0.008125 0.00966889 -0.00590451 0.00847855 0.00972856 -0.0055 0.00849154 0.00971364 -0.0055312 0.00852951 0.00966889 -0.0055 0.00854207 0.00964847 -0.0055355 0.00834374 0.00971364 -0.00579576 0.008125 0.009602 -0.0059455 0.00845557 0.00957585 -0.00581683 0.00839727 0.00957585 -0.00586814 0.00833114 0.00957585 -0.00590886 0.00817817 0.00964847 -0.00591519 0.0085705 0.009602 -0.0055 0.00860053 0.00952951 -0.0055 0.00860795 0.00949774 -0.0055411 0.00841322 0.00949774 -0.0058897 0.00861884 0.00945322 -0.0055 0.00859406 0.00949774 -0.00562213 0.00856667 0.00949774 -0.00569965 0.00852658 0.00949774 -0.00577142 0.00853784 0.00941629 -0.00577903 0.0084213 0.00941629 -0.00590062 0.00834933 
0.00941629 -0.00594494 0.008125 0.00945322 -0.00599384 0.00818656 0.00949774 -0.00598077 0.0086215 0.00941629 -0.00554226 0.00857906 0.00941629 -0.00570525 0.00860053 0.009375 -0.00565451 0.0085705 0.009375 -0.00572699 0.00852951 0.009375 -0.00579389 0.00848474 0.00941629 -0.00584479 0.00841889 0.009375 -0.00590451 0.00818829 0.00941629 -0.00599425 0.00861884 0.009375 -0.00557822 0.00860721 0.00941629 -0.00562556 0.00817254 0.00984791 -0.00565523 0.00817394 0.00986818 -0.00556617 0.00819999 0.00986818 -0.0055339 0.00820464 0.00986818 -0.00552074 0.008207 0.00986818 -0.00550698 0.00856811 0.00957585 -0.00561538 0.00858124 0.00957585 -0.00553883 0.0084222 0.00976957 -0.00557738 0.00853008 0.00964847 -0.00560547 0.00834185 0.00981474 -0.00559802 0.00835529 0.00981474 -0.00555996 0.00827294 0.00984791 -0.00556687 0.00850436 0.00957585 -0.0057564 0.0084718 0.00964847 -0.00573439 0.00854224 0.00957585 -0.0056886 0.00850642 0.00964847 -0.00567241 0.00825951 0.00984791 -0.00559091 0.00840484 0.00976957 -0.0056265 0.00847493 0.00949774 -0.00583538 0.0083739 0.00964847 -0.00583654 0.0084272 0.00964847 -0.00578963 0.00839058 0.00971364 -0.00575454 0.00829681 0.00981474 -0.00566466 0.00826326 0.00976957 -0.00577422 0.00826651 0.00981474 -0.00569133 0.00834321 0.00949774 -0.0059328 0.00831345 0.00964847 -0.00587376 0.00821493 0.00976957 -0.00579364 0.0081491 0.00986818 -0.00557869 0.00826693 0.00949774 -0.00596345 0.00827091 0.00941629 -0.00597645 0.00818316 0.00957585 -0.00595418 0.00825908 0.00957585 -0.00593781 0.00824757 0.00964847 -0.00590023 0.00823272 0.00971364 -0.00585174 0.00814562 0.00984791 -0.00566103 0.00813545 0.00986818 -0.00558163 0.008625 0.009375 -0.0055 0.00862489 0.00771429 -0.00551061 0.0086159 0.00771429 -0.00559494 0.00850726 0.00771429 -0.0058223 0.00827951 0.009375 -0.00597553 0.00821994 0.00771429 -0.0059909 0.00820322 0.009375 -0.00599384 0.00861884 0.006875 -0.00557822 0.00860053 0.006875 -0.00565451 0.0085705 0.006875 -0.00572699 0.00855623 0.00771429 -0.00575306 0.0084473 0.00771429 -0.00588226 0.00847855 0.006875 -0.00585355 0.00841889 0.006875 -0.00590451 0.00837806 0.00771429 -0.00593123 0.00830154 0.00771429 -0.0059678 0.008125 0.006875 -0.006 0.00813561 0.00771429 -0.00599989 0.00835199 0.006875 -0.0059455 0.00835199 0.009375 -0.0059455 0.00847855 0.009375 -0.00585355 0.0085928 0.00771429 -0.00567654 -0.00622708 0.00945322 -0.00599384 -0.00034375 0.00975726 -0.00582229 0.0047375 0.00986591 -0.00559494 0.008125 0.00986885 -0.00557822 0.00643125 0.00987489 -0.00551061 0.008125 0.00985053 -0.00565451 0.00643125 0.0098428 -0.00567654 0.00304375 0.00980623 -0.00575306 0.00304375 0.00975726 -0.00582229 -0.00373125 0.00955154 -0.0059678 -0.0020375 0.00955154 -0.0059678 -0.005425 0.00946994 -0.0059909 -0.005425 0.00938561 -0.00599989 -0.00622708 0.00952951 -0.00597553 -0.005425 0.00955154 -0.0059678 0.0047375 0.00987489 -0.00551061 -0.005425 0.0096973 -0.00588226 -0.0020375 0.00975726 -0.00582229 0.00304375 0.00986591 -0.00559494 -0.00622708 0.00972856 -0.00585355 -0.005425 0.00975726 -0.00582229 -0.0020375 0.0098428 -0.00567654 0.00304375 0.00987489 -0.00551061 -0.00622708 0.00977951 -0.00579389 0.00135 0.00987489 -0.00551061 -0.00622708 0.00982051 -0.00572699 -0.00373125 0.0098428 -0.00567654 -0.00373125 0.00986591 -0.00559494 -0.00034375 0.00987489 -0.00551061 -0.0020375 0.00987489 -0.00551061 -0.00622708 0.00985053 -0.00565451 -0.005425 0.00986591 -0.00559494 -0.00373125 0.00987489 -0.00551061 -0.005425 0.00987489 -0.00551061 0.008125 0.00982051 -0.00572699 -0.00622708 
0.009375 -0.006 -0.00373125 0.00938561 -0.00599989 0.00643125 0.00980623 -0.00575306 0.0047375 0.0096973 -0.00588226 0.00135 0.00962806 -0.00593123 -0.00034375 0.00946994 -0.0059909 -0.0020375 0.00938561 -0.00599989 0.00643125 0.0096973 -0.00588226 -0.00034375 0.00938561 -0.00599989 0.0047375 0.00962806 -0.00593123 0.0047375 0.00946994 -0.0059909 0.00304375 0.00946994 -0.0059909 0.00304375 0.00938561 -0.00599989 0.00135 0.00938561 -0.00599989 0.008125 0.009375 -0.006 0.008125 0.00952951 -0.00597553 0.00643125 0.00946994 -0.0059909 0.0047375 0.00938561 -0.00599989 0.00643125 0.00938561 -0.00599989 -0.00373125 0.00946994 -0.0059909 -0.00034375 0.00955154 -0.0059678 -0.0020375 0.00946994 -0.0059909 0.00135 0.00946994 -0.0059909 0.00304375 0.00955154 -0.0059678 0.0047375 0.00955154 -0.0059678 0.00643125 0.00955154 -0.0059678 -0.00373125 0.00962806 -0.00593123 -0.0020375 0.00962806 -0.00593123 -0.005425 0.00962806 -0.00593123 0.00135 0.00955154 -0.0059678 -0.0020375 0.0096973 -0.00588226 -0.00373125 0.0096973 -0.00588226 -0.00034375 0.00962806 -0.00593123 0.00304375 0.00962806 -0.00593123 0.00643125 0.00962806 -0.00593123 -0.00034375 0.0096973 -0.00588226 0.00135 0.0096973 -0.00588226 0.00304375 0.0096973 -0.00588226 0.00643125 0.00975726 -0.00582229 -0.005425 0.00980623 -0.00575306 -0.00373125 0.00980623 -0.00575306 -0.0020375 0.00980623 -0.00575306 -0.00373125 0.00975726 -0.00582229 0.00135 0.00980623 -0.00575306 0.00135 0.00975726 -0.00582229 -0.00034375 0.00980623 -0.00575306 0.0047375 0.00980623 -0.00575306 0.0047375 0.00975726 -0.00582229 -0.005425 0.0098428 -0.00567654 -0.00034375 0.0098428 -0.00567654 0.00304375 0.0098428 -0.00567654 -0.0020375 0.00986591 -0.00559494 -0.00034375 0.00986591 -0.00559494 0.00135 0.00986591 -0.00559494 0.00135 0.0098428 -0.00567654 0.00643125 0.00986591 -0.00559494 0.0047375 0.0098428 -0.00567654 0.00813561 0.00987489 0.00366667 0.00850726 0.0096973 -0.00183333 0.00850726 0.0096973 -0.00366667 0.00855623 0.00962806 -0.00366667 0.0084473 0.00975726 -0.00366667 0.00830154 0.0098428 1.29073e-09 0.00821994 0.0098659 0.00366667 0.00830154 0.0098428 0.00183333 0.00837806 0.00980623 0.00183333 0.00837806 0.00980623 0.00366667 0.0084473 0.00975726 1.27951e-09 0.00835199 0.0098205 0.0055 0.00855623 0.00962806 1.26257e-09 0.0086159 0.00946994 -0.00366667 0.00850726 0.0096973 0.00366667 0.00862489 0.00938561 -0.00366667 0.00852951 0.00966889 0.0055 0.00855623 0.00962806 0.00366667 0.0085928 0.00955154 0.00183333 0.00862489 0.00938561 -0.00183333 0.00862489 0.00938561 1.23077e-09 0.0085705 0.009602 0.0055 0.00860053 0.00952951 0.0055 0.0085928 0.00955154 0.00366667 0.0086159 0.00946994 0.00183333 0.0086159 0.00946994 0.00366667 0.00862489 0.00938561 0.00183333 0.00862489 0.00938561 0.00366667 0.00837806 0.00980623 -0.00366667 0.00821994 0.0098659 1.29376e-09 0.00830154 0.0098428 -0.00366667 0.00813561 0.00987489 1.29494e-09 0.00813561 0.00987489 0.00183333 0.00827951 0.00985053 -0.0055 0.00821994 0.0098659 -0.00366667 0.00820322 0.00986885 -0.0055 0.00813561 0.00987489 -0.00366667 0.00821994 0.0098659 -0.00183333 0.00813561 0.00987489 -0.00183333 0.00821994 0.0098659 0.00183333 0.00830154 0.0098428 -0.00183333 0.00830154 0.0098428 0.00366667 0.0084473 0.00975726 0.00366667 0.00837806 0.00980623 1.28593e-09 0.0084473 0.00975726 0.00183333 0.00837806 0.00980623 -0.00183333 0.0084473 0.00975726 -0.00183333 0.00850726 0.0096973 0.00183333 0.00850726 0.0096973 1.27165e-09 0.00855623 0.00962806 -0.00183333 0.0085928 0.00955154 1.25253e-09 0.00855623 0.00962806 0.00183333 
0.0085928 0.00955154 -0.00366667 0.0086159 0.00946994 1.24183e-09 0.0086159 0.00946994 -0.00183333 0.0085928 0.00955154 -0.00183333 -0.00622708 0.00986884 0.00557822 -0.00443307 0.0098659 0.00559494 -0.00263906 0.00980623 0.00575306 -0.000845053 0.00975726 0.0058223 0.00453698 0.00962806 0.00593123 -0.00263906 0.0098428 0.00567654 -0.00443307 0.0098428 0.00567654 -0.000845053 0.0096973 0.00588227 0.000948958 0.0096973 0.00588227 0.00274297 0.00962806 0.00593123 0.00453698 0.00946994 0.00599091 -0.00443307 0.00975726 0.0058223 -0.00263906 0.0096973 0.00588227 -0.000845053 0.00962806 0.00593123 0.000948958 0.00955154 0.0059678 0.00633099 0.00938561 0.00599989 -0.00622708 0.00972855 0.00585356 -0.00443307 0.0096973 0.00588227 -0.00263906 0.00962806 0.00593123 0.00274297 0.00938561 0.00599989 0.00453698 0.00938561 0.00599989 0.000948958 0.00946994 0.00599091 -0.000845053 0.00938561 0.00599989 0.000948958 0.00938561 0.00599989 -0.00443307 0.00946994 0.00599091 -0.00263906 0.00938561 0.00599989 -0.00622708 0.00952951 0.00597553 -0.00622708 0.009375 0.006 -0.00622708 0.00945322 0.00599385 -0.00443307 0.00938561 0.00599989 0.00633099 0.00962806 0.00593123 0.00453698 0.00975726 0.0058223 0.000948958 0.00980623 0.00575306 -0.00443307 0.00987489 0.00551061 -0.00263906 0.0098659 0.00559494 0.00633099 0.00975726 0.0058223 0.00453698 0.00980623 0.00575306 0.00274297 0.00980623 0.00575306 -0.00263906 0.00987489 0.00551061 -0.000845053 0.0098659 0.00559494 0.00453698 0.0098428 0.00567654 -0.000845053 0.00987489 0.00551061 0.000948958 0.0098659 0.00559494 0.008125 0.00985053 0.00565451 0.00274297 0.00987489 0.00551061 0.00453698 0.0098659 0.00559494 0.00633099 0.0098659 0.00559494 0.00453698 0.00987489 0.00551061 0.00633099 0.00987489 0.00551061 0.00274297 0.0098659 0.00559494 0.000948958 0.00987489 0.00551061 0.00274297 0.0098428 0.00567654 -0.000845053 0.00980623 0.00575306 -0.000845053 0.0098428 0.00567654 0.000948958 0.0098428 0.00567654 0.00633099 0.0098428 0.00567654 -0.00263906 0.00975726 0.0058223 -0.00443307 0.00980623 0.00575306 0.00633099 0.00980623 0.00575306 0.000948958 0.00975726 0.0058223 0.00274297 0.0096973 0.00588227 0.00453698 0.0096973 0.00588227 0.00274297 0.00975726 0.0058223 0.00633099 0.0096973 0.00588227 -0.00443307 0.00962806 0.00593123 -0.000845053 0.00955154 0.0059678 0.000948958 0.00962806 0.00593123 0.00633099 0.00955154 0.0059678 -0.00263906 0.00946994 0.00599091 -0.00443307 0.00955154 0.0059678 -0.00263906 0.00955154 0.0059678 0.00274297 0.00946994 0.00599091 0.00274297 0.00955154 0.0059678 0.00633099 0.00946994 0.00599091 0.00453698 0.00955154 0.0059678 -0.000845053 0.00946994 0.00599091 -0.00622708 0.00986885 -0.00557822 -0.00646553 0.00985527 -0.0056688 -0.00646784 0.00982522 -0.00573655 -0.00646934 0.00980565 -0.00576993 -0.00648204 0.0096405 -0.00592994 -0.00661244 0.00963659 -0.00593903 -0.00649172 0.00951442 -0.00598313 -0.0066339 0.00943888 -0.00599836 -0.00668427 0.00940394 -0.006 -0.00676946 0.0094526 -0.00599859 -0.00694117 0.00947518 -0.00599885 -0.00710694 0.00953243 -0.00599633 -0.00727274 0.0095868 -0.00599355 -0.00760544 0.00969094 -0.00598906 -0.00831385 0.00983595 -0.00599728 -0.00634744 0.00979497 -0.00577521 -0.00634656 0.00981631 -0.0057397 -0.00647104 0.00978356 -0.00580169 -0.00647292 0.00975909 -0.00583164 -0.00634843 0.00977077 -0.00580883 -0.00634473 0.0098609 -0.00562766 -0.00634953 0.00974387 -0.00584034 -0.00622708 0.009602 -0.0059455 -0.00648855 0.00955571 -0.00597014 -0.00635709 0.00955936 -0.00596584 -0.00635489 0.00961322 -0.00594105 
-0.00635074 0.00971445 -0.0058695 -0.00635889 0.00951538 -0.00598069 -0.00636075 0.00947021 -0.00599139 -0.00649497 0.00947216 -0.00599248 -0.00636263 0.00942424 -0.00599784 -0.00649827 0.00942924 -0.00599811 -0.00637996 0.00937855 -0.006 -0.00653236 0.00938849 -0.006 -0.00711273 0.00950281 -0.00599908 -0.00761164 0.00966679 -0.00599371 -0.00811884 0.00980396 -0.00599412 -0.00831535 0.00983093 -0.0059978 -0.00846888 0.00985788 -0.00599918 -0.00846963 0.00985537 -0.00599932 -0.00728413 0.00953479 -0.00599928 -0.00744455 0.00961563 -0.00599512 -0.00744994 0.00959313 -0.00599783 -0.00778427 0.00969833 -0.00599569 -0.00812122 0.00979578 -0.00599536 -0.00831684 0.00982591 -0.00599827 -0.00847374 0.00984173 -0.00599986 -0.0078053 0.009875 -0.00594374 -0.0077429 0.00985111 -0.00594711 -0.00757641 0.00980411 -0.00594858 -0.00706954 0.00972378 -0.00592982 -0.0067403 0.00966584 -0.00593144 -0.00660909 0.00966743 -0.00592065 -0.00661592 0.00960451 -0.00595514 -0.00648464 0.00960657 -0.0059484 -0.00739713 0.00981377 -0.00591862 -0.00722792 0.00979134 -0.00590773 -0.00723216 0.00977199 -0.00592109 -0.00689595 0.00974405 -0.00589504 -0.00647954 0.00967291 -0.0059089 -0.00673281 0.00972066 -0.00589385 -0.00660287 0.00972477 -0.00587741 -0.00647719 0.0097036 -0.00588541 -0.00660589 0.00969687 -0.00590007 -0.00687899 0.00984488 -0.00578797 -0.00700096 0.009875 -0.00577603 -0.00659274 0.00981806 -0.00576847 -0.00659494 0.00979774 -0.00579818 -0.00659738 0.00977533 -0.00582633 -0.00671545 0.00984756 -0.00574456 -0.00658918 0.00985085 -0.00570777 -0.00847627 0.00983328 -0.00599999 -0.00847543 0.0098361 -0.00599996 -0.00813481 0.00974907 -0.00599963 -0.00796751 0.00969456 -0.00599983 -0.00832534 0.00979735 -0.00599986 -0.00813751 0.00973981 -0.00599991 -0.00847458 0.00983891 -0.00599992 -0.00832366 0.009803 -0.00599969 -0.00795243 0.009748 -0.00599528 -0.00795692 0.0097321 -0.00599728 -0.00796044 0.00971961 -0.00599847 -0.00779281 0.00966677 -0.00599892 -0.00847039 0.00985286 -0.00599945 -0.00811174 0.00982836 -0.00598951 -0.00810939 0.00983644 -0.00598768 -0.00793088 0.00982436 -0.00597762 -0.00775309 0.00981347 -0.00596459 -0.00740123 0.00979662 -0.00592962 -0.00758039 0.00978859 -0.00595612 -0.00740544 0.00977904 -0.00593988 -0.00740974 0.00976106 -0.00594938 -0.00723653 0.00975204 -0.00593349 -0.00810259 0.00985982 -0.00598153 -0.00792218 0.00985518 -0.00596651 -0.00774955 0.00982654 -0.00595897 -0.00832702 0.0097917 -0.00599996 -0.00756521 0.00984778 -0.00592342 -0.00635342 0.0096489 -0.00592003 -0.00635204 0.00968272 -0.00589612 -0.00622708 0.00966889 -0.00590451 -0.00634581 0.00983464 -0.00570254 -0.00660002 0.00975097 -0.00585279 -0.00647497 0.00973239 -0.0058596 -0.00672933 0.00974607 -0.00587237 -0.00688842 0.00978883 -0.00585519 -0.00672018 0.00981301 -0.00579833 -0.00672606 0.00977001 -0.00584922 -0.00676441 0.00948953 -0.00599438 -0.00759691 0.00972422 -0.00598047 -0.00776762 0.00975979 -0.00598302 -0.00662529 0.00951815 -0.00598535 -0.00662957 0.00947877 -0.00599347 -0.00692458 0.00957381 -0.00598174 -0.00725034 0.00968904 -0.00596464 -0.00776394 0.0097734 -0.00597902 -0.0066211 0.00955674 -0.00597406 -0.00674841 0.00960655 -0.00596131 -0.00675451 0.00956193 -0.00597765 -0.0070835 0.00965236 -0.00596544 -0.00707872 0.00967678 -0.00595488 -0.00758854 0.00975682 -0.00596947 -0.00776029 0.00978689 -0.00597462 -0.00724563 0.00971052 -0.00595529 -0.00775667 0.00980025 -0.0059698 -0.00675942 0.00952604 -0.00598738 -0.00693558 0.00950842 -0.00599541 -0.00693004 0.00954134 -0.0059897 -0.0072671 
0.00961251 -0.00598856 -0.00743243 0.00966627 -0.00598495 -0.00760116 0.00970765 -0.00598507 -0.00811647 0.00981212 -0.00599273 -0.00831087 0.00984596 -0.00599608 -0.00846813 0.00986039 -0.00599902 -0.00691776 0.0096144 -0.00596836 -0.00691312 0.00964198 -0.00595693 -0.00674429 0.00963669 -0.00594737 -0.00690422 0.00969487 -0.00592912 -0.0069086 0.00966884 -0.00594384 -0.00707406 0.00970061 -0.005943 -0.0069 0.00971997 -0.00591283 -0.00673647 0.00969387 -0.00591357 -0.006723 0.00979235 -0.0058245 -0.00709551 0.0095909 -0.00598537 -0.00708837 0.0096274 -0.00597463 -0.00706515 0.00974621 -0.00591539 -0.00706092 0.00976785 -0.00589974 -0.00689208 0.00976704 -0.0058758 -0.00705686 0.00978862 -0.00588292 -0.00688497 0.00980936 -0.00583328 -0.00704609 0.00984372 -0.00582768 -0.00705298 0.00980847 -0.00586499 -0.00727842 0.00956087 -0.00599713 -0.00710119 0.00956183 -0.00599175 -0.00726001 0.0096449 -0.00598015 -0.0074392 0.009638 -0.00599133 -0.00725513 0.00966715 -0.00597293 -0.00742776 0.00968577 -0.00597947 -0.00742315 0.00970503 -0.00597316 -0.00741861 0.00972402 -0.00596604 -0.00724103 0.00973153 -0.0059449 -0.00741413 0.00974271 -0.0059581 -0.00722383 0.00981004 -0.00589344 -0.00753472 0.009875 -0.00590011 -0.0072164 0.00984393 -0.00586358 -0.00762636 0.00960941 -0.00599961 -0.0077971 0.00965093 -0.00599973 -0.00778853 0.00968258 -0.00599757 -0.00761653 0.00964774 -0.00599646 -0.00745535 0.00957052 -0.00599946 -0.00762143 0.00962861 -0.00599842 -0.00758444 0.00977282 -0.00596309 -0.0075725 0.00981937 -0.00594048 -0.00738957 0.00984533 -0.00589553 -0.00713742 0.00947679 -0.006 -0.00743777 0.00954239 -0.006 -0.00796397 0.0097071 -0.00599932 -0.00813212 0.00975833 -0.00599916 -0.00832198 0.00980865 -0.00599944 -0.00812943 0.00976757 -0.00599851 -0.008126 0.00977936 -0.00599741 -0.00847265 0.00984533 -0.00599976 -0.00831983 0.00981586 -0.00599903 -0.00777884 0.00971836 -0.0059925 -0.00777134 0.00974607 -0.0059866 -0.00777508 0.00973226 -0.00598976 -0.0079431 0.00978106 -0.00598929 -0.0075927 0.00974061 -0.00597527 -0.00794002 0.00979198 -0.00598677 -0.00793391 0.00981364 -0.00598093 -0.00793695 0.00980284 -0.00598398 -0.00831833 0.00982088 -0.00599868 -0.00812361 0.00978758 -0.00599646 -0.00794931 0.00975907 -0.00599355 -0.0079462 0.00977009 -0.00599156 -0.0081141 0.00982025 -0.00599119 -0.00831236 0.00984096 -0.00599671 -0.00792787 0.00983501 -0.00597406 -0.00810705 0.00984449 -0.00598571 -0.00830506 0.00986549 -0.00599308 -0.0083079 0.00985595 -0.00599465 -0.00846737 0.0098629 -0.00599885 -0.00830938 0.00985096 -0.00599539 -0.00846662 0.0098654 -0.00599867 -0.0084719 0.00984784 -0.00599967 -0.00847114 0.00985035 -0.00599957 -0.008351 0.009875 -0.00599374 -0.00846518 0.0098702 -0.00599827 -0.014125 0.00986885 -0.00557822 -0.0141285 0.00986818 -0.00558222 -0.0141423 0.00986818 -0.00558045 -0.0141968 0.00986818 -0.00554014 -0.0142061 0.00986818 -0.00551391 -0.014125 0.00982051 -0.00572699 -0.0141319 0.00984791 -0.0056622 -0.014125 0.00977951 -0.00579389 -0.0144189 0.00977951 -0.0055 -0.0144277 0.00976957 -0.0055519 -0.0143596 0.00981474 -0.00554022 -0.0143102 0.00981474 -0.0056495 -0.0142513 0.00984791 -0.00560199 -0.0141592 0.00984791 -0.0056587 -0.0141351 0.00981474 -0.00573776 -0.014138 0.00976957 -0.00580683 -0.0141897 0.00976957 -0.00580021 -0.0141406 0.00971364 -0.00586753 -0.014368 0.00971364 -0.00577614 -0.0143931 0.00976957 -0.00564977 -0.0144146 0.00976957 -0.00560231 -0.0144786 0.00972856 -0.0055 -0.0142025 0.00971364 -0.0058596 -0.0141428 0.00964847 -0.00591821 -0.0144112 0.00971364 
-0.0057311 -0.0144461 0.00971364 -0.0056794 -0.0144507 0.00964847 -0.00576296 -0.0144876 0.00971364 -0.00556217 -0.0145376 0.00964847 -0.00557074 -0.0145295 0.00966889 -0.0055 -0.014125 0.00966889 -0.00590451 -0.0141444 0.00957585 -0.00595747 -0.0142215 0.00957585 -0.00594761 -0.0144275 0.00957585 -0.00584372 -0.0144813 0.00957585 -0.00578765 -0.0145247 0.00957585 -0.00572331 -0.0145567 0.00957585 -0.00565254 -0.0145705 0.009602 -0.0055 -0.014125 0.009602 -0.0059455 -0.0142957 0.00957585 -0.00592487 -0.0142271 0.00949774 -0.00597382 -0.0143057 0.00949774 -0.00594975 -0.0145763 0.00957585 -0.00557738 -0.0146027 0.00949774 -0.00558191 -0.014125 0.00945322 -0.00599384 -0.0141456 0.00949774 -0.00598426 -0.0141461 0.00941629 -0.00599784 -0.0144542 0.00941629 -0.00587405 -0.0145481 0.00949774 -0.00573638 -0.01456 0.00941629 -0.00574301 -0.0145948 0.00941629 -0.005666 -0.0142032 0.009375 -0.00599384 -0.0142795 0.009375 -0.00597553 -0.0143863 0.00941629 -0.00592431 -0.0145705 0.009375 -0.00572699 -0.0143108 0.00941629 -0.00596236 -0.0146161 0.00941629 -0.00558421 -0.014352 0.00982051 -0.0055 -0.0142026 0.00986818 -0.00552742 -0.014189 0.00986818 -0.0055517 -0.0142323 0.00984791 -0.00562187 -0.0141794 0.00986818 -0.00556178 -0.0141855 0.00984791 -0.00565064 -0.0141681 0.00986818 -0.00557008 -0.0141557 0.00986818 -0.00557636 -0.01423 0.00941629 -0.00598711 -0.0142132 0.00964847 -0.00590919 -0.0141751 0.00981474 -0.00573263 -0.0143445 0.00964847 -0.00585643 -0.0142622 0.00971364 -0.00584133 -0.0142811 0.00964847 -0.0058884 -0.0142137 0.00981474 -0.00572081 -0.0142498 0.00981474 -0.00570264 -0.0142395 0.00976957 -0.00578496 -0.0142101 0.00984791 -0.00563824 -0.0143791 0.00949774 -0.00591273 -0.0143651 0.00957585 -0.0058899 -0.014286 0.00976957 -0.00576151 -0.0143179 0.00971364 -0.00581324 -0.0144452 0.00949774 -0.00586385 -0.0144015 0.00964847 -0.00581422 -0.0143639 0.00976957 -0.00569293 -0.0143279 0.00976957 -0.00573054 -0.0142822 0.00981474 -0.00567864 -0.0145021 0.00949774 -0.0058045 -0.0145127 0.00941629 -0.00581304 -0.0143328 0.00981474 -0.00561606 -0.0142667 0.00984791 -0.00557918 -0.0144904 0.00964847 -0.00570414 -0.0143494 0.00981474 -0.00557928 -0.0142781 0.00984791 -0.00555408 -0.014582 0.00949774 -0.00566147 -0.0145197 0.00964847 -0.00563945 -0.0144718 0.00971364 -0.00562255 -0.014285 0.00984791 -0.00552744 -0.0146249 0.00245833 -0.00551061 -0.0146249 0.000729168 -0.00551061 -0.0146249 0.0041875 -0.00551061 -0.0146188 0.009375 -0.00557822 -0.0146159 0.00591667 -0.00559494 -0.0145928 0.00591667 -0.00567654 -0.0144473 -0.000999999 -0.00588226 -0.0143015 -0.00445833 -0.0059678 -0.0141356 -0.00964583 -0.00599989 -0.0142225 -0.0109725 -0.00599039 -0.0143015 -0.00964583 -0.0059678 -0.0144422 -0.0111922 -0.00588651 -0.0144786 -0.0112286 -0.00585356 -0.0145115 -0.0112615 -0.0058172 -0.0145073 -0.00964583 -0.0058223 -0.0145928 -0.00964583 -0.00567654 -0.0145869 -0.0113369 -0.00569134 -0.0146154 -0.0113654 -0.00559755 -0.0146159 -0.00964583 -0.00559494 -0.0146249 -0.00791667 -0.00551061 -0.0146249 -0.0061875 -0.00551061 -0.0145928 -0.0061875 -0.00567654 -0.0145562 -0.0061875 -0.00575306 -0.0143781 -0.0061875 -0.00593123 -0.0141356 -0.0061875 -0.00599989 -0.0141356 -0.00791667 -0.00599989 -0.0146005 0.009375 -0.00565451 -0.0145928 0.00764584 -0.00567654 -0.0145562 0.0041875 -0.00575306 -0.0145073 0.0041875 -0.0058223 -0.0143781 0.000729168 -0.00593123 -0.0142199 -0.00445833 -0.0059909 -0.0142199 -0.00272917 -0.0059909 -0.0141356 -0.00445833 -0.00599989 -0.0145562 0.00591667 -0.00575306 -0.0145073 
0.00591667 -0.0058223 -0.0143781 0.00245833 -0.00593123 -0.0143015 0.000729168 -0.0059678 -0.0145562 0.00764584 -0.00575306 -0.0143015 0.00245833 -0.0059678 -0.0142199 0.000729168 -0.0059909 -0.0141356 -0.000999999 -0.00599989 -0.0141356 -0.00272917 -0.00599989 -0.0145295 0.009375 -0.00579389 -0.0145073 0.00764584 -0.0058223 -0.0144786 0.009375 -0.00585355 -0.0144473 0.00591667 -0.00588226 -0.0143781 0.00591667 -0.00593123 -0.0141356 0.000729168 -0.00599989 -0.014125 0.009375 -0.006 -0.0144189 0.009375 -0.00590451 -0.0143781 0.00764584 -0.00593123 -0.0143015 0.00764584 -0.0059678 -0.0142199 0.00591667 -0.0059909 -0.0141356 0.00245833 -0.00599989 -0.0141356 0.0041875 -0.00599989 -0.014352 0.009375 -0.0059455 -0.0142199 0.00764584 -0.0059909 -0.0141356 0.00591667 -0.00599989 -0.0141356 0.00764584 -0.00599989 -0.0143015 -0.0061875 -0.0059678 -0.0143607 -0.0111107 -0.00594096 -0.0143781 -0.00964583 -0.00593123 -0.0144473 -0.0061875 -0.00588226 -0.0144473 -0.00964583 -0.00588227 -0.0144473 -0.00791667 -0.00588226 -0.0145073 -0.00791667 -0.0058223 -0.0145562 -0.00964583 -0.00575306 -0.0145562 -0.00791667 -0.00575306 -0.0146249 -0.00964583 -0.00551061 -0.0146249 0.00764584 -0.00551061 -0.0146159 0.000729168 -0.00559494 -0.0146249 -0.00272917 -0.00551061 -0.0146159 -0.00445833 -0.00559494 -0.0146249 -0.00445833 -0.00551061 -0.0146159 -0.0061875 -0.00559494 -0.0146249 0.00591667 -0.00551061 -0.0146159 0.0041875 -0.00559494 -0.0146249 -0.000999999 -0.00551061 -0.0146159 0.00764584 -0.00559494 -0.0145928 0.00245833 -0.00567654 -0.0146159 0.00245833 -0.00559494 -0.0145928 -0.000999999 -0.00567654 -0.0146159 -0.000999999 -0.00559494 -0.0146159 -0.00272917 -0.00559494 -0.0146159 -0.00791667 -0.00559494 -0.0145928 0.0041875 -0.00567654 -0.0145928 -0.00272917 -0.00567654 -0.0145562 -0.00445833 -0.00575306 -0.0145562 -0.000999999 -0.00575306 -0.0145928 0.000729168 -0.00567654 -0.0145928 -0.00445833 -0.00567654 -0.0145928 -0.00791667 -0.00567654 -0.0145073 0.000729168 -0.0058223 -0.0145562 0.00245833 -0.00575306 -0.0145562 0.000729168 -0.00575306 -0.0145562 -0.00272917 -0.00575306 -0.0144473 0.00764584 -0.00588226 -0.0144473 0.0041875 -0.00588226 -0.0144473 0.00245833 -0.00588226 -0.0144473 0.000729168 -0.00588226 -0.0145073 0.00245833 -0.0058223 -0.0145073 -0.0061875 -0.0058223 -0.0145073 -0.00445833 -0.0058223 -0.0144473 -0.00445833 -0.00588226 -0.0145073 -0.00272917 -0.0058223 -0.0145073 -0.000999999 -0.0058223 -0.0143781 0.0041875 -0.00593123 -0.0143781 -0.000999999 -0.00593123 -0.0144473 -0.00272917 -0.00588226 -0.0143781 -0.00272917 -0.00593123 -0.0143015 0.00591667 -0.0059678 -0.0143015 -0.000999999 -0.0059678 -0.0143781 -0.00445833 -0.00593123 -0.0143015 -0.00272917 -0.0059678 -0.0142199 -0.0061875 -0.00599091 -0.0143015 -0.00791667 -0.0059678 -0.0143781 -0.00791667 -0.00593123 -0.0142199 -0.00964583 -0.00599091 -0.0142199 0.0041875 -0.0059909 -0.0142199 0.00245833 -0.0059909 -0.0143015 0.0041875 -0.0059678 -0.0142199 -0.000999999 -0.0059909 -0.0142199 -0.00791667 -0.00599091 -0.0146249 0.00938561 -0.0038 -0.0143781 0.00980623 -0.0038 -0.0142795 0.00985053 -0.0055 -0.0141356 0.00987489 -0.0038 -0.0145928 0.00955154 -0.0038 -0.0142795 0.00985053 -0.00239791 -0.0142199 0.0098659 -0.0038 -0.0145562 0.00962806 -0.0038 -0.0145073 0.0096973 -0.0038 -0.0143015 0.0098428 -0.0038 -0.0142032 0.00986885 -0.0055 -0.0144473 0.00975726 -0.0038 -0.0146005 0.00952951 -0.0055 -0.0146188 0.00945322 -0.0055 -0.014625 0.009375 -0.0055 -0.0146159 0.00946994 -0.0038 -0.0126719 0.00986591 -0.00559494 -0.0112187 0.00986591 
-0.00559494 -0.014125 0.00985053 -0.00565451 -0.014125 0.009875 -0.0055 -0.0126719 0.00987489 -0.00551061 -0.0112187 0.00987489 -0.00551061 -0.0112187 0.0098428 -0.00567654 -0.0110229 0.00982051 -0.00572699 -0.0126719 0.00980623 -0.00575306 -0.0112187 0.00980623 -0.00575306 -0.0110229 0.00972856 -0.00585355 -0.0110229 0.00977951 -0.00579389 -0.0112187 0.00975726 -0.00582229 -0.0112187 0.0096973 -0.00588226 -0.0126719 0.00962806 -0.00593123 -0.0126719 0.0096973 -0.00588226 -0.014125 0.00972856 -0.00585355 -0.0126719 0.00975726 -0.00582229 -0.0112187 0.00962806 -0.00593123 -0.0110229 0.009602 -0.0059455 -0.0112187 0.00955154 -0.0059678 -0.0110229 0.00952951 -0.00597553 -0.0126719 0.00946994 -0.0059909 -0.014125 0.00952951 -0.00597553 -0.0126719 0.00955154 -0.0059678 -0.0110229 0.00945322 -0.00599384 -0.0110229 0.009375 -0.006 -0.0112187 0.00946994 -0.0059909 -0.0112187 0.00938561 -0.00599989 -0.0126719 0.00938561 -0.00599989 -0.0126719 0.0098428 -0.00567654 -0.0101126 0.0094768 -0.006 -0.0108325 0.00951434 -0.0059818 -0.0106493 0.00960453 -0.00595439 -0.00987702 0.00962777 -0.00599032 -0.0101424 0.00962916 -0.00597529 -0.00931086 0.00972853 -0.00599714 -0.00931545 0.00974486 -0.00599502 -0.0089728 0.00981971 -0.00599779 -0.0106527 0.00963682 -0.00593802 -0.0101472 0.00965381 -0.00596633 -0.00931864 0.00975622 -0.0059932 -0.0110229 0.00966889 -0.00590451 -0.0104015 0.00963933 -0.00595342 -0.00988871 0.0096782 -0.00597707 -0.0101519 0.00967796 -0.00595605 -0.00989342 0.00969848 -0.00597003 -0.00914786 0.00979291 -0.005995 -0.00932182 0.00976753 -0.0059911 -0.0108483 0.00978952 -0.00578716 -0.0101736 0.00978879 -0.00588593 -0.0099114 0.00977601 -0.00593296 -0.00991565 0.00979431 -0.00592155 -0.00965352 0.00980509 -0.00595055 -0.00949235 0.00980032 -0.00596987 -0.00933747 0.00982318 -0.00597642 -0.00898284 0.00985357 -0.00599319 -0.00898602 0.00986431 -0.00599119 -0.0110229 0.00985053 -0.00565451 -0.0106718 0.00981878 -0.00576489 -0.0104283 0.00981041 -0.00582033 -0.00949593 0.00981353 -0.00596467 -0.00917242 0.009875 -0.00597497 -0.008625 0.009875 -0.006 -0.0106752 0.0098513 -0.00570339 -0.00991977 0.0098121 -0.00590933 -0.0096574 0.00982011 -0.00594276 -0.00916771 0.00985927 -0.00598006 -0.00934053 0.00983408 -0.00597266 -0.0110229 0.00986885 -0.00557822 -0.0110229 0.009875 -0.0055 -0.0107693 0.009875 -0.00560376 -0.010249 0.009875 -0.00577603 -0.00971528 0.009875 -0.00590011 -0.00966463 0.00984812 -0.00592633 -0.0094447 0.009875 -0.00594374 -0.00950612 0.00985113 -0.00594723 -0.0104338 0.00984571 -0.00577184 -0.0101843 0.00984368 -0.00583205 -0.00934634 0.00985473 -0.00596471 -0.00951311 0.00961777 -0.006 -0.00960416 0.00961402 -0.00599962 -0.00986046 0.00955638 -0.00599939 -0.010124 0.00953538 -0.00599642 -0.0101297 0.0095644 -0.00599197 -0.0106442 0.0095564 -0.00597363 -0.0103797 0.00950056 -0.00599503 -0.0106402 0.00951749 -0.00598511 -0.0108349 0.00955713 -0.00596779 -0.0108379 0.00960968 -0.00594438 -0.010636 0.00947779 -0.00599336 -0.00880535 0.00986959 -0.0059978 -0.00880287 0.00986135 -0.00599854 -0.00898117 0.00984795 -0.00599413 -0.0101654 0.0097467 -0.00591757 -0.0104179 0.0097443 -0.00588667 -0.00880203 0.00985852 -0.00599875 -0.00915292 0.00980985 -0.00599216 -0.00989805 0.00971844 -0.00596208 -0.0108419 0.00967782 -0.00590191 -0.00880033 0.00985286 -0.00599914 -0.00879863 0.0098472 -0.00599945 -0.00879778 0.00984436 -0.00599958 -0.00896943 0.00980837 -0.00599877 -0.00913912 0.00976369 -0.00599839 -0.00946052 0.00968278 -0.00599758 -0.0101353 0.00959311 -0.00598575 -0.0103904 
0.00956863 -0.00598024 -0.010397 0.00961076 -0.00596577 -0.00879571 0.00983747 -0.00599982 -0.00945624 0.00966699 -0.00599892 -0.00986601 0.00958031 -0.00599757 -0.00987153 0.00960413 -0.00599454 -0.0087938 0.00983111 -0.00599995 -0.00896324 0.00978748 -0.00599982 -0.00896135 0.0097811 -0.00599995 -0.00921534 0.00970009 -0.006 -0.00930002 0.00968999 -0.00599982 -0.00913339 0.00974453 -0.0059996 -0.00913052 0.00973494 -0.0059999 -0.0108506 0.00983018 -0.00571784 -0.0108522 0.00985826 -0.00564612 -0.0108495 0.00981124 -0.00575332 -0.0108469 0.00976515 -0.00581913 -0.0106591 0.00969745 -0.00589844 -0.0108453 0.00973829 -0.00584904 -0.010656 0.00966786 -0.00591934 -0.0108436 0.00970912 -0.00587669 -0.01041 0.00969389 -0.00592339 -0.00963739 0.00974268 -0.00597623 -0.00947769 0.0097462 -0.00598663 -0.00948141 0.0097599 -0.00598306 -0.00932498 0.00977879 -0.00598872 -0.0108399 0.0096446 -0.00592452 -0.0108274 0.00942585 -0.00599797 -0.0108299 0.00947046 -0.00599188 -0.0106318 0.00943755 -0.00599834 -0.0106697 0.00979852 -0.00579501 -0.0104216 0.0097677 -0.00586597 -0.0106673 0.00977613 -0.00582358 -0.010662 0.00972546 -0.00587542 -0.0106648 0.00975173 -0.00585043 -0.00990705 0.00975724 -0.00594353 -0.00964557 0.00977432 -0.00596451 -0.00915544 0.00981828 -0.0059905 -0.00933127 0.00980112 -0.00598312 -0.0103851 0.00953485 -0.00598885 -0.0101695 0.00976816 -0.00590232 -0.0104251 0.00978976 -0.00584383 -0.010161 0.00972447 -0.00593164 -0.0104141 0.00971967 -0.00590583 -0.0104058 0.00966708 -0.00593927 -0.0101183 0.00950614 -0.0059991 -0.0103743 0.00946593 -0.00599876 -0.0101774 0.00980853 -0.00586844 -0.00990259 0.00973804 -0.00595325 -0.0101565 0.00970154 -0.00594448 -0.00992733 0.00984466 -0.00588372 -0.00964957 0.00978982 -0.00595781 -0.0096248 0.00969394 -0.00598949 -0.00961869 0.00967025 -0.00599395 -0.00988394 0.00965763 -0.00598319 -0.00930364 0.00970286 -0.00599928 -0.00913625 0.00975411 -0.0059991 -0.00949947 0.00982658 -0.00595906 -0.00948874 0.00978697 -0.00597468 -0.00964151 0.0097586 -0.00597065 -0.00948509 0.0097735 -0.00597907 -0.00963324 0.00972658 -0.00598123 -0.00962904 0.00971033 -0.00598565 -0.00945196 0.00965117 -0.00599973 -0.00960902 0.00963284 -0.00599848 -0.00961386 0.00965159 -0.00599659 -0.00915796 0.00982668 -0.00598869 -0.00933438 0.00981219 -0.00597991 -0.00932813 0.00978999 -0.00598606 -0.0094702 0.00971852 -0.00599252 -0.00947396 0.0097324 -0.00598978 -0.00946478 0.00969851 -0.0059957 -0.00930725 0.00971571 -0.00599839 -0.00916046 0.00983505 -0.00598671 -0.00880372 0.00986418 -0.0059983 -0.00897783 0.00983668 -0.0059958 -0.00880118 0.00985569 -0.00599895 -0.0089795 0.00984232 -0.005995 -0.00897616 0.00983103 -0.00599654 -0.00915039 0.00980139 -0.00599366 -0.00897448 0.00982538 -0.0059972 -0.00914532 0.00978442 -0.00599618 -0.00879948 0.00985003 -0.0059993 -0.00914277 0.0097759 -0.00599721 -0.00897112 0.00981404 -0.00599831 -0.00896513 0.00979386 -0.0059996 -0.00896702 0.00980023 -0.00599929 -0.00879475 0.00983429 -0.0059999 -0.00879284 0.00982793 -0.00599999 -0.00916296 0.00984339 -0.00598458 -0.00879692 0.00984153 -0.00599969 -0.0142288 0.009875 -0.00214425 -0.0142938 0.00985527 -0.00215947 -0.0143616 0.00982521 -0.00215716 -0.0144566 0.00975909 -0.00215208 -0.0145251 0.00969687 -0.00201911 -0.0145339 0.00967291 -0.00214546 -0.0145734 0.00960657 -0.00214036 -0.0146104 0.00951814 -0.00199971 -0.0146213 0.00953243 -0.00151806 -0.0146186 0.0095868 -0.00135226 -0.01461 0.00966627 -0.00119256 -0.0146101 0.00970765 -0.00102384 -0.0146177 0.00981212 -0.000508522 -0.0142032 
0.00986885 -0.00239791 -0.0143647 0.00981631 -0.00227844 -0.0143275 0.00983464 -0.00227919 -0.014352 0.0098205 -0.00239791 -0.0144189 0.00977951 -0.00239791 -0.0144267 0.00978356 -0.00215396 -0.0144002 0.00979497 -0.00227756 -0.0143949 0.00980565 -0.00215566 -0.0142527 0.0098609 -0.00228027 -0.0144338 0.00977077 -0.00227657 -0.0144786 0.00972855 -0.00239791 -0.0144653 0.00974387 -0.00227547 -0.0144846 0.00973239 -0.00215003 -0.0144945 0.00971445 -0.00227426 -0.0145104 0.0097036 -0.00214782 -0.0145705 0.009602 -0.00239791 -0.014566 0.00961321 -0.00227012 -0.0145908 0.00955936 -0.00226791 -0.0146057 0.00951538 -0.00226611 -0.014545 0.0096489 -0.00227158 -0.0145211 0.00968272 -0.00227296 -0.0146005 0.00952951 -0.00239791 -0.0146164 0.00947021 -0.00226425 -0.0146188 0.00945322 -0.00239791 -0.0146228 0.00942424 -0.00226237 -0.0146231 0.00942924 -0.00212673 -0.014625 0.00938849 -0.00209263 -0.0146163 0.009638 -0.00118579 -0.0146187 0.00966679 -0.00101335 -0.0146141 0.00969094 -0.00101955 -0.0146204 0.00979578 -0.000503769 -0.0146241 0.00950281 -0.00151227 -0.0146243 0.00953479 -0.00134087 -0.0146221 0.00956087 -0.00134658 -0.0146228 0.00959313 -0.00117505 -0.0146215 0.00964774 -0.00100847 -0.0146186 0.00975907 -0.000675683 -0.0146203 0.00974801 -0.000672561 -0.0146215 0.00978758 -0.000501383 -0.0146228 0.00983093 -0.000309659 -0.0146243 0.00985537 -0.000155365 -0.014625 0.00961777 -0.000888107 -0.014625 0.00954239 -0.00118722 -0.014625 0.00970009 -0.000590335 -0.0146248 0.00969457 -0.00065748 -0.0146242 0.00975833 -0.000492871 -0.0146244 0.00980865 -0.000303028 -0.0146065 0.00985982 -0.000522398 -0.0145915 0.00985518 -0.000702806 -0.0145687 0.009875 -0.000819703 -0.014584 0.00982654 -0.00087544 -0.0145736 0.00980411 -0.00104858 -0.0145548 0.00972378 -0.00155546 -0.0145541 0.00969486 -0.00172078 -0.0145386 0.00969387 -0.00188854 -0.0145456 0.00966743 -0.00201592 -0.014564 0.00963659 -0.00201257 -0.0145801 0.0096045 -0.00200909 -0.0145724 0.00963669 -0.00188072 -0.0145721 0.00985111 -0.000882094 -0.0145655 0.00981937 -0.00105249 -0.0145327 0.00979134 -0.00139707 -0.0145189 0.00972065 -0.0018922 -0.0145024 0.00972477 -0.00202214 -0.014401 0.009875 -0.00162404 -0.0143935 0.00981806 -0.00203227 -0.0144232 0.00979774 -0.00203006 -0.0144233 0.00981301 -0.00190483 -0.0144495 0.00979235 -0.001902 -0.0143696 0.00984756 -0.00190955 -0.0143328 0.00985085 -0.00203582 -0.014625 0.0098361 -0.000149573 -0.0146249 0.00973981 -0.000487483 -0.0146249 0.00983891 -0.000150419 -0.0146249 0.00979735 -0.000299667 -0.0146247 0.009803 -0.000301348 -0.0146247 0.00984784 -0.000153102 -0.0146237 0.00982088 -0.000306669 -0.0146207 0.00969833 -0.000840725 -0.0146243 0.0097071 -0.000661017 -0.0146245 0.00985286 -0.000154611 -0.0146246 0.00985035 -0.000153856 -0.0145811 0.00978859 -0.0010446 -0.0145896 0.00981347 -0.000871902 -0.0145649 0.00977904 -0.00121956 -0.0145585 0.00975204 -0.00138846 -0.0145744 0.00976106 -0.00121525 -0.0146181 0.00986549 -0.000319943 -0.0146107 0.00984449 -0.000517939 -0.014625 0.0097917 -0.000297986 -0.014625 0.00983328 -0.000148726 -0.0146239 0.00947518 -0.00168383 -0.0145549 0.0096405 -0.00214297 -0.0145295 0.00966889 -0.00239791 -0.0144778 0.00975097 -0.00202498 -0.0144974 0.00974607 -0.00189568 -0.0144802 0.00978883 -0.00173658 -0.014413 0.00984488 -0.00174601 -0.0144513 0.00977533 -0.00202763 -0.0144742 0.00977001 -0.00189895 -0.0146234 0.00943888 -0.00199111 -0.0146194 0.00948953 -0.0018606 -0.0146104 0.0095909 -0.00152949 -0.0146045 0.00968577 -0.00119723 -0.014604 0.0097734 -0.000861051 
-0.0146118 0.00979198 -0.000684972 -0.0146145 0.00982836 -0.000513248 -0.0146175 0.00947216 -0.00213003 -0.0146067 0.00957381 -0.00170042 -0.0145996 0.0096274 -0.00153663 -0.0145896 0.00968904 -0.00137466 -0.014591 0.00972402 -0.00120639 -0.0145982 0.00970503 -0.00120184 -0.0146127 0.00983644 -0.000515598 -0.0146081 0.00951442 -0.00213328 -0.0145991 0.00955674 -0.0020039 -0.0146026 0.00956193 -0.0018705 -0.0145934 0.0096144 -0.00170725 -0.0146059 0.00981364 -0.000691083 -0.0145948 0.00980025 -0.000868323 -0.0146185 0.00947877 -0.00199544 -0.0145951 0.00955571 -0.00213645 -0.0145688 0.00966884 -0.0017164 -0.0145799 0.00967678 -0.00154628 -0.0145803 0.00971052 -0.00137937 -0.0146051 0.0096449 -0.00136499 -0.0146116 0.00974607 -0.000853651 -0.014624 0.00986039 -0.000156873 -0.0146236 0.0094526 -0.00185555 -0.0146147 0.00954134 -0.00169496 -0.0146124 0.00952604 -0.00186559 -0.0145819 0.00964198 -0.00171189 -0.0145863 0.00960655 -0.0018766 -0.0145564 0.00966584 -0.00188471 -0.014568 0.00970061 -0.00155094 -0.0145378 0.00971997 -0.001725 -0.0146204 0.00950841 -0.00168942 -0.0145904 0.00965235 -0.0015415 -0.0145247 0.00976785 -0.00156408 -0.01452 0.00974405 -0.00172905 -0.0145008 0.00976704 -0.00173292 -0.0144583 0.00980936 -0.00174004 -0.0146168 0.00956183 -0.00152381 -0.0146136 0.00961251 -0.00135789 -0.0145979 0.00966715 -0.00136987 -0.0145404 0.00974621 -0.00155985 -0.0145699 0.00973153 -0.00138397 -0.0145831 0.00974271 -0.00121086 -0.0145079 0.00978862 -0.00156814 -0.0145251 0.009875 -0.00109028 -0.0144886 0.00984393 -0.0014086 -0.0145205 0.00984533 -0.00123542 -0.0145461 0.00977199 -0.00139283 -0.0144527 0.00984372 -0.00157891 -0.01449 0.00980847 -0.00157202 -0.0145184 0.00981004 -0.00140117 -0.0146246 0.00960941 -0.000998634 -0.0146247 0.00965093 -0.000827891 -0.0146234 0.00962861 -0.00100356 -0.0146201 0.00961564 -0.00118044 -0.0146055 0.00972422 -0.00102809 -0.0145881 0.00977282 -0.00104055 -0.0145945 0.00975682 -0.00103645 -0.0145436 0.00981377 -0.00122787 -0.0145546 0.00979662 -0.00122376 -0.0145484 0.00984778 -0.00105978 -0.014625 0.0094768 -0.00148758 -0.0146245 0.00957052 -0.00116964 -0.0146239 0.00966678 -0.00083218 -0.0146246 0.00974907 -0.000490178 -0.0146223 0.0097321 -0.000668072 -0.0146226 0.00968258 -0.000836459 -0.0146235 0.00971961 -0.000664548 -0.014624 0.00981585 -0.000305173 -0.0146235 0.00976757 -0.000495561 -0.0146175 0.00971836 -0.000846147 -0.0146166 0.00977009 -0.000678794 -0.0146148 0.00973226 -0.00084991 -0.014608 0.00975979 -0.000857365 -0.0146143 0.00978106 -0.00068189 -0.0146003 0.00974061 -0.00103229 -0.0145996 0.00978689 -0.000864704 -0.0145991 0.00983501 -0.000697115 -0.0146026 0.00982436 -0.00069411 -0.0146204 0.00985096 -0.000315619 -0.0146237 0.0098654 -0.00015838 -0.0146224 0.00977937 -0.000498992 -0.0146233 0.00982591 -0.000308165 -0.0146223 0.00983595 -0.000311152 -0.0146191 0.00980396 -0.000506148 -0.0146162 0.00982026 -0.000510889 -0.014609 0.00980284 -0.000688037 -0.0146211 0.00984596 -0.000314131 -0.0146217 0.00984096 -0.000312642 -0.0146239 0.00986289 -0.000157627 -0.0146197 0.00985595 -0.000317104 -0.0146242 0.00985788 -0.000156119 -0.0146249 0.00984173 -0.000151266 -0.0146248 0.00984533 -0.000152347 -0.0146187 0.009875 -0.000273995 -0.0146233 0.0098702 -0.000159823 -0.00687568 0.00946593 0.00599876 -0.00683572 0.00942419 0.006 -0.00642005 0.00947045 0.00599188 -0.00641753 0.00951434 0.0059818 -0.00622708 0.009602 0.00594551 -0.00762519 0.00969393 0.00598949 -0.00778521 0.00969851 0.0059957 -0.00793454 0.00974486 0.00599502 -0.00810722 0.0097759 
0.00599721 -0.00641006 0.0096446 0.00592452 -0.00710279 0.00965381 0.00596633 -0.00622708 0.00966889 0.00590451 -0.00640815 0.00967782 0.00590191 -0.00659404 0.00966785 0.00591934 -0.00810213 0.00979291 0.005995 -0.0080996 0.00980139 0.00599366 -0.00845052 0.00985003 0.0059993 -0.00622708 0.00977951 0.00579389 -0.00640174 0.00978952 0.00578716 -0.00622708 0.0098205 0.005727 -0.00640049 0.00981124 0.00575332 -0.00708045 0.00976816 0.00590232 -0.00759647 0.00980509 0.00595056 -0.00622708 0.00985053 0.00565451 -0.00682169 0.0098104 0.00582033 -0.00707257 0.00980853 0.00586844 -0.00733022 0.0098121 0.00590933 -0.00775405 0.00981353 0.00596467 -0.00775052 0.00982658 0.00595906 -0.00790945 0.00983408 0.00597266 -0.00807758 0.009875 0.00597498 -0.00844464 0.00986959 0.0059978 -0.008351 0.009875 0.00599374 -0.00639779 0.00985826 0.00564612 -0.00657818 0.00981878 0.00576489 -0.00681616 0.00984571 0.00577184 -0.00790365 0.00985473 0.00596471 -0.00808229 0.00985927 0.00598007 -0.00657476 0.0098513 0.00570339 -0.00726643 0.009875 0.00584417 -0.00753472 0.009875 0.00590011 -0.0078053 0.009875 0.00594374 -0.00774387 0.00985113 0.00594723 -0.00706573 0.00984368 0.00583205 -0.00660069 0.00960452 0.00595439 -0.00685959 0.00956862 0.00598024 -0.00686488 0.00953485 0.00598885 -0.00641508 0.00955713 0.00596779 -0.00660984 0.00951749 0.00598511 -0.00668427 0.00940394 0.006 -0.00653236 0.00938849 0.006 -0.00844627 0.00986418 0.0059983 -0.00808953 0.00983505 0.00598671 -0.00809204 0.00982668 0.00598869 -0.00760442 0.00977431 0.00596451 -0.00683205 0.0097443 0.00588667 -0.00658522 0.00975173 0.00585043 -0.00640314 0.00976515 0.00581913 -0.00827216 0.00983668 0.00599581 -0.00809707 0.00980984 0.00599216 -0.00776858 0.0097599 0.00598306 -0.0073474 0.00973803 0.00595325 -0.0076126 0.00974268 0.00597623 -0.00683591 0.00971967 0.00590583 -0.00683995 0.00969389 0.00592339 -0.00659093 0.00969745 0.00589844 -0.00640468 0.00973829 0.00584905 -0.00845137 0.00984719 0.00599945 -0.00845307 0.00984153 0.00599969 -0.00828056 0.00980836 0.00599877 -0.00811087 0.00976369 0.0059984 -0.00712028 0.0095644 0.00599197 -0.00828486 0.00979385 0.0059996 -0.00794997 0.00968998 0.00599982 -0.00764583 0.00961402 0.00599962 -0.00764097 0.00963283 0.00599849 -0.00738398 0.00958031 0.00599758 -0.00828864 0.0097811 0.00599996 -0.00811947 0.00973494 0.0059999 -0.00828675 0.00978747 0.00599982 -0.00738953 0.00955638 0.00599939 -0.00713168 0.00950614 0.00599911 -0.0063994 0.00983018 0.00571785 -0.00658031 0.00979851 0.00579502 -0.00658798 0.00972546 0.00587543 -0.00640635 0.00970912 0.0058767 -0.00684416 0.00966708 0.00593927 -0.00709807 0.00967796 0.00595605 -0.00709346 0.00970154 0.00594448 -0.00735657 0.00969847 0.00597003 -0.00735194 0.00971843 0.00596209 -0.0065973 0.00963682 0.00593803 -0.00641206 0.00960967 0.00594438 -0.00661824 0.00943754 0.00599834 -0.00661401 0.00947779 0.00599336 -0.00642261 0.00942585 0.00599797 -0.00682493 0.00978976 0.00584383 -0.00658266 0.00977613 0.00582358 -0.00708464 0.0097467 0.00591757 -0.00792185 0.00978999 0.00598606 -0.00809455 0.00981827 0.0059905 -0.00826882 0.00984795 0.00599413 -0.00685298 0.00961075 0.00596577 -0.00660575 0.00955639 0.00597363 -0.00682839 0.0097677 0.00586597 -0.0068485 0.00963933 0.00595342 -0.00712596 0.00953537 0.00599643 -0.00687025 0.00950056 0.00599504 -0.00707642 0.00978879 0.00588593 -0.00734294 0.00975724 0.00594353 -0.00733859 0.00977601 0.00593296 -0.00708898 0.00972447 0.00593164 -0.00736128 0.00967819 0.00597707 -0.00710761 0.00962916 0.00597529 -0.00711466 0.0095931 
0.00598575 -0.00737297 0.00962777 0.00599032 -0.00732267 0.00984466 0.00588372 -0.00733434 0.00979431 0.00592155 -0.00762095 0.00971032 0.00598565 -0.00736605 0.00965763 0.00598319 -0.00763613 0.00965158 0.0059966 -0.00737846 0.00960413 0.00599455 -0.00778947 0.00968277 0.00599758 -0.00794635 0.00970285 0.00599929 -0.00759259 0.00982011 0.00594276 -0.00758536 0.00984811 0.00592633 -0.00776125 0.00978697 0.00597468 -0.00760042 0.00978982 0.00595781 -0.00760848 0.00975859 0.00597066 -0.00761675 0.00972658 0.00598123 -0.0076313 0.00967025 0.00599395 -0.00779803 0.00965116 0.00599973 -0.00779375 0.00966699 0.00599892 -0.00791252 0.00982318 0.00597642 -0.00775763 0.00980032 0.00596987 -0.00791561 0.00981219 0.00597991 -0.0077649 0.00977349 0.00597907 -0.00791872 0.00980112 0.00598312 -0.00777229 0.00974619 0.00598663 -0.00777603 0.00973239 0.00598978 -0.00793135 0.00975622 0.0059932 -0.007925 0.00977878 0.00598872 -0.00777979 0.00971851 0.00599252 -0.00792817 0.00976753 0.0059911 -0.00810467 0.00978442 0.00599619 -0.00793913 0.00972853 0.00599714 -0.00794274 0.00971571 0.00599839 -0.00808704 0.00984339 0.00598458 -0.00826715 0.00985357 0.00599319 -0.00826397 0.00986431 0.00599119 -0.00827049 0.00984232 0.005995 -0.00844967 0.00985286 0.00599914 -0.00827551 0.00982537 0.0059972 -0.00827384 0.00983103 0.00599654 -0.00827887 0.00981404 0.00599832 -0.00845222 0.00984436 0.00599958 -0.00827719 0.00981971 0.00599779 -0.00811374 0.00975411 0.0059991 -0.00828297 0.00980023 0.00599929 -0.0081166 0.00974452 0.0059996 -0.0084562 0.00983111 0.00599996 -0.00803465 0.00970009 0.006 -0.00845715 0.00982793 0.00599999 -0.00844797 0.00985852 0.00599876 -0.00844712 0.00986135 0.00599854 -0.00844882 0.00985569 0.00599896 -0.00845429 0.00983747 0.00599983 -0.00845525 0.00983429 0.0059999 -0.0142026 0.00986818 0.00552742 -0.014352 0.0098205 0.0055 -0.0143596 0.00981474 0.00554022 -0.0144189 0.00977951 0.0055 -0.0143931 0.00976957 0.00564978 -0.014286 0.00976957 0.00576151 -0.0142137 0.00981474 0.00572082 -0.014125 0.00977951 0.00579389 -0.014125 0.0098205 0.005727 -0.0141751 0.00981474 0.00573263 -0.0141855 0.00984791 0.00565064 -0.0142323 0.00984791 0.00562187 -0.0142513 0.00984791 0.00560199 -0.0142667 0.00984791 0.00557918 -0.0143494 0.00981474 0.00557928 -0.0144277 0.00976957 0.0055519 -0.0144146 0.00976957 0.00560231 -0.0144876 0.00971364 0.00556217 -0.0144461 0.00971364 0.00567941 -0.0143279 0.00976957 0.00573054 -0.0142395 0.00976957 0.00578496 -0.0141897 0.00976957 0.00580022 -0.0141406 0.00971364 0.00586753 -0.014125 0.00972855 0.00585356 -0.0144786 0.00972855 0.0055 -0.0145295 0.00966889 0.0055 -0.0145376 0.00964847 0.00557074 -0.014368 0.00971364 0.00577615 -0.0142622 0.00971364 0.00584134 -0.0142132 0.00964847 0.00590919 -0.014125 0.009602 0.00594551 -0.0144904 0.00964847 0.00570414 -0.0145247 0.00957585 0.00572331 -0.0143651 0.00957585 0.00588991 -0.0141428 0.00964847 0.00591821 -0.0145567 0.00957585 0.00565254 -0.014582 0.00949774 0.00566147 -0.0144813 0.00957585 0.00578766 -0.0142957 0.00957585 0.00592487 -0.0143791 0.00949774 0.00591274 -0.0143057 0.00949774 0.00594975 -0.0142271 0.00949774 0.00597382 -0.0141456 0.00949774 0.00598426 -0.0146027 0.00949774 0.00558192 -0.0146161 0.00941629 0.00558421 -0.0145948 0.00941629 0.005666 -0.0145021 0.00949774 0.0058045 -0.0143108 0.00941629 0.00596236 -0.0141461 0.00941629 0.00599784 -0.0146188 0.009375 0.00557822 -0.0145705 0.009375 0.005727 -0.0144542 0.00941629 0.00587406 -0.01423 0.00941629 0.00598711 -0.01456 0.00941629 0.00574302 -0.0142795 0.009375 
0.00597553 -0.0141557 0.00986818 0.00557636 -0.0141681 0.00986818 0.00557008 -0.0141794 0.00986818 0.00556178 -0.014189 0.00986818 0.0055517 -0.0141968 0.00986818 0.00554014 -0.0142795 0.00985053 0.0055 -0.0142061 0.00986818 0.00551391 -0.0141285 0.00986818 0.00558222 -0.0145763 0.00957585 0.00557738 -0.014285 0.00984791 0.00552744 -0.0145197 0.00964847 0.00563945 -0.0144718 0.00971364 0.00562255 -0.0143328 0.00981474 0.00561606 -0.0142781 0.00984791 0.00555409 -0.0145481 0.00949774 0.00573639 -0.0144507 0.00964847 0.00576296 -0.0143639 0.00976957 0.00569293 -0.0145127 0.00941629 0.00581304 -0.0144112 0.00971364 0.0057311 -0.0143102 0.00981474 0.0056495 -0.0142822 0.00981474 0.00567864 -0.0144452 0.00949774 0.00586385 -0.0144275 0.00957585 0.00584372 -0.0143445 0.00964847 0.00585644 -0.0144015 0.00964847 0.00581422 -0.0142498 0.00981474 0.00570264 -0.0143863 0.00941629 0.00592431 -0.0142811 0.00964847 0.0058884 -0.0143179 0.00971364 0.00581325 -0.0142101 0.00984791 0.00563825 -0.0141423 0.00986818 0.00558045 -0.0141444 0.00957585 0.00595748 -0.0142215 0.00957585 0.00594761 -0.014138 0.00976957 0.00580683 -0.0142025 0.00971364 0.00585961 -0.0141351 0.00981474 0.00573776 -0.0141592 0.00984791 0.00565871 -0.0141319 0.00984791 0.00566221 -0.0124312 0.00946994 0.00599091 -0.0124312 0.0098659 0.00559494 -0.014125 0.00986884 0.00557822 -0.0110229 0.00952951 0.00597553 -0.0110229 0.00945322 0.00599385 -0.0110229 0.009602 0.00594551 -0.0124312 0.00980623 0.00575306 -0.0110229 0.00977951 0.00579389 -0.0110229 0.00985053 0.00565451 -0.0110229 0.00986884 0.00557822 -0.0124312 0.00987489 0.00551061 -0.0124312 0.00962806 0.00593123 -0.0110229 0.00972855 0.00585356 -0.0124312 0.0098428 0.00567654 -0.014125 0.00985053 0.00565451 -0.0124312 0.00975726 0.0058223 -0.0124312 0.0096973 0.00588227 -0.014125 0.00966889 0.00590451 -0.014125 0.00952951 0.00597553 -0.0124312 0.00955154 0.0059678 -0.014125 0.00945322 0.00599385 -0.0124312 0.00938561 0.00599989 -0.0142199 0.0098659 0.00394896 -0.0144473 0.00975726 0.00394896 -0.0145705 0.009602 0.0055 -0.0146159 0.00946994 0.00394896 -0.0146188 0.00945322 0.0055 -0.0142795 0.00985053 0.00239792 -0.0143015 0.0098428 0.00394896 -0.0145295 0.00966889 0.00239792 -0.0145705 0.009602 0.00239792 -0.0146188 0.00945322 0.00239792 -0.0146249 0.00938561 0.00394896 -0.0144786 0.00972855 0.00239792 -0.0145073 0.0096973 0.00394896 -0.0146005 0.00952951 0.0055 -0.0145928 0.00955154 0.00394896 -0.0145562 0.00962806 0.00394896 -0.0143781 0.00980623 0.00394896 -0.0142032 0.00986884 0.0055 -0.0142032 0.00986885 0.00239792 -0.0141356 0.00987489 0.00394896 -0.014625 0.00940394 0.00194073 -0.014625 0.00942419 0.00178928 -0.0146068 0.00951434 0.00220747 -0.0146005 0.00952951 0.00239792 -0.0145694 0.00960968 0.00221294 -0.0146052 0.00956863 0.00176541 -0.0146108 0.00959311 0.00151033 -0.0146003 0.00962916 0.00151738 -0.014619 0.00967025 0.000993688 -0.0145794 0.00960452 0.00202431 -0.0145784 0.00963933 0.00177649 -0.0145913 0.00965382 0.0015222 -0.0146021 0.0096782 0.00126372 -0.0146145 0.00969394 0.000999805 -0.0146245 0.0098472 0.000173627 -0.0146222 0.00982537 0.000349479 -0.0146243 0.00985003 0.000174477 -0.014563 0.00963682 0.0020277 -0.0145643 0.00966708 0.00178084 -0.0146062 0.00972658 0.00100824 -0.0146182 0.00975622 0.000693642 -0.0146187 0.00980139 0.000525396 -0.0144189 0.00977951 0.00239792 -0.0144122 0.00978952 0.00222326 -0.014352 0.0098205 0.00239792 -0.0143783 0.00981124 0.00222451 -0.01442 0.00979852 0.00204469 -0.0145109 0.00978879 0.00154857 -0.0145466 0.00979431 
0.00129065 -0.0145897 0.00981353 0.000870934 -0.0146049 0.00981219 0.000709379 -0.0146233 0.00986418 0.000178726 -0.0144688 0.00978976 0.00180007 -0.0144453 0.00981041 0.0018033 -0.0144934 0.00980853 0.00155242 -0.0145841 0.00982658 0.00087447 -0.0146228 0.00986959 0.000180352 -0.0146162 0.00986431 0.000361022 -0.0143899 0.00981878 0.00204682 -0.0145897 0.00985473 0.000721339 -0.0142711 0.00985826 0.00222721 -0.014125 0.009875 0.00239792 -0.0143208 0.009875 0.00188611 -0.0143968 0.00984571 0.00180884 -0.0145687 0.009875 0.000819705 -0.0145513 0.00984811 0.00103963 -0.014457 0.00984368 0.00155927 -0.0146 0.009875 0.000547423 -0.014625 0.00961777 0.000888109 -0.014625 0.00954239 0.00118723 -0.0146244 0.00955638 0.00123546 -0.0145986 0.0095564 0.00201925 -0.0146139 0.00953485 0.00176012 -0.0145928 0.00955713 0.00220992 -0.0146238 0.00946593 0.00174932 -0.014623 0.00942585 0.00220239 -0.0146182 0.00985357 0.000357839 -0.0146235 0.00986135 0.000177877 -0.0146137 0.00982668 0.000532961 -0.0145949 0.00980032 0.000867357 -0.0145828 0.00978982 0.00102457 -0.0145685 0.00975724 0.00128205 -0.0145426 0.0097467 0.00154036 -0.014491 0.0097677 0.00179661 -0.0145117 0.0097443 0.00179294 -0.0144441 0.00976515 0.00222187 -0.01462 0.00984232 0.000354503 -0.0146208 0.00983668 0.000352831 -0.0146111 0.00978999 0.000703136 -0.0146041 0.0097735 0.000860092 -0.0145782 0.00973804 0.0012776 -0.0145566 0.00972447 0.00153602 -0.0145871 0.00971844 0.00127305 -0.0145269 0.00967782 0.00221685 -0.0146246 0.00984436 0.000172776 -0.0146233 0.00981404 0.000346119 -0.0146247 0.00984153 0.000171925 -0.0146234 0.00976369 0.00051412 -0.0146226 0.00968278 0.000835521 -0.0146195 0.00960413 0.00124654 -0.0145908 0.00961075 0.00177201 -0.0146248 0.00968999 0.000675021 -0.0146239 0.00966699 0.000831245 -0.0146247 0.00965117 0.000826959 -0.0146246 0.00961402 0.000979164 -0.0146226 0.00958031 0.00124101 -0.014617 0.0095644 0.00150471 -0.014625 0.00982793 0.000167842 -0.014625 0.0097811 0.000336352 -0.0146249 0.00973494 0.000505521 -0.0146214 0.00953538 0.00149904 -0.0143428 0.00983018 0.0022256 -0.014474 0.00973829 0.00222032 -0.0145017 0.00970912 0.00221865 -0.0145234 0.00969745 0.00203407 -0.0146081 0.0097599 0.000856409 -0.0146137 0.00977879 0.000699987 -0.014624 0.00985569 0.000176178 -0.0145495 0.0096446 0.00221494 -0.014625 0.009375 0.00239792 -0.0146233 0.00943755 0.00200676 -0.0146184 0.00947779 0.00201099 -0.0146169 0.00947046 0.00220495 -0.0143284 0.0098513 0.00205024 -0.0144486 0.00977613 0.00204234 -0.0145004 0.00972546 0.00203701 -0.0144754 0.00975173 0.00203977 -0.0145308 0.00971967 0.00178908 -0.0145484 0.00969389 0.00178504 -0.0146081 0.00980112 0.000706268 -0.0146238 0.00985852 0.000177028 -0.0146191 0.00984795 0.000356172 -0.0145443 0.00966785 0.00203096 -0.0146101 0.00951749 0.00201516 -0.0145273 0.00976816 0.00154455 -0.0145695 0.00970154 0.00153153 -0.0146241 0.00950614 0.00149332 -0.01462 0.00950056 0.00175474 -0.0145343 0.0098121 0.00129477 -0.014558 0.00977601 0.0012864 -0.014595 0.00969848 0.00126842 -0.0145811 0.00967796 0.00152693 -0.0146153 0.00962777 0.00125202 -0.0145678 0.00982011 0.0010324 -0.0145087 0.00984466 0.00130233 -0.0145756 0.00980509 0.00102852 -0.0145895 0.00977432 0.00102057 -0.0146012 0.00974268 0.0010124 -0.0145957 0.0097586 0.00101651 -0.0146107 0.00971033 0.00100404 -0.0146082 0.00965763 0.00125895 -0.0146235 0.00963283 0.000984023 -0.0146243 0.00970286 0.000678639 -0.0146234 0.00971571 0.000682252 -0.0146116 0.0097462 0.000852697 -0.0146148 0.0097324 0.00084896 -0.0146207 0.00969851 
0.000839783 -0.0146216 0.00965159 0.000988867 -0.0145722 0.00985113 0.000881119 -0.0145977 0.00983408 0.000715535 -0.0146014 0.00982318 0.000712469 -0.0146117 0.00983505 0.000535466 -0.0145997 0.00978697 0.000863742 -0.0146172 0.00980985 0.000527925 -0.0146155 0.00981828 0.000530447 -0.0146161 0.00976753 0.000696822 -0.01462 0.00974486 0.000690449 -0.0146175 0.00971852 0.0008452 -0.0146221 0.00972853 0.000685858 -0.0146212 0.00978442 0.000520319 -0.0146222 0.0097759 0.000517773 -0.0146241 0.00975411 0.000511256 -0.01462 0.00979291 0.00052286 -0.0146215 0.00983103 0.000351157 -0.0146228 0.00981971 0.0003478 -0.0146243 0.00980023 0.000342024 -0.0146248 0.00983747 0.000170706 -0.0146238 0.00980837 0.000344436 -0.0146246 0.00974453 0.000508389 -0.0146246 0.00979386 0.000340134 -0.0146248 0.00978748 0.000338243 -0.014625 0.00983111 0.000168797 -0.0146249 0.00983429 0.000169752 -0.0146096 0.00984339 0.000537961 -0.0146051 0.00985927 0.000542714 -0.0146241 0.00985286 0.000175328 -0.0106573 0.00981806 0.00576847 -0.0107822 0.00982521 0.00573655 -0.010779 0.00978356 0.00580169 -0.01065 0.00975097 0.00585279 -0.0106471 0.00972477 0.00587741 -0.0106409 0.00966743 0.00592065 -0.010768 0.0096405 0.00592994 -0.0107615 0.00955571 0.00597014 -0.010755 0.00947215 0.00599248 -0.0107517 0.00942924 0.00599812 -0.0107176 0.00938849 0.006 -0.0106161 0.00943888 0.00599837 -0.0104143 0.00942419 0.006 -0.0101431 0.00953242 0.00599633 -0.0098108 0.00963799 0.00599134 -0.00964455 0.00969094 0.00598906 -0.00947491 0.00973226 0.00598976 -0.00930379 0.00977009 0.00599156 -0.00913115 0.00980396 0.00599412 -0.00878112 0.00985788 0.00599918 -0.0109053 0.0098609 0.00562766 -0.0109042 0.00983464 0.00570254 -0.0110229 0.0098205 0.005727 -0.0109026 0.00979497 0.00577521 -0.0107807 0.00980565 0.00576993 -0.0109016 0.00977077 0.00580884 -0.0109005 0.00974387 0.00584034 -0.0107771 0.00975909 0.00583164 -0.010775 0.00973239 0.0058596 -0.0107728 0.0097036 0.00588541 -0.0108993 0.00971445 0.0058695 -0.0108929 0.00955935 0.00596585 -0.0108951 0.00961321 0.00594105 -0.0107654 0.00960657 0.00594841 -0.0108966 0.0096489 0.00592003 -0.010898 0.00968272 0.00589613 -0.0108911 0.00951537 0.00598069 -0.0107583 0.00951442 0.00598313 -0.0108893 0.00947021 0.00599139 -0.0108874 0.00942424 0.00599784 -0.01087 0.00937854 0.006 -0.00980544 0.00961563 0.00599512 -0.00930068 0.00975907 0.00599355 -0.00912877 0.00979578 0.00599536 -0.00996587 0.00953479 0.00599928 -0.00980006 0.00959313 0.00599783 -0.00963347 0.00964774 0.00599646 -0.00929756 0.009748 0.00599528 -0.00951311 0.00961777 0.006 -0.00911518 0.00974907 0.00599963 -0.00877373 0.00983328 0.00599999 -0.00921534 0.00970009 0.006 -0.00917242 0.009875 0.00597498 -0.0091474 0.00985982 0.00598153 -0.00932781 0.00985517 0.00596651 -0.00984877 0.00979662 0.00592963 -0.00984456 0.00977904 0.00593989 -0.0100135 0.00975204 0.00593349 -0.01035 0.00971997 0.00591283 -0.0106376 0.00963658 0.00593904 -0.00971528 0.009875 0.00590011 -0.0096775 0.00981936 0.00594048 -0.00968478 0.00984777 0.00592342 -0.00985287 0.00981377 0.00591862 -0.0100221 0.00979134 0.00590774 -0.0100178 0.00977199 0.0059211 -0.0107705 0.00967291 0.0059089 -0.0106441 0.00969687 0.00590007 -0.00986043 0.00984533 0.00589553 -0.0101931 0.00978862 0.00588292 -0.0105207 0.00974607 0.00587237 -0.010249 0.009875 0.00577603 -0.0102039 0.00984372 0.00582768 -0.010371 0.00984488 0.00578797 -0.0106551 0.00979774 0.00579818 -0.0105346 0.00984755 0.00574456 -0.00892299 0.0097917 0.00599997 -0.00928248 0.00969456 0.00599983 -0.00911248 0.00973981 
0.00599991 -0.00877542 0.00983891 0.00599992 -0.00877627 0.00984173 0.00599986 -0.00892635 0.009803 0.00599969 -0.0087781 0.00984784 0.00599967 -0.00912399 0.00977936 0.00599741 -0.00912638 0.00978758 0.00599646 -0.00929307 0.0097321 0.00599729 -0.00946573 0.00969833 0.00599569 -0.00928602 0.0097071 0.00599932 -0.00945289 0.00965093 0.00599973 -0.00877961 0.00985286 0.00599945 -0.00893317 0.00982591 0.00599827 -0.00893167 0.00982088 0.00599868 -0.00893913 0.00984596 0.00599608 -0.00878263 0.00986289 0.00599885 -0.00894062 0.00985096 0.00599539 -0.0094969 0.00981347 0.00596459 -0.00878338 0.0098654 0.00599867 -0.00878482 0.0098702 0.00599828 -0.0089421 0.00985595 0.00599465 -0.00932211 0.00983501 0.00597406 -0.00950709 0.00985111 0.00594711 -0.00950044 0.00982654 0.00595897 -0.00967358 0.00980411 0.00594858 -0.00877457 0.0098361 0.00599997 -0.0101373 0.0095028 0.00599908 -0.0105111 0.009875 0.00569584 -0.0106608 0.00985085 0.00570778 -0.0107693 0.009875 0.00560377 -0.0107845 0.00985527 0.0056688 -0.0110229 0.00966889 0.00590451 -0.0109034 0.00981631 0.0057397 -0.0103616 0.00978883 0.00585519 -0.0106526 0.00977533 0.00582633 -0.0105239 0.00977 0.00584922 -0.0104856 0.00948953 0.00599438 -0.01032 0.00954134 0.00598971 -0.0101616 0.0096274 0.00597463 -0.0101545 0.0095909 0.00598537 -0.00982223 0.00968577 0.00597947 -0.00948237 0.00975979 0.00598302 -0.00930997 0.00979198 0.00598677 -0.0106204 0.00947877 0.00599347 -0.0103322 0.00961439 0.00596837 -0.00999487 0.00966715 0.00597294 -0.00999966 0.00968904 0.00596465 -0.00965729 0.00974061 0.00597527 -0.00931304 0.00980284 0.00598399 -0.0091406 0.00983644 0.00598769 -0.0106247 0.00951814 0.00598536 -0.00966556 0.00977282 0.00596309 -0.00949332 0.00980025 0.00596981 -0.00931608 0.00981364 0.00598094 -0.00931911 0.00982436 0.00597763 -0.0106341 0.0096045 0.00595514 -0.0105057 0.00963669 0.00594738 -0.00984025 0.00976106 0.00594938 -0.00983586 0.00974271 0.00595811 -0.0106289 0.00955674 0.00597406 -0.00964884 0.00970765 0.00598507 -0.00893764 0.00984096 0.00599671 -0.0103088 0.00947518 0.00599885 -0.0103144 0.00950841 0.00599542 -0.0104805 0.0094526 0.0059986 -0.0103254 0.00957381 0.00598175 -0.0104906 0.00952604 0.00598739 -0.0104955 0.00956193 0.00597765 -0.0105016 0.00960655 0.00596131 -0.0105097 0.00966584 0.00593144 -0.0103414 0.00966884 0.00594384 -0.0103369 0.00964198 0.00595694 -0.0101713 0.00967678 0.00595489 -0.0103458 0.00969486 0.00592912 -0.0101759 0.00970061 0.005943 -0.0105135 0.00969387 0.00591357 -0.0103541 0.00974405 0.00589504 -0.0105172 0.00972065 0.00589385 -0.0105298 0.00981301 0.00579834 -0.010527 0.00979235 0.0058245 -0.0101488 0.00956183 0.00599176 -0.0101665 0.00965235 0.00596544 -0.0101805 0.00972378 0.00592982 -0.0103579 0.00976703 0.0058758 -0.010365 0.00980935 0.00583328 -0.00997158 0.00956087 0.00599713 -0.00998289 0.0096125 0.00598857 -0.00997726 0.0095868 0.00599356 -0.00981756 0.00966627 0.00598495 -0.00998999 0.0096449 0.00598015 -0.0100044 0.00971052 0.00595529 -0.00982684 0.00970503 0.00597316 -0.0101849 0.00974621 0.00591539 -0.010009 0.00973153 0.0059449 -0.0101891 0.00976784 0.00589974 -0.010197 0.00980847 0.00586499 -0.0100336 0.00984393 0.00586358 -0.0100262 0.00981004 0.00589345 -0.00945718 0.00966677 0.00599892 -0.00962856 0.00962861 0.00599843 -0.00979465 0.00957052 0.00599946 -0.00963835 0.00966679 0.00599371 -0.00966145 0.00975682 0.00596948 -0.00983139 0.00972402 0.00596604 -0.0096696 0.00978859 0.00595612 -0.00962364 0.00960941 0.00599961 -0.00928955 0.00971961 0.00599847 -0.00911787 0.00975833 
0.00599916 -0.00912056 0.00976757 0.00599851 -0.00946146 0.00968258 0.00599757 -0.00947115 0.00971836 0.0059925 -0.00947865 0.00974607 0.0059866 -0.00965309 0.00972422 0.00598047 -0.00930689 0.00978106 0.0059893 -0.00948605 0.0097734 0.00597903 -0.00948971 0.00978689 0.00597462 -0.00893017 0.00981585 0.00599903 -0.00893466 0.00983093 0.0059978 -0.00893615 0.00983594 0.00599728 -0.00913589 0.00982025 0.00599119 -0.00913352 0.00981212 0.00599273 -0.00913825 0.00982836 0.00598951 -0.00914294 0.00984449 0.00598571 -0.00878187 0.00986039 0.00599902 -0.00878037 0.00985537 0.00599932 -0.00877886 0.00985035 0.00599957 -0.00877735 0.00984532 0.00599976 -0.00892803 0.00980864 0.00599945 -0.00892467 0.00979735 0.00599986 -0.00894494 0.00986549 0.00599308 0.008625 0.009375 0.0055 -0.0137355 0.009875 -0.00314372 -0.014125 0.009875 -0.00239791 -0.014125 0.009375 0.006 -0.0110229 0.009375 0.006 -0.0105657 0.00940394 0.006 -0.00713742 0.00947679 0.006 -0.00743777 0.00954239 0.006 -0.00637996 0.00937854 0.006 0.008125 0.009375 0.006 0.008125 0.005375 0.006 -0.0101126 0.00947679 0.006 -0.00981222 0.00954239 0.006 -0.00773688 0.00961777 0.006 -0.0138769 0.009875 0.00290129 -0.0135829 0.009875 0.00337927 -0.014125 0.009875 0.0055 -0.0128676 0.009875 0.00424264 -0.0122324 0.009875 0.00479442 -0.0120043 0.009875 0.00495788 -0.0112775 0.009875 0.00538184 -0.00165406 0.014375 0.000239317 -0.00163229 0.014375 0.000120539 -0.00173954 0.014875 0.000464725 -0.00187649 0.014375 0.000663125 -0.00187649 0.014875 0.000663125 -0.00205694 0.014875 0.000822986 -0.00250446 0.014875 0.000992711 -0.00308972 0.014875 0.000885458 -0.00328812 0.014875 0.000748513 -0.00351046 0.014375 0.000464725 -0.00351046 0.014875 0.000464725 -0.003625 0.014375 1.88505e-09 -0.00361771 0.014875 0.000120539 -0.00361771 0.014875 -0.000120535 -0.00359594 0.014875 -0.000239314 -0.00337351 0.014875 -0.000663121 -0.00308972 0.014375 -0.000885454 -0.00308972 0.014875 -0.000885454 -0.0029796 0.014875 -0.000935014 -0.00286432 0.014875 -0.00097094 -0.002625 0.014375 -0.000999998 -0.00250446 0.014375 -0.000992707 -0.00238568 0.014375 -0.00097094 -0.0022704 0.014375 -0.000935014 -0.00205694 0.014375 -0.000822982 -0.00216028 0.014875 -0.000885454 -0.00168998 0.014375 -0.000354603 -0.00163229 0.014875 -0.000120535 -0.001625 0.014375 1.88505e-09 -0.00180202 0.014375 0.000568067 -0.002625 0.014875 0.001 -0.00274554 0.014375 0.000992711 -0.0029796 0.014875 0.000935018 -0.00328812 0.014375 0.000748513 -0.00356002 0.014375 0.000354607 -0.00356002 0.014875 0.000354607 -0.00359594 0.014875 0.000239317 -0.00361771 0.014375 -0.000120535 -0.00359594 0.014375 -0.000239314 -0.00356002 0.014375 -0.000354603 -0.00351046 0.014875 -0.000464721 -0.00319306 0.014875 -0.000822982 -0.0029796 0.014375 -0.000935014 -0.00286432 0.014375 -0.00097094 -0.00274554 0.014375 -0.000992707 -0.00274554 0.014875 -0.000992707 -0.00238568 0.014875 -0.00097094 -0.0022704 0.014875 -0.000935014 -0.00196188 0.014875 -0.000748509 -0.001625 0.014875 1.95062e-09 -0.00356002 0.014875 -0.000354603 -0.00165406 0.014875 -0.000239314 -0.00168998 0.014875 -0.000354603 -0.00173954 0.014875 -0.000464721 -0.00187649 0.014875 -0.000663121 -0.00205694 0.014875 -0.000822982 -0.002625 0.014875 -0.000999998 -0.00250446 0.014875 -0.000992707 -0.00344798 0.014875 -0.000568063 -0.00180202 0.014875 -0.000568063 -0.00328812 0.014875 -0.000748509 -0.003625 0.014875 1.95062e-09 -0.00344798 0.014875 0.000568067 -0.00337351 0.014875 0.000663125 -0.00196188 0.014875 0.000748513 -0.00216028 0.014875 0.000885458 -0.00238568 
0.014875 0.000970944 -0.00286432 0.014875 0.000970944 -0.00274554 0.014875 0.000992711 -0.00163229 0.014875 0.000120539 -0.00165406 0.014875 0.000239317 -0.00168998 0.014875 0.000354607 -0.00180202 0.014875 0.000568067 -0.00319306 0.014875 0.000822986 -0.0022704 0.014875 0.000935018 -0.00304687 0.013125 0.0022101 -0.00342986 0.009875 0.00300176 -0.00343403 0.013125 0.00300896 -0.00375589 0.013125 0.00350596 -0.00411236 0.009875 0.00395425 -0.00393482 0.013125 0.00374195 -0.00453828 0.013125 0.00439303 -0.00476002 0.013125 0.00458933 -0.00520382 0.009875 0.00492905 -0.00499119 0.013125 0.00477445 -0.00544886 0.009875 0.0050904 -0.0052312 0.013125 0.00494794 -0.00682313 0.013125 0.00572305 -0.00798085 0.013125 0.00596532 -0.00857197 0.013125 0.00599977 -0.0094447 0.009875 0.00594374 -0.00916361 0.013125 0.00597578 -0.00329188 0.013125 0.00274915 -0.00358281 0.009875 0.00325212 -0.00358883 0.013125 0.00326143 -0.00412517 0.013125 0.00396882 -0.00431105 0.009875 0.00417011 -0.00452005 0.009875 0.004376 -0.0059611 0.009875 0.00537621 -0.00622708 0.009875 0.0055 -0.00648075 0.009875 0.00560377 -0.00654293 0.013125 0.00562717 -0.00673889 0.009875 0.00569584 -0.00700096 0.009875 0.00577603 -0.00710772 0.013125 0.00580499 -0.008625 0.009875 0.006 -0.008899 0.009875 0.00599374 -0.0103254 0.013125 0.005754 -0.0106073 0.013125 0.00566309 -0.0121739 0.013125 0.00483794 -0.0124083 0.013125 0.00465693 -0.0126646 0.009875 0.00443641 -0.0128489 0.013125 0.00426135 -0.0132455 0.009875 0.0038277 -0.0132483 0.013125 0.00382427 -0.0134194 0.009875 0.00360743 -0.0130614 0.009875 0.00403959 -0.00975 0.013125 0.00589359 -0.00998357 0.009875 0.00584417 -0.0110229 0.009875 0.0055 -0.0111558 0.013125 0.00544012 -0.0115263 0.009875 0.00525191 -0.0117687 0.009875 0.00511048 -0.0124527 0.009875 0.00462047 -0.0126334 0.013125 0.00446458 -0.013054 0.013125 0.00404774 -0.0136027 0.013125 0.00334994 -0.0142288 0.009875 0.00214425 -0.0141632 0.013125 0.00230834 -0.0143637 0.013125 0.00175122 -0.014625 0.009875 1.29495e-09 -0.0137355 0.009875 0.00314372 -0.0140068 0.009875 0.0026525 -0.014401 0.009875 0.00162404 -0.0144692 0.009875 0.00135858 -0.0145084 0.013125 0.00117704 -0.0145251 0.009875 0.00109028 -0.0145593 0.013125 0.000885302 -0.0145958 0.013125 0.000591403 -0.0146177 0.013125 0.000296063 -0.0146187 0.009875 0.000273998 -0.0146 0.009875 -0.00054742 -0.0146177 0.013125 -0.000296059 -0.0145593 0.013125 -0.000885298 -0.0134194 0.009875 -0.00360743 -0.0132455 0.009875 -0.0038277 -0.0132483 0.013125 -0.00382426 -0.0145958 0.013125 -0.000591399 -0.0144692 0.009875 -0.00135857 -0.0143208 0.009875 -0.00188611 -0.0141632 0.013125 -0.00230834 -0.0140425 0.013125 -0.0025788 -0.0140068 0.009875 -0.0026525 -0.0138769 0.009875 -0.00290128 -0.013762 0.013125 -0.00310024 -0.0135829 0.009875 -0.00337927 -0.0130614 0.009875 -0.00403959 -0.013054 0.013125 -0.00404774 -0.0128676 0.009875 -0.00424264 -0.0120043 0.009875 -0.00495787 -0.0115263 0.009875 -0.00525191 -0.0112775 0.009875 -0.00538184 -0.0114212 0.013125 -0.00530861 -0.0111558 0.013125 -0.00544012 -0.0105111 0.009875 -0.00569584 -0.00998357 0.009875 -0.00584416 -0.00916361 0.013125 -0.00597577 -0.0128489 0.013125 -0.00426135 -0.0126646 0.009875 -0.00443641 -0.0126334 0.013125 -0.00446457 -0.0124527 0.009875 -0.00462047 -0.0122324 0.009875 -0.00479442 -0.0117687 0.009875 -0.00511048 -0.0116797 0.013125 -0.00516417 -0.008899 0.009875 -0.00599374 -0.00886809 0.013125 -0.00599507 -0.00807758 0.009875 -0.00597497 -0.00827599 0.013125 -0.00598984 -0.00726643 0.009875 -0.00584416 
-0.00570148 0.009875 -0.00523956 -0.00573543 0.013125 -0.00525836 -0.00412517 0.013125 -0.00396882 -0.00392446 0.009875 -0.00372894 -0.00375589 0.013125 -0.00350596 -0.00358883 0.013125 -0.00326143 -0.00342986 0.009875 -0.00300176 -0.00768729 0.013125 -0.00592627 -0.00682313 0.013125 -0.00572305 -0.00673889 0.009875 -0.00569584 -0.00654293 0.013125 -0.00562716 -0.00648075 0.009875 -0.00560376 -0.00499119 0.013125 -0.00477445 -0.00453828 0.013125 -0.00439303 -0.00316272 0.013125 -0.00248265 -0.00316158 0.009875 -0.00248012 0.012125 0.005375 -0.006 0.0122455 0.006875 -0.00599271 0.0125897 0.006875 -0.00588546 0.0125897 0.005375 -0.00588546 0.0126931 0.006875 -0.00582298 0.0128735 0.006875 -0.00566312 0.0130105 0.005375 -0.00546472 0.0130959 0.006875 -0.00523932 0.013125 0.006875 -0.005 0.0123643 0.005375 -0.00597094 0.0124796 0.006875 -0.00593502 0.0126931 0.005375 -0.00582298 0.0127881 0.006875 -0.00574851 0.0127881 0.005375 -0.00574851 0.0130105 0.006875 -0.00546472 0.01306 0.005375 -0.0053546 0.0106265 0.005375 0.000663123 0.0107119 0.005375 0.000748512 0.0108069 0.005375 0.000822984 0.0110204 0.005375 0.000935017 0.0112545 0.005375 0.000992709 0.0116143 0.005375 0.000970943 0.0117296 0.005375 0.000935017 0.0119431 0.005375 0.000822984 0.0125897 0.005375 0.00588546 0.01231 0.005375 0.000354605 0.0123677 0.005375 -0.000120536 0.0119431 0.005375 -0.000822983 0.0122455 0.005375 -0.00599271 0.0118397 0.005375 -0.000885455 0.0117296 0.005375 -0.000935016 0.0112545 0.005375 -0.000992708 0.0110204 0.005375 -0.000935016 0.0108069 0.005375 -0.000822983 0.00861884 0.005375 -0.00557822 0.00860053 0.005375 -0.00565451 0.0085705 0.005375 -0.00572699 0.00835199 0.005375 -0.0059455 0.00820322 0.005375 -0.00599384 0.012375 0.005375 7.04846e-10 0.0104041 0.005375 -0.000239315 0.0103823 0.005375 -0.000120536 0.0124796 0.005375 -0.00593502 0.013125 0.005375 -0.005 0.0128735 0.005375 -0.00566312 0.012948 0.005375 -0.00556806 0.0130959 0.005375 -0.00523932 0.0131177 0.005375 -0.00512054 0.0130959 0.005375 0.00523932 0.01306 0.005375 0.00535461 0.0130105 0.005375 0.00546472 0.012948 0.005375 0.00556807 0.0127881 0.005375 0.00574851 0.0126931 0.005375 0.00582298 0.00827951 0.005375 0.00597553 0.012125 0.005375 0.006 0.00835199 0.005375 0.0059455 0.0085705 0.005375 0.005727 0.0131177 0.006875 0.00512054 0.013125 0.005375 0.005 0.0131177 0.005375 0.00512054 0.012948 0.006875 0.00556807 0.0128735 0.006875 0.00566312 0.0125897 0.006875 0.00588546 0.0124796 0.006875 0.00593502 0.0123643 0.005375 0.00597094 0.0122455 0.006875 0.00599271 0.0130959 0.006875 0.00523932 0.01306 0.006875 0.00535461 0.0130105 0.006875 0.00546472 0.0128735 0.005375 0.00566312 0.0127881 0.006875 0.00574851 0.0126931 0.006875 0.00582298 0.0124796 0.005375 0.00593502 0.0123643 0.006875 0.00597094 0.0122455 0.005375 0.00599271 -0.012625 -0.009375 0.002 -0.014625 -0.009375 0.002 -0.012625 -0.009375 -0.002 -0.00622708 0.009875 -0.0055 -0.0059611 0.009875 -0.00537621 -0.00544886 0.009875 -0.00509039 -0.00520382 0.009875 -0.00492905 -0.00496697 0.009875 -0.00475592 -0.00473886 0.009875 -0.00457143 0.008125 0.009875 -0.0055 -0.00452005 0.009875 -0.004376 -0.00431105 0.009875 -0.00417011 -0.00411236 0.009875 -0.00395425 -0.000819998 0.009875 -0.0013433 -0.000629095 0.009875 -0.00103868 -0.000489144 0.009875 -0.000707545 -0.000382192 0.009875 -0.000179747 -0.000375 0.009875 1.29495e-09 -0.000382192 0.009875 0.000179749 -0.00040372 0.009875 0.000358349 -0.000439446 0.009875 0.000534657 -0.000819998 0.009875 0.00134331 -0.00133399 0.009875 0.00184277 
-0.00180885 0.009875 0.00209676 -0.00392446 0.009875 0.00372894 -0.00374781 0.009875 0.00349471 -0.00197897 0.009875 0.00215526 -0.00250962 0.009875 0.00224704 -0.00316158 0.009875 0.00248013 -0.00286897 0.009875 0.00223674 -0.00197897 0.009875 -0.00215526 -0.00374781 0.009875 -0.0034947 -0.00215321 0.009875 -0.00219998 -0.00180885 0.009875 -0.00209676 -0.00358281 0.009875 -0.00325212 -0.00328934 0.009875 -0.00274422 -0.00304687 0.009875 -0.00221009 -0.00328934 0.009875 0.00274422 -0.00473886 0.009875 0.00457143 -0.00496697 0.009875 0.00475593 0.008125 0.009875 0.0055 -0.00570148 0.009875 0.00523957 0.0103823 0.005375 0.000120538 0.0104041 0.005375 0.000239316 0.0103823 0.006875 0.000120538 0.0104041 0.006875 0.000239316 0.01044 0.005375 0.000354605 0.010552 0.005375 0.000568065 0.0106265 0.006875 0.000663124 0.0109103 0.006875 0.000885457 0.0112545 0.006875 0.00099271 0.011375 0.005375 0.001 0.0114955 0.006875 0.00099271 0.0121235 0.005375 0.000663123 0.012198 0.005375 0.000568065 0.0123459 0.006875 0.000239316 0.0123459 0.005375 -0.000239315 0.0123459 0.006875 -0.000239315 0.01231 0.005375 -0.000354604 0.0122605 0.005375 -0.000464722 0.01231 0.006875 -0.000354604 0.0119431 0.006875 -0.000822983 0.0118397 0.006875 -0.000885455 0.011375 0.005375 -0.000999999 0.0114955 0.006875 -0.000992708 0.0111357 0.005375 -0.000970941 0.0109103 0.005375 -0.000885455 0.0106265 0.005375 -0.000663122 0.010552 0.005375 -0.000568064 0.0106265 0.006875 -0.000663122 0.0104895 0.005375 -0.000464722 0.010552 0.006875 -0.000568064 0.0104895 0.006875 -0.000464722 0.0103823 0.006875 -0.000120536 0.010375 0.005375 7.04846e-10 0.0104895 0.005375 0.000464724 0.0107119 0.006875 0.000748512 0.0108069 0.006875 0.000822985 0.0109103 0.005375 0.000885457 0.0110204 0.006875 0.000935017 0.0111357 0.005375 0.000970943 0.0111357 0.006875 0.000970943 0.011375 0.006875 0.001 0.0114955 0.005375 0.000992709 0.0118397 0.005375 0.000885457 0.0118397 0.006875 0.000885457 0.0119431 0.006875 0.000822985 0.0120381 0.005375 0.000748512 0.0120381 0.006875 0.000748512 0.0122605 0.005375 0.000464724 0.0123459 0.005375 0.000239316 0.0123677 0.005375 0.000120538 0.012198 0.005375 -0.000568064 0.0121235 0.005375 -0.000663122 0.0120381 0.005375 -0.00074851 0.0120381 0.006875 -0.00074851 0.0117296 0.006875 -0.000935015 0.0116143 0.005375 -0.000970941 0.0116143 0.006875 -0.000970941 0.0114955 0.005375 -0.000992708 0.011375 0.006875 -0.000999999 0.0110204 0.006875 -0.000935015 0.0107119 0.005375 -0.00074851 0.01044 0.005375 -0.000354604 0.01044 0.006875 -0.000354604 0.0104041 0.006875 -0.000239315 0.0131177 0.006875 -0.00512054 0.01306 0.006875 -0.0053546 0.012948 0.006875 -0.00556806 0.0123643 0.006875 -0.00597094 0.0121235 0.006875 -0.000663122 0.012198 0.006875 -0.000568064 0.0122605 0.006875 -0.000464722 0.0123677 0.006875 -0.000120536 0.012375 0.006875 9.01547e-10 0.0123677 0.006875 0.000120538 0.01231 0.006875 0.000354606 0.0122605 0.006875 0.000464724 0.012198 0.006875 0.000568066 0.0121235 0.006875 0.000663124 0.0117296 0.006875 0.000935017 0.0116143 0.006875 0.000970943 0.00852951 0.006875 0.00579389 0.00835199 0.006875 0.0059455 0.00820322 0.006875 0.00599385 0.008125 0.006875 0.006 0.012125 0.006875 0.006 0.012125 0.006875 -0.006 0.00820322 0.006875 -0.00599384 0.00827951 0.006875 -0.00597553 0.00852951 0.006875 -0.00579389 0.0109103 0.006875 -0.000885455 0.0111357 0.006875 -0.000970941 0.0112545 0.006875 -0.000992708 0.008625 0.006875 0.0055 0.010375 0.006875 9.01547e-10 0.01044 0.006875 0.000354606 0.0104895 0.006875 0.000464724 
0.010552 0.006875 0.000568066 0.013125 0.006875 0.005 0.008625 0.006875 -0.0055 0.0107119 0.006875 -0.00074851 0.0108069 0.006875 -0.000822983 0.014625 -0.003625 0.008 0.013625 -0.004125 0.009 0.013125 -0.004625 0.013 0.014125 0.005375 0.012 0.013625 0.005875 0.009 0.014125 0.005375 0.0085 0.014125 -0.003625 0.012 0.014125 -0.003625 0.0085 0.013125 -0.004625 -0.009 0.014125 -0.003625 -0.012 0.014125 0.005375 -0.012 0.014625 -0.003625 -0.012 0.014625 -0.003625 -0.009 0.014125 0.005375 -0.009 0.014125 -0.003625 -0.009 0.00829657 -0.007875 0.006125 0.008625 -0.00764277 0.006125 0.011125 -0.00728922 0.008125 0.011125 -0.005875 0.006125 0.013125 -0.005875 0.008125 0.013125 -0.004625 0.009 0.013125 0.013375 -0.0115 0.013125 0.0126265 0.0118369 0.013125 0.006375 0.009 0.013125 0.01244 0.0121454 0.013125 0.006375 0.013 0.013125 0.0123823 0.0126205 0.013125 0.01244 0.0128546 0.013125 0.012552 0.0130681 0.013125 0.0126265 0.0131631 0.013125 0.0127119 0.0132485 0.013125 0.0129103 0.0133855 0.013125 0.0131357 0.0134709 0.013125 -0.0112704 0.013435 0.013125 -0.0118643 0.0134709 0.013125 -0.014625 0.0155 0.013125 -0.0123735 0.0131631 0.013125 -0.01256 0.0128546 0.013125 -0.0126177 0.0123795 0.013125 -0.0123735 0.0118369 0.013125 -0.0122881 0.0117515 0.013125 -0.0120897 0.0116145 0.013125 -0.0119796 0.011565 0.013125 -0.0117455 0.0115073 0.013125 -0.012875 0.010125 0.013125 -0.011625 0.0115 0.013125 -0.0112704 0.011565 0.013125 -0.0111603 0.0116145 0.013125 -0.0109619 0.0117515 0.013125 -0.0108765 0.0118369 0.013125 -0.00728922 0.010125 0.013125 -0.01069 0.0121454 0.013125 -0.010802 0.0130681 0.013125 -0.0108765 0.0131631 0.013125 0.005375 -0.008125 0.013125 -0.005875 -0.008125 0.013125 -0.0106323 -0.0126205 0.013125 -0.00728921 -0.010125 0.013125 -0.01069 -0.0121454 0.013125 -0.0107395 -0.0120353 0.013125 -0.0108765 -0.0118369 0.013125 -0.0111603 -0.0116145 0.013125 -0.0110569 -0.011677 0.013125 -0.0113857 -0.0115291 0.013125 -0.0117455 -0.0115073 0.013125 -0.0120897 -0.0116145 0.013125 -0.012875 -0.010125 0.013125 -0.012448 -0.0119319 0.013125 -0.0125105 -0.0120353 0.013125 -0.0126177 -0.0123795 0.013125 0.006375 -0.013 0.013125 0.0129103 -0.0133855 0.013125 0.0127119 -0.0132485 0.013125 0.01244 -0.0128546 0.013125 0.0123823 -0.0126205 0.013125 0.01244 -0.0121454 0.013125 0.006375 -0.009 0.013125 0.012552 -0.0119319 0.013125 0.0127119 -0.0117515 0.013125 0.0128069 -0.011677 0.013125 0.0131357 -0.0115291 0.013125 0.0132545 -0.0115073 0.013125 0.016375 -0.0155 0.013125 0.0143459 -0.0122607 0.013125 0.014198 -0.0130681 0.013125 0.0142605 -0.0129647 0.013125 0.0139431 -0.013323 0.013125 0.0138397 -0.0133855 0.013125 0.0134955 -0.0134927 0.013125 0.0132545 -0.0134927 0.013125 0.0131357 -0.0134709 0.013125 -0.012625 -0.0125 0.013125 -0.01256 -0.0128546 0.013125 -0.0125105 -0.0129647 0.013125 -0.014625 -0.0155 0.013125 -0.0122881 -0.0132485 0.013125 -0.0117455 -0.0134927 0.013125 -0.0112704 -0.013435 0.013125 -0.004625 -0.013 0.013125 -0.0108765 -0.0131631 0.013125 -0.010802 -0.0130681 0.013125 -0.0107395 -0.0129647 0.013125 -0.01069 -0.0128546 0.013125 -0.0106541 -0.0127393 0.013125 0.0142605 -0.0120353 0.013125 0.0140381 0.0117515 0.013125 0.0139431 0.011677 0.013125 0.0139431 -0.011677 0.013125 0.0137296 0.011565 0.013125 0.0134955 0.0115073 0.013125 0.0134955 -0.0115073 0.013125 0.0141235 0.0118369 0.013125 0.0140381 -0.0117515 0.013125 0.0136143 0.0115291 0.013125 0.0143677 0.0123795 0.013125 0.016375 0.0155 0.013125 0.0143677 0.0126205 0.013125 0.01431 0.0128546 0.013125 0.0143459 0.0127393 0.013125 
0.013375 0.0135 0.013125 0.0132545 0.0134927 0.011125 -0.00728921 -0.008125 0.0102966 -0.007875 -0.008125 0.008625 -0.00764277 -0.006125 0.011125 -0.005875 -0.006125 -0.00820748 -0.012875 -0.00297081 -0.00883427 -0.012875 -0.00299269 -0.00904252 -0.012875 -0.00297081 -0.00945191 -0.012875 -0.00288379 -0.00965106 -0.012875 -0.00281908 -0.00984521 -0.012875 -0.00274064 -0.0105534 -0.012875 -0.00229813 -0.010989 -0.012875 -0.00184699 -0.0112231 -0.012875 -0.0015 -0.014375 -0.012875 -0.007125 -0.0115794 -0.012875 -0.000520946 -0.0116086 -0.012875 -0.000313587 -0.0116232 -0.012875 -0.0001047 -0.0116232 -0.012875 0.000104697 -0.0114782 -0.012875 0.000927049 -0.0111121 -0.012875 0.00167758 -0.010709 -0.012875 0.00215802 -0.0105534 -0.012875 0.00229813 -0.014375 -0.012875 0.007125 -0.0103884 -0.012875 0.00242705 -0.0100334 -0.012875 0.00264884 -0.00965106 -0.012875 0.00281908 -0.008625 -0.012875 0.003 -0.00820748 -0.012875 0.0029708 -0.013375 -0.012875 0.008125 -0.00759894 -0.012875 0.00281908 -0.00740479 -0.012875 0.00274063 -0.00721658 -0.012875 0.00264884 -0.00703524 -0.012875 0.00254414 -0.00669664 -0.012875 0.00229813 -0.00654102 -0.012875 0.00215802 -0.00639557 -0.012875 0.00200739 -0.00626097 -0.012875 0.00184698 -0.00613789 -0.012875 0.00167758 -0.00592862 -0.012875 0.00131511 -0.00584345 -0.012875 0.00112382 -0.00577183 -0.012875 0.000927049 -0.00571411 -0.012875 0.000725764 -0.00577183 -0.012875 -0.000927053 -0.00613789 -0.012875 -0.00167758 -0.00626097 -0.012875 -0.00184699 -0.00669664 -0.012875 -0.00229813 -0.00721658 -0.012875 -0.00264884 -0.013375 -0.012875 -0.008125 -0.00759894 -0.012875 -0.00281908 -0.00800126 -0.012875 -0.00293444 0.011125 -0.012875 -0.008125 0.011125 -0.012875 0.008125 -0.013375 -0.007875 -0.008125 -0.013375 -0.007875 0.008125 -0.014375 -0.007875 -0.007125 -0.014375 -0.011375 -0.006125 -0.014375 -0.011375 0.006125 -0.014375 -0.007875 0.006125 0.0102966 -0.007875 0.008125 -0.014375 -0.007875 0.007125 -0.014375 -0.007875 -0.006125 0.00829657 -0.007875 -0.006125 0.013125 -0.0106323 0.0126205 0.014625 -0.0106323 0.0126205 0.013125 -0.0106541 0.0127393 0.013125 -0.01069 0.0128546 0.014625 -0.01069 0.0128546 0.013125 -0.0107395 0.0129647 0.014625 -0.0107395 0.0129647 0.013125 -0.0109619 0.0132485 0.013125 -0.0111603 0.0133855 0.013125 -0.0113857 0.0134709 0.013125 -0.0120897 0.0133855 0.013125 -0.0122881 0.0132485 0.014625 -0.0122881 0.0132485 0.013125 -0.012448 0.0130681 0.014625 -0.012448 0.0130681 0.014625 -0.0125105 0.0129647 0.013125 -0.0126177 0.0126205 0.014625 -0.012625 0.0125 0.014625 -0.0126177 0.0123795 0.013125 -0.01256 0.0121454 0.013125 -0.0125105 0.0120353 0.013125 -0.0118643 0.0115291 0.014625 -0.011625 0.0115 0.014625 -0.0111603 0.0116145 0.014625 -0.0110569 0.011677 0.013125 -0.010802 0.0119319 0.014625 -0.010802 0.0119319 0.013125 -0.010625 0.0125 0.014625 -0.010802 0.0130681 0.013125 -0.0110569 0.013323 0.013125 -0.0115045 0.0134927 0.013125 -0.011625 0.0135 0.014625 -0.011625 0.0135 0.013125 -0.0117455 0.0134927 0.013125 -0.0119796 0.013435 0.014625 -0.0120897 0.0133855 0.013125 -0.0121931 0.013323 0.014625 -0.0121931 0.013323 0.013125 -0.0125105 0.0129647 0.013125 -0.0125959 0.0127393 0.014625 -0.0125959 0.0127393 0.013125 -0.012625 0.0125 0.013125 -0.0125959 0.0122607 0.014625 -0.0125105 0.0120353 0.013125 -0.012448 0.0119319 0.014625 -0.012448 0.0119319 0.014625 -0.0123735 0.0118369 0.014625 -0.0122881 0.0117515 0.013125 -0.0121931 0.011677 0.013125 -0.0115045 0.0115073 0.014625 -0.0115045 0.0115073 0.013125 -0.0113857 0.0115291 0.013125 
-0.0110569 0.011677 0.013125 -0.0107395 0.0120353 0.013125 -0.0106541 0.0122607 0.013125 -0.0106323 0.0123795 0.014625 0.0143677 0.0126205 0.014625 0.014375 0.0125 0.013125 0.0142605 0.0129647 0.014625 0.01431 0.0128546 0.014625 0.014198 0.0130681 0.014625 0.0141235 0.0131631 0.013125 0.0137296 0.013435 0.014625 0.0136143 0.0134709 0.014625 0.0134955 0.0134927 0.013125 0.0130204 0.013435 0.014625 0.0131357 0.0134709 0.013125 0.0128069 0.013323 0.014625 0.0128069 0.013323 0.014625 0.0126265 0.0131631 0.014625 0.012552 0.0130681 0.014625 0.01244 0.0128546 0.014625 0.0124041 0.0127393 0.014625 0.0124041 0.0122607 0.014625 0.0124895 0.0120353 0.013125 0.0127119 0.0117515 0.013125 0.0131357 0.0115291 0.014625 0.0132545 0.0115073 0.013125 0.013375 0.0115 0.013125 0.0138397 0.0116145 0.014625 0.0137296 0.011565 0.013125 0.0142605 0.0120353 0.013125 0.0143459 0.0122607 0.013125 0.014198 0.0130681 0.013125 0.0141235 0.0131631 0.013125 0.0140381 0.0132485 0.013125 0.0139431 0.013323 0.014625 0.0139431 0.013323 0.013125 0.0138397 0.0133855 0.013125 0.0136143 0.0134709 0.013125 0.0134955 0.0134927 0.014625 0.013375 0.0135 0.014625 0.0132545 0.0134927 0.014625 0.0130204 0.013435 0.013125 0.0124895 0.0129647 0.014625 0.0124895 0.0129647 0.013125 0.0124041 0.0127393 0.013125 0.012375 0.0125 0.013125 0.0123823 0.0123795 0.013125 0.0124041 0.0122607 0.013125 0.0124895 0.0120353 0.013125 0.012552 0.0119319 0.014625 0.012552 0.0119319 0.013125 0.0128069 0.011677 0.014625 0.0128069 0.011677 0.013125 0.0129103 0.0116145 0.013125 0.0130204 0.011565 0.014625 0.0130204 0.011565 0.013125 0.0132545 0.0115073 0.014625 0.0139431 0.011677 0.014625 0.0140381 0.0117515 0.014625 0.0141235 0.0118369 0.013125 0.014198 0.0119319 0.014625 0.014198 0.0119319 0.013125 0.01431 0.0121454 0.014625 0.0143677 0.0123795 0.013125 0.014375 0.0125 0.013125 0.0143677 -0.0123795 0.014625 0.0143677 -0.0123795 0.013125 0.01431 -0.0121454 0.014625 0.01431 -0.0121454 0.014625 0.0141235 -0.0118369 0.014625 0.0139431 -0.011677 0.014625 0.0138397 -0.0116145 0.013125 0.0137296 -0.011565 0.014625 0.0137296 -0.011565 0.013125 0.0136143 -0.0115291 0.014625 0.0136143 -0.0115291 0.014625 0.0131357 -0.0115291 0.013125 0.0130204 -0.011565 0.014625 0.0129103 -0.0116145 0.014625 0.0127119 -0.0117515 0.014625 0.012552 -0.0119319 0.013125 0.0124895 -0.0120353 0.013125 0.0124041 -0.0122607 0.014625 0.01244 -0.0121454 0.013125 0.0123823 -0.0123795 0.014625 0.0124041 -0.0122607 0.013125 0.0124041 -0.0127393 0.014625 0.0123823 -0.0126205 0.014625 0.0124041 -0.0127393 0.013125 0.0124895 -0.0129647 0.014625 0.0124895 -0.0129647 0.013125 0.0126265 -0.0131631 0.014625 0.0128069 -0.013323 0.014625 0.0129103 -0.0133855 0.013125 0.0130204 -0.013435 0.014625 0.0130204 -0.013435 0.014625 0.0131357 -0.0134709 0.014625 0.0134955 -0.0134927 0.013125 0.0137296 -0.013435 0.014625 0.0141235 -0.0131631 0.014625 0.014198 -0.0130681 0.013125 0.0143459 -0.0127393 0.014625 0.01431 -0.0128546 0.013125 0.0143677 -0.0126205 0.014625 0.0143459 -0.0127393 0.013125 0.014198 -0.0119319 0.013125 0.0141235 -0.0118369 0.013125 0.0138397 -0.0116145 0.013125 0.0129103 -0.0116145 0.013125 0.0126265 -0.0118369 0.014625 0.0126265 -0.0118369 0.014625 0.0124895 -0.0120353 0.014625 0.0123823 -0.0123795 0.013125 0.012375 -0.0125 0.013125 0.012552 -0.0130681 0.014625 0.0126265 -0.0131631 0.013125 0.0128069 -0.013323 0.014625 0.0132545 -0.0134927 0.013125 0.013375 -0.0135 0.013125 0.0136143 -0.0134709 0.014625 0.0136143 -0.0134709 0.014625 0.0137296 -0.013435 0.014625 0.0139431 -0.013323 0.013125 
0.0140381 -0.0132485 0.014625 0.0140381 -0.0132485 0.013125 0.0141235 -0.0131631 0.013125 0.01431 -0.0128546 0.013125 0.014375 -0.0125 0.014625 -0.0106323 -0.0123795 0.013125 -0.0106323 -0.0123795 0.013125 -0.0106541 -0.0122607 0.014625 -0.01069 -0.0121454 0.014625 -0.0107395 -0.0120353 0.013125 -0.010802 -0.0119319 0.014625 -0.010802 -0.0119319 0.014625 -0.0108765 -0.0118369 0.014625 -0.0111603 -0.0116145 0.013125 -0.011625 -0.0115 0.014625 -0.0120897 -0.0116145 0.013125 -0.0122881 -0.0117515 0.014625 -0.0121931 -0.011677 0.014625 -0.0125105 -0.0120353 0.013125 -0.0125959 -0.0122607 0.014625 -0.0125959 -0.0122607 0.013125 -0.0126177 -0.0126205 0.014625 -0.0126177 -0.0126205 0.014625 -0.012448 -0.0130681 0.014625 -0.0122881 -0.0132485 0.013125 -0.0120897 -0.0133855 0.014625 -0.0121931 -0.013323 0.014625 -0.0119796 -0.013435 0.013125 -0.011625 -0.0135 0.014625 -0.0113857 -0.0134709 0.013125 -0.0110569 -0.013323 0.013125 -0.0109619 -0.0132485 0.014625 -0.0109619 -0.0132485 0.014625 -0.0108765 -0.0131631 0.014625 -0.010802 -0.0130681 0.014625 -0.0106323 -0.0126205 0.014625 -0.010625 -0.0125 0.013125 -0.010625 -0.0125 0.013125 -0.0109619 -0.0117515 0.014625 -0.0109619 -0.0117515 0.013125 -0.0112704 -0.011565 0.013125 -0.0115045 -0.0115073 0.014625 -0.0117455 -0.0115073 0.013125 -0.0118643 -0.0115291 0.013125 -0.0119796 -0.011565 0.013125 -0.0121931 -0.011677 0.013125 -0.0123735 -0.0118369 0.013125 -0.01256 -0.0121454 0.014625 -0.01256 -0.0121454 0.014625 -0.0126177 -0.0123795 0.013125 -0.0125959 -0.0127393 0.014625 -0.0125959 -0.0127393 0.014625 -0.0125105 -0.0129647 0.013125 -0.012448 -0.0130681 0.013125 -0.0123735 -0.0131631 0.013125 -0.0121931 -0.013323 0.013125 -0.0119796 -0.013435 0.013125 -0.0118643 -0.0134709 0.014625 -0.011625 -0.0135 0.013125 -0.0115045 -0.0134927 0.013125 -0.0113857 -0.0134709 0.014625 -0.0112704 -0.013435 0.013125 -0.0111603 -0.0133855 0.014625 -0.0110569 -0.013323 0.014625 -0.0107395 -0.0129647 0.014625 -0.01069 -0.0128546 0.014625 -0.0107395 0.0120353 0.014625 -0.01069 0.0121454 0.014625 -0.0106541 0.0122607 0.014625 -0.0106323 0.0123795 0.014625 -0.010625 0.0125 0.014625 -0.0106541 0.0127393 0.014625 -0.0108765 0.0131631 0.014625 -0.0109619 0.0132485 0.014625 -0.003625 0.012 0.014625 -0.0110569 0.013323 0.014625 -0.0111603 0.0133855 0.014625 0.005375 0.012 0.014625 0.0137296 0.013435 0.014625 0.0138397 0.0133855 0.014625 0.0140381 0.0132485 0.014625 0.0142605 0.0129647 0.014625 0.0143459 0.0127393 0.014625 0.017875 0.017 0.014625 0.0143459 0.0122607 0.014625 0.01431 0.0121454 0.014625 0.0142605 0.0120353 0.014625 0.0142605 -0.0120353 0.014625 0.014198 -0.0119319 0.014625 0.0143459 -0.0122607 0.014625 0.014375 -0.0125 0.014625 0.0143677 -0.0126205 0.014625 0.0142605 -0.0129647 0.014625 0.0138397 -0.0133855 0.014625 0.013375 -0.0135 0.014625 0.0127119 -0.0132485 0.014625 0.012552 -0.0130681 0.014625 0.01244 -0.0128546 0.014625 0.012375 -0.0125 0.014625 0.005375 -0.009 0.014625 0.0128069 -0.011677 0.014625 0.0130204 -0.011565 0.014625 0.0132545 -0.0115073 0.014625 0.013375 -0.0115 0.014625 0.013375 0.0115 0.014625 0.0131357 0.0115291 0.014625 0.0129103 0.0116145 0.014625 0.0127119 0.0117515 0.014625 0.0126265 0.0118369 0.014625 0.01244 0.0121454 0.014625 0.0123823 0.0123795 0.014625 0.0123823 0.0126205 0.014625 0.012375 0.0125 0.014625 0.0127119 0.0132485 0.014625 0.0129103 0.0133855 0.014625 0.005375 0.008 0.014625 -0.0115045 -0.0115073 0.014625 -0.0113857 -0.0115291 0.014625 -0.0112704 -0.011565 0.014625 -0.0110569 -0.011677 0.014625 -0.0106541 -0.0122607 0.014625 
-0.0106541 -0.0127393 0.014625 -0.0115045 -0.0134927 0.014625 -0.0117455 -0.0134927 0.014625 -0.0118643 -0.0134709 0.014625 -0.0120897 -0.0133855 0.014625 -0.0123735 -0.0131631 0.014625 -0.01256 -0.0128546 0.014625 -0.012625 -0.0125 0.014625 -0.01256 0.0121454 0.014625 -0.0125959 0.0122607 0.014625 -0.0126177 0.0126205 0.014625 -0.01256 0.0128546 0.014625 -0.0123735 0.0131631 0.014625 -0.016125 0.017 0.014625 -0.0119796 0.013435 0.014625 -0.0118643 0.0134709 0.014625 -0.0117455 0.0134927 0.014625 -0.0115045 0.0134927 0.014625 -0.0113857 0.0134709 0.014625 -0.0112704 0.013435 0.014625 0.017875 -0.017 0.014625 -0.016125 -0.017 0.014625 0.005375 -0.012 0.014625 -0.0111603 -0.0133855 0.014625 -0.0108765 0.0118369 0.014625 -0.0109619 0.0117515 0.014625 -0.0112704 0.011565 0.014625 -0.0113857 0.0115291 0.014625 -0.011625 -0.0115 0.014625 -0.0117455 0.0115073 0.014625 -0.0118643 -0.0115291 0.014625 -0.0119796 -0.011565 0.014625 -0.0119796 0.011565 0.014625 -0.0120897 0.0116145 0.014625 -0.0121931 0.011677 0.014625 -0.0118643 0.0115291 0.014625 -0.0122881 -0.0117515 0.014625 -0.0123735 -0.0118369 0.014625 -0.012448 -0.0119319 0.014625 0.0138397 0.0116145 0.014625 0.0136143 0.0115291 0.014625 0.0134955 -0.0115073 0.014625 0.0134955 0.0115073 0.014625 0.0140381 -0.0117515 0.0112706 0.000375 0.000742701 0.0112706 0.00204167 0.000742701 0.0111683 0.00204167 0.000720947 0.0109776 0.005375 0.000636037 0.0110699 0.00370833 0.00068516 0.0110699 0.00204167 0.00068516 0.0109776 0.00204167 0.000636036 0.0108176 0.005375 0.000501848 0.0108929 0.005375 0.000574534 0.0109776 0.00370833 0.000636036 0.0108929 0.00370833 0.000574534 0.0108176 0.00370833 0.000501848 0.0108176 0.00204167 0.000501848 0.0107532 0.00370833 0.000419395 0.0108929 0.000375 0.000574534 0.0107532 0.00204167 0.000419395 0.0107009 0.00370833 0.000328779 0.0107009 0.005375 0.000328779 0.0106364 0.005375 0.000130237 0.0107009 0.000375 0.000328778 0.0106617 0.00204167 0.000231763 0.0106617 0.00370833 0.000231763 0.0106364 0.00370833 0.000130237 0.0106255 0.005375 2.61753e-05 0.0106291 0.005375 -7.83956e-05 0.0106473 0.005375 -0.000181441 0.0106364 0.000375 0.000130236 0.0106291 0.00204167 -7.83961e-05 0.0106291 0.00370833 -7.83958e-05 0.0106291 0.000375 -7.83963e-05 0.0107255 0.005375 -0.000374999 0.0106796 0.00204167 -0.000280955 0.0106796 0.00370833 -0.000280954 0.0107255 0.00204167 -0.000375 0.010784 0.00370833 -0.000461746 0.010784 0.00204167 -0.000461746 0.0109342 0.00370833 -0.000606762 0.0109342 0.005375 -0.000606762 0.010854 0.00370833 -0.000539505 0.010784 0.000375 -0.000461746 0.0111185 0.00370833 -0.000704769 0.0110229 0.00370833 -0.00066221 0.0110229 0.000375 -0.00066221 0.0112191 0.00370833 -0.00073361 0.0112191 0.005375 -0.00073361 0.0113227 0.00370833 -0.000748173 0.0111185 0.000375 -0.00070477 0.0112191 0.00204167 -0.00073361 0.0113227 0.00204167 -0.000748173 0.0114273 0.005375 -0.000748173 0.0112191 0.000375 -0.000733611 0.0114273 0.00204167 -0.000748173 0.0114273 0.00370833 -0.000748173 0.0115309 0.00370833 -0.00073361 0.0115309 0.005375 -0.00073361 0.0113227 0.000375 -0.000748173 0.0116315 0.005375 -0.000704769 0.0116315 0.00370833 -0.000704769 0.0115309 0.000375 -0.000733611 0.0116315 0.00204167 -0.000704769 0.0117271 0.005375 -0.00066221 0.0117271 0.00370833 -0.00066221 0.0118158 0.005375 -0.000606762 0.0117271 0.00204167 -0.00066221 0.0118158 0.00370833 -0.000606762 0.0118158 0.00204167 -0.000606763 0.0118158 0.000375 -0.000606763 0.011896 0.00204167 -0.000539505 0.011966 0.005375 -0.000461746 0.011966 0.00370833 
-0.000461746 0.0120704 0.00370833 -0.000280954 0.0120245 0.000375 -0.000375 0.0121027 0.00204167 -0.000181441 0.0121027 0.00370833 -0.000181441 0.0121209 0.00370833 -7.83958e-05 0.0121245 0.005375 2.61753e-05 0.0121209 0.00204167 -7.83961e-05 0.0121136 0.00370833 0.000130237 0.0121209 0.000375 -7.83963e-05 0.0121245 0.000375 2.61746e-05 0.0121245 0.00204167 2.61748e-05 0.0120883 0.005375 0.000231764 0.0120491 0.005375 0.000328779 0.0121136 0.00204167 0.000130236 0.0120883 0.00204167 0.000231763 0.0120883 0.00370833 0.000231763 0.0119968 0.00370833 0.000419395 0.0120491 0.00370833 0.000328779 0.0120883 0.000375 0.000231763 0.0119324 0.005375 0.000501848 0.0118571 0.005375 0.000574534 0.0119324 0.00370833 0.000501848 0.0120491 0.000375 0.000328778 0.0119324 0.00204167 0.000501848 0.0118571 0.00370833 0.000574534 0.0117724 0.005375 0.000636037 0.0119324 0.000375 0.000501848 0.0117724 0.00370833 0.000636036 0.0117724 0.00204167 0.000636036 0.0116801 0.00370833 0.00068516 0.0115817 0.005375 0.000720947 0.0117724 0.000375 0.000636036 0.0115817 0.00370833 0.000720947 0.0115817 0.00204167 0.000720947 0.0114794 0.005375 0.000742702 0.0114794 0.00370833 0.000742702 0.0112706 0.005375 0.000742702 0.011375 0.00370833 0.000750001 0.0114794 0.000375 0.000742701 0.0114794 0.00204167 0.000742701 0.0112706 0.00370833 0.000742702 0.011375 0.000375 0.00075 0.0111683 0.00370833 0.000720947 0.011375 0.00204167 0.00075 0.0108929 0.00204167 0.000574534 0.0107009 0.00204167 0.000328779 0.0106364 0.00204167 0.000130236 0.0106255 0.00370833 2.6175e-05 0.0106255 0.00204167 2.61748e-05 0.0106473 0.00370833 -0.000181441 0.0106473 0.00204167 -0.000181441 0.0107255 0.00370833 -0.000375 0.010854 0.00204167 -0.000539505 0.0109342 0.00204167 -0.000606763 0.0110229 0.00204167 -0.00066221 0.0111185 0.00204167 -0.000704769 0.0115309 0.00204167 -0.00073361 0.011896 0.00370833 -0.000539505 0.011966 0.00204167 -0.000461746 0.0120245 0.00370833 -0.000375 0.0120245 0.00204167 -0.000375 0.0120704 0.00204167 -0.000280955 0.0121245 0.00370833 2.6175e-05 0.0120491 0.00204167 0.000328779 0.0119968 0.00204167 0.000419395 0.0118571 0.00204167 0.000574534 0.0116801 0.00204167 0.00068516 0.0114273 0.000375 -0.000748173 0.0115817 0.000375 0.000720946 0.0116801 0.000375 0.000685159 0.011896 0.000375 -0.000539505 0.0121027 0.000375 -0.000181441 0.0121136 0.000375 0.000130236 0.0116315 0.000375 -0.00070477 0.0117271 0.000375 -0.00066221 0.0118571 0.000375 0.000574534 0.011966 0.000375 -0.000461746 0.0119968 0.000375 0.000419395 0.0120704 0.000375 -0.000280955 0.0109342 0.000375 -0.000606763 0.010854 0.000375 -0.000539505 0.0106796 0.000375 -0.000280955 0.0106473 0.000375 -0.000181441 0.0106255 0.000375 2.61746e-05 0.0111683 0.000375 0.000720946 0.0110699 0.000375 0.000685159 0.0109776 0.000375 0.000636036 0.0108176 0.000375 0.000501848 0.0107532 0.000375 0.000419395 0.0107255 0.000375 -0.000375 0.0106617 0.000375 0.000231763 -0.00924874 -0.012875 -0.00293444 -0.0100334 -0.012875 -0.00264884 -0.00989586 -0.013875 -0.00154431 -0.0100239 -0.013875 -0.00142935 -0.0108544 -0.012875 -0.00200739 -0.010709 -0.012875 -0.00215802 -0.0102148 -0.012875 -0.00254415 -0.0097584 -0.013875 -0.00164785 -0.0103884 -0.012875 -0.00242705 -0.0111121 -0.012875 -0.00167758 -0.0103425 -0.013875 -0.00102474 -0.0113214 -0.012875 -0.00131512 -0.0114066 -0.012875 -0.00112382 -0.0104927 -0.013875 -0.000715398 -0.0105876 -0.013875 -0.000384905 -0.0114782 -0.012875 -0.000927053 -0.0115359 -0.012875 -0.000725767 -0.0106134 -0.013875 -0.000214765 -0.0106245 -0.013875 
-4.3034e-05 -0.0116086 -0.012875 0.000313584 -0.0115794 -0.012875 0.000520943 -0.0115359 -0.012875 0.000725764 -0.0105217 -0.013875 0.000634381 -0.0114066 -0.012875 0.00112382 -0.0102968 -0.013875 0.00109768 -0.010989 -0.012875 0.00184698 -0.0113214 -0.012875 0.00131511 -0.010385 -0.013875 0.0009499 -0.0112231 -0.012875 0.0015 -0.0101963 -0.013875 0.00123734 -0.0108544 -0.012875 0.00200739 -0.0100841 -0.013875 0.00136784 -0.00982825 -0.013875 0.00159756 -0.0102148 -0.012875 0.00254414 -0.00953681 -0.013875 0.00178005 -0.00924874 -0.012875 0.00293444 -0.00984521 -0.012875 0.00274063 -0.00945191 -0.012875 0.00288378 -0.00905204 -0.013875 0.00195388 -0.00904252 -0.012875 0.0029708 -0.00883427 -0.012875 0.00299269 -0.0088825 -0.013875 0.00198335 -0.00871104 -0.013875 0.00199815 -0.00853895 -0.013875 0.00199815 -0.00800126 -0.012875 0.00293444 -0.00841573 -0.012875 0.00299269 -0.00779809 -0.012875 0.00288378 -0.00771319 -0.013875 0.00178005 -0.00686164 -0.012875 0.00242705 -0.00678983 -0.013875 0.000795084 -0.00672828 -0.013875 0.000634381 -0.00567058 -0.012875 0.000520943 -0.00602692 -0.012875 0.0015 -0.00686498 -0.013875 0.0009499 -0.00668076 -0.013875 0.000468981 -0.00664765 -0.013875 0.000300109 -0.00564143 -0.012875 0.000313584 -0.00564143 -0.012875 -0.000313587 -0.00662917 -0.013875 0.000129015 -0.00562683 -0.012875 0.000104697 -0.00562683 -0.012875 -0.0001047 -0.00567058 -0.012875 -0.000520946 -0.00666239 -0.013875 -0.000384905 -0.00592862 -0.012875 -0.00131512 -0.00682574 -0.013875 -0.000873304 -0.00584345 -0.012875 -0.00112382 -0.00571411 -0.012875 -0.000725767 -0.00602692 -0.012875 -0.0015 -0.00690747 -0.013875 -0.00102474 -0.00654102 -0.012875 -0.00215802 -0.00710839 -0.013875 -0.0013038 -0.00639557 -0.012875 -0.00200739 -0.00722608 -0.013875 -0.00142935 -0.00735414 -0.013875 -0.00154431 -0.00703524 -0.012875 -0.00254415 -0.00686164 -0.012875 -0.00242705 -0.0074916 -0.013875 -0.00164785 -0.00740479 -0.012875 -0.00274064 -0.00779809 -0.012875 -0.00288379 -0.00811429 -0.013875 -0.0019337 -0.008625 -0.012875 -0.003 -0.00841573 -0.012875 -0.00299269 -0.008625 -0.017875 -0.002 -0.00879693 -0.017875 -0.0019926 -0.00896759 -0.017875 -0.00197044 -0.00896759 -0.015375 -0.00197044 -0.00913571 -0.015375 -0.0019337 -0.00913571 -0.013875 -0.0019337 -0.00896759 -0.013875 -0.00197044 -0.00879693 -0.015375 -0.0019926 -0.008625 -0.015375 -0.002 -0.00845307 -0.015375 -0.0019926 -0.00828241 -0.013875 -0.00197044 -0.00811429 -0.015375 -0.0019337 -0.00794995 -0.013875 -0.00188264 -0.00779061 -0.015375 -0.00181764 -0.00779061 -0.013875 -0.00181764 -0.00710839 -0.015375 -0.0013038 -0.00670274 -0.015375 -0.000552196 -0.00670274 -0.013875 -0.000552196 -0.00663656 -0.013875 -0.000214765 -0.00662546 -0.015375 -4.30342e-05 -0.00662546 -0.013875 -4.3034e-05 -0.00662917 -0.015375 0.000129015 -0.00695315 -0.015375 0.00109768 -0.0070537 -0.013875 0.00123734 -0.00716589 -0.013875 0.00136784 -0.00728887 -0.013875 0.00148821 -0.00742175 -0.013875 0.00159756 -0.00756354 -0.013875 0.00169508 -0.00803157 -0.015375 0.00190993 -0.00786958 -0.013875 0.00185185 -0.00819796 -0.015375 0.00195387 -0.00803157 -0.013875 0.00190993 -0.00819796 -0.013875 0.00195388 -0.0083675 -0.013875 0.00198335 -0.00905204 -0.015375 0.00195387 -0.00921843 -0.015375 0.00190993 -0.00938042 -0.015375 0.00185185 -0.00921843 -0.013875 0.00190993 -0.00968646 -0.013875 0.00169508 -0.00982825 -0.015375 0.00159756 -0.00996113 -0.015375 0.00148821 -0.00996113 -0.013875 0.00148821 -0.0102968 -0.015375 0.00109768 -0.0104602 -0.013875 0.000795084 -0.0105692 
-0.013875 0.000468981 -0.0106208 -0.015375 0.000129015 -0.0106208 -0.013875 0.000129015 -0.0105876 -0.015375 -0.000384905 -0.0104243 -0.015375 -0.000873305 -0.0102481 -0.015375 -0.0011686 -0.0101416 -0.015375 -0.0013038 -0.00961255 -0.015375 -0.00173918 -0.00961255 -0.013875 -0.00173918 -0.00945939 -0.013875 -0.00181764 -0.00930004 -0.013875 -0.00188264 -0.00913571 -0.017875 -0.0019337 -0.00930004 -0.015375 -0.00188264 -0.00930004 -0.017875 -0.00188264 -0.00945939 -0.017875 -0.00181764 -0.00945939 -0.015375 -0.00181764 -0.00961255 -0.017875 -0.00173918 -0.0097584 -0.017875 -0.00164785 -0.0097584 -0.015375 -0.00164785 -0.00989586 -0.017875 -0.00154432 -0.00989586 -0.015375 -0.00154432 -0.0100239 -0.015375 -0.00142935 -0.0101416 -0.013875 -0.0013038 -0.0102481 -0.013875 -0.0011686 -0.0102481 -0.017875 -0.0011686 -0.0103425 -0.015375 -0.00102474 -0.0104243 -0.013875 -0.000873304 -0.0103425 -0.017875 -0.00102474 -0.0105473 -0.013875 -0.000552196 -0.0104927 -0.015375 -0.000715398 -0.0105473 -0.015375 -0.000552196 -0.0106134 -0.015375 -0.000214765 -0.0106134 -0.017875 -0.000214765 -0.0106245 -0.015375 -4.30342e-05 -0.0106245 -0.017875 -4.30345e-05 -0.0106208 -0.017875 0.000129015 -0.0106024 -0.013875 0.000300109 -0.0106024 -0.015375 0.000300109 -0.0105692 -0.015375 0.000468981 -0.0105217 -0.017875 0.00063438 -0.0105217 -0.015375 0.000634381 -0.0104602 -0.015375 0.000795083 -0.010385 -0.015375 0.0009499 -0.010385 -0.017875 0.0009499 -0.0101963 -0.015375 0.00123734 -0.0100841 -0.015375 0.00136784 -0.00982825 -0.017875 0.00159756 -0.00968646 -0.015375 0.00169508 -0.00968646 -0.017875 0.00169508 -0.00953681 -0.015375 0.00178005 -0.00938042 -0.013875 0.00185185 -0.0088825 -0.015375 0.00198335 -0.00871104 -0.017875 0.00199815 -0.00853895 -0.015375 0.00199815 -0.00871104 -0.015375 0.00199815 -0.0083675 -0.015375 0.00198335 -0.00819796 -0.017875 0.00195387 -0.00786958 -0.017875 0.00185185 -0.00786958 -0.015375 0.00185185 -0.00771319 -0.015375 0.00178005 -0.00771319 -0.017875 0.00178005 -0.00756354 -0.015375 0.00169508 -0.00742175 -0.015375 0.00159756 -0.00728887 -0.017875 0.00148821 -0.00728887 -0.015375 0.00148821 -0.0070537 -0.015375 0.00123734 -0.00716589 -0.015375 0.00136784 -0.00695315 -0.013875 0.00109768 -0.0070537 -0.017875 0.00123734 -0.00695315 -0.017875 0.00109768 -0.00686498 -0.015375 0.0009499 -0.00686498 -0.017875 0.0009499 -0.00678983 -0.017875 0.000795083 -0.00678983 -0.015375 0.000795083 -0.00672828 -0.015375 0.000634381 -0.00668076 -0.017875 0.00046898 -0.00668076 -0.015375 0.000468981 -0.00664765 -0.015375 0.000300109 -0.00664765 -0.017875 0.000300108 -0.00662917 -0.017875 0.000129015 -0.00662546 -0.017875 -4.30345e-05 -0.00663656 -0.015375 -0.000214765 -0.00666239 -0.015375 -0.000384905 -0.00663656 -0.017875 -0.000214765 -0.00670274 -0.017875 -0.000552196 -0.00675733 -0.013875 -0.000715398 -0.00682574 -0.015375 -0.000873305 -0.00675733 -0.015375 -0.000715398 -0.00690747 -0.017875 -0.00102474 -0.00690747 -0.015375 -0.00102474 -0.00700192 -0.015375 -0.0011686 -0.00700192 -0.013875 -0.0011686 -0.00700192 -0.017875 -0.0011686 -0.00722608 -0.015375 -0.00142935 -0.00735414 -0.017875 -0.00154432 -0.00735414 -0.015375 -0.00154432 -0.00763745 -0.013875 -0.00173918 -0.0074916 -0.015375 -0.00164785 -0.0074916 -0.017875 -0.00164785 -0.00763745 -0.015375 -0.00173918 -0.00794995 -0.015375 -0.00188264 -0.00794995 -0.017875 -0.00188264 -0.00811429 -0.017875 -0.0019337 -0.00828241 -0.015375 -0.00197044 -0.00845307 -0.013875 -0.0019926 -0.00845307 -0.017875 -0.0019926 -0.008625 -0.013875 -0.002 
-0.00879693 -0.013875 -0.0019926 -0.00853895 -0.017875 0.00199815 -0.00828241 -0.017875 -0.00197044 -0.0083675 -0.017875 0.00198335 -0.00779061 -0.017875 -0.00181764 -0.00763745 -0.017875 -0.00173918 -0.00716589 -0.017875 0.00136784 -0.00710839 -0.017875 -0.0013038 -0.00682574 -0.017875 -0.000873305 -0.00666239 -0.017875 -0.000384905 -0.00803157 -0.017875 0.00190993 -0.00756354 -0.017875 0.00169508 -0.00742175 -0.017875 0.00159756 -0.00722608 -0.017875 -0.00142935 -0.00675733 -0.017875 -0.000715399 -0.00672828 -0.017875 0.00063438 -0.00905204 -0.017875 0.00195387 -0.00921843 -0.017875 0.00190993 -0.00938042 -0.017875 0.00185185 -0.00953681 -0.017875 0.00178005 -0.00996113 -0.017875 0.00148821 -0.0100239 -0.017875 -0.00142935 -0.0100841 -0.017875 0.00136784 -0.0102968 -0.017875 0.00109768 -0.0104602 -0.017875 0.000795083 -0.0105876 -0.017875 -0.000384905 -0.0105692 -0.017875 0.00046898 -0.0088825 -0.017875 0.00198335 -0.0101416 -0.017875 -0.0013038 -0.0101963 -0.017875 0.00123734 -0.0104243 -0.017875 -0.000873305 -0.0104927 -0.017875 -0.000715399 -0.0105473 -0.017875 -0.000552196 -0.0106024 -0.017875 0.000300108 0.008625 -0.011375 -0.006125 0.008625 -0.011375 0.006125 0.008625 0.005375 -0.006125 0.0116801 0.005375 0.00068516 0.011375 0.005375 0.000750001 0.011125 0.005375 0.006125 0.0111683 0.005375 0.000720947 0.0110699 0.005375 0.00068516 0.0107532 0.005375 0.000419395 0.0106617 0.005375 0.000231764 0.0106796 0.005375 -0.000280954 0.010784 0.005375 -0.000461746 0.010854 0.005375 -0.000539504 0.0110229 0.005375 -0.00066221 0.0111185 0.005375 -0.000704769 0.0113227 0.005375 -0.000748173 0.011896 0.005375 -0.000539504 0.011125 0.005375 -0.006125 0.0120245 0.005375 -0.000374999 0.0120704 0.005375 -0.000280954 0.0121027 0.005375 -0.000181441 0.0121209 0.005375 -7.83956e-05 0.0121136 0.005375 0.000130237 0.013125 0.005375 0.008125 0.0119968 0.005375 0.000419395 0.008625 0.005375 0.006125 - - - - - - - - - - 0.960928 -0.000738615 0.276797 0.946327 0.000741691 0.32321 0.942732 -0.000726469 0.33355 0.925305 0.000729491 0.379223 0.921188 -0.000714553 0.389116 0.900968 0.00071707 0.433886 0.873408 0.000705253 0.486988 0.84272 0.000692385 0.538352 0.809019 0.000679942 0.587783 0.772415 0.000666477 0.635118 0.733052 0.000653816 0.680173 0.691063 0.000640407 0.722794 0.646601 0.000627152 0.762828 0.59982 0.000613554 0.800135 0.550898 0.000600161 0.834572 0.5 0.000586023 0.866025 0.447313 0.000572032 0.894377 0.393024 0.000558078 0.919528 0.33733 0.000543791 0.941386 0.280428 0.000529378 0.959875 0.22252 0.000514854 0.974928 0.163818 0.000500352 0.98649 0.104529 0.000485523 0.994522 0.0448638 0.000470284 0.998993 -0.0149602 0.000455364 0.999888 -0.0747299 0.00044028 0.997204 -0.134231 0.000424774 0.99095 -0.193257 0.000408806 0.981148 -0.251587 0.000393288 0.967835 -0.309014 0.000377491 0.951057 -0.365342 0.000361096 0.930873 -0.420357 0.000345125 0.907359 -0.47387 0.000328745 0.880595 -0.525684 0.00031243 0.85068 -0.575614 0.000295883 0.817721 -0.623492 0.000278284 0.78183 -0.669131 0.000262246 0.743145 -0.712378 0.000245456 0.701796 -0.753068 0.000227674 0.657943 -0.791074 0.000210091 0.611721 -0.826236 0.000193291 0.563325 -0.858453 0.000174279 0.512893 -0.887582 0.000158057 0.460651 -0.913546 0.000138924 0.406736 -0.936236 0.000120672 0.351372 -0.955573 0.000103366 0.294756 -0.971491 8.48324e-05 0.237077 -0.983929 6.66017e-05 0.178561 -0.992847 4.70276e-05 0.119395 -0.99821 2.76809e-05 0.0597995 -1 1.0555e-05 0 -0.998224 -8.52809e-06 -0.0595785 -1 1.0555e-05 0 0.968313 0.000275585 0.249739 0.969772 
7.93292e-05 0.244014 0.971474 -3.27843e-05 0.237147 0.973633 0.000111871 0.22812 0.976213 0.00081914 0.216812 0.978146 0.00194309 0.207909 0.978392 0.00178045 0.206749 0.980285 0.000802203 0.197585 0.983192 -0.000204153 0.182575 0.98673 0.000386844 0.16237 0.98989 0.00101875 0.141836 0.992576 -0.000230358 0.121629 0.995568 0.000839369 0.0940402 0.995972 0.00186587 0.0896406 0.998339 -0.000872686 0.0576019 0.999774 0.000322872 0.021256 0.999885 -0.000273178 -0.0151724 0.99955 0.00237188 -0.0299147 0.99867 -0.000704605 -0.0515494 0.996107 0.00124429 -0.0881404 0.993224 -0.000236695 -0.116219 0.990572 0.000506318 -0.136995 0.988828 0.00223049 -0.149042 0.987362 0.00072762 -0.15848 0.983792 -0.000263584 -0.179315 0.980805 0.000582869 -0.194988 0.978819 0.0015317 -0.204724 0.978146 0.00195856 -0.207909 0.977105 0.00131043 -0.212756 0.975623 0.000663642 -0.219453 0.973726 0.000124176 -0.227724 0.971517 -3.4032e-05 -0.23697 0.969795 7.62062e-05 -0.243923 0.968705 0.000222857 -0.248214 0.965569 0.000676847 -0.260145 0.962445 0 -0.271476 0.960928 -0.000738529 -0.276797 0.946327 0.000741791 -0.32321 0.942732 -0.000726365 -0.33355 0.925305 0.000729609 -0.379223 0.921188 -0.000714435 -0.389116 0.896373 -0.000701333 -0.443301 0.868371 -0.00068888 -0.495915 0.837287 -0.000676035 -0.546763 0.803228 -0.000664035 -0.595672 0.766316 -0.000650379 -0.642463 0.726683 -0.000637549 -0.686973 0.684469 -0.000624307 -0.729042 0.639823 -0.000610951 -0.768522 0.592905 -0.000597061 -0.805272 0.543881 -0.000583194 -0.839162 0.492923 -0.000569598 -0.870073 0.440217 -0.000555608 -0.897891 0.385943 -0.000541512 -0.922523 0.330307 -0.000527194 -0.943874 0.273489 -0.000512691 -0.961875 0.215704 -0.000498457 -0.976459 0.157151 -0.000483135 -0.987574 0.0980393 -0.000468452 -0.995182 0.038582 -0.000453503 -0.999255 -0.0210136 -0.000438386 -0.999779 -0.0805355 -0.000422764 -0.996752 -0.139769 -0.000407418 -0.990184 -0.198511 -0.000391778 -0.980099 -0.256542 -0.000375874 -0.966533 -0.313666 -0.000359971 -0.949533 -0.369673 -0.00034384 -0.929162 -0.424367 -0.000327819 -0.90549 -0.477559 -0.000311073 -0.8786 -0.529047 -0.000294464 -0.848593 -0.57866 -0.00027796 -0.815569 -0.626215 -0.000260686 -0.77965 -0.671552 -0.000243976 -0.740958 -0.714501 -0.00022704 -0.699634 -0.754904 -0.00020991 -0.655836 -0.792636 -0.000191477 -0.609695 -0.82755 -0.000175443 -0.561392 -0.859521 -0.000156038 -0.511101 -0.888442 -0.00013941 -0.458989 -0.914208 -0.00012151 -0.405245 -0.936724 -0.00010241 -0.350068 -0.955916 -8.42065e-05 -0.293642 -0.971711 -6.53571e-05 -0.236174 -0.984054 -4.76035e-05 -0.177868 -0.992903 -2.93252e-05 -0.11893 -0.99821 2.76971e-05 -0.0597995 0.988828 0.00220278 0.149042 0.99955 0.00215728 0.0299148 0.995973 0.00161018 -0.0896406 0.963962 0.000904423 -0.266037 0.900968 0.000717206 -0.433886 0.873408 0.000705431 -0.486989 0.84272 0.000692706 -0.538351 0.809019 0.000680003 -0.587782 0.772414 0.000666524 -0.635119 0.733052 0.000653994 -0.680172 0.691063 0.000640519 -0.722794 0.6466 0.000627186 -0.762829 0.599821 0.000613642 -0.800134 0.550897 0.000600327 -0.834573 0.500001 0.000586657 -0.866024 0.447314 0.00057243 -0.894377 0.393023 0.000558401 -0.919529 0.337332 0.0005439 -0.941386 0.280427 0.000529712 -0.959875 0.222524 0.000515084 -0.974927 0.163815 0.000500116 -0.986491 0.104529 0.000485832 -0.994522 0.0448657 0.000470561 -0.998993 -0.0149583 0.000455332 -0.999888 -0.0747318 0.000440001 -0.997204 -0.134231 0.000424774 -0.99095 -0.193257 0.00040911 -0.981148 -0.251587 0.000393288 -0.967835 -0.309018 0.00037754 -0.951056 
-0.365341 0.000361373 -0.930874 -0.420355 0.000345234 -0.90736 -0.473873 0.000328784 -0.880593 -0.525683 0.000312544 -0.850681 -0.575615 0.000296026 -0.81772 -0.62349 0.00027899 -0.781831 -0.66913 0.000262586 -0.743145 -0.712378 0.000245244 -0.701795 -0.753068 0.000227669 -0.657943 -0.791075 0.000210294 -0.61172 -0.826235 0.000193633 -0.563326 -0.858453 0.000174438 -0.512893 -0.887582 0.000158271 -0.460651 -0.913546 0.00013905 -0.406736 -0.936236 0.000120781 -0.351372 -0.955573 0.000103458 -0.294756 -0.971491 8.4906e-05 -0.237077 -0.983929 6.66572e-05 -0.178561 -0.992847 4.70647e-05 -0.119395 -0.998224 -8.54312e-06 0.0595785 -0.992903 -2.93576e-05 0.11893 -0.984054 -4.76587e-05 0.177868 -0.971711 -6.54304e-05 0.236174 -0.955916 -8.42976e-05 0.293642 -0.936724 -0.000102519 0.350068 -0.914208 -0.000121636 0.405245 -0.888442 -0.000139555 0.458989 -0.859521 -0.000156273 0.511101 -0.82755 -0.000175617 0.561392 -0.792636 -0.000191859 0.609696 -0.754905 -0.0002101 0.655834 -0.714502 -0.000227034 0.699634 -0.67155 -0.00024377 0.74096 -0.626215 -0.000261049 0.77965 -0.578662 -0.000278697 0.815568 -0.529048 -0.000294578 0.848592 -0.477557 -0.000311237 0.878601 -0.424369 -0.000327799 0.90549 -0.369673 -0.000343985 0.929162 -0.313665 -0.000360289 0.949534 -0.256542 -0.000375873 0.966533 -0.198509 -0.000391805 0.980099 -0.139771 -0.000407697 0.990184 -0.0805355 -0.000422764 0.996752 -0.0210136 -0.000438075 0.999779 0.038582 -0.000453503 0.999255 0.0980393 -0.000468761 0.995182 0.157151 -0.000483441 0.987574 0.215704 -0.000498154 0.976459 0.27349 -0.000513025 0.961875 0.330304 -0.000527432 0.943874 0.385945 -0.000541709 0.922522 0.440217 -0.000555887 0.897891 0.492923 -0.000570003 0.870073 0.543879 -0.00058381 0.839163 0.592904 -0.00059715 0.805273 0.639823 -0.000611087 0.768522 0.68447 -0.000624325 0.729041 0.726682 -0.000637638 0.686973 0.766317 -0.000650594 0.642463 0.803227 -0.000664021 0.595672 0.837288 -0.000676147 0.546762 0.868371 -0.000689187 0.495915 0.896373 -0.000701471 0.443301 0.963962 0.000904324 0.266037 0.962445 0 0.271476 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -2.76257e-06 1 2.58896e-06 1.20143e-07 1 6.54509e-07 8.74229e-08 1 6.68693e-07 5.31514e-08 1 6.76037e-07 2.61081e-08 1 6.76189e-07 -1.42281e-08 1 6.76939e-07 -5.45758e-08 1 6.7546e-07 -8.63338e-08 1 
6.72403e-07 -5.15103e-08 1 6.7507e-07 0 1 0 0 1 0 0 1 0 0 1 0 1.82473e-08 1 6.77646e-07 -1.66768e-08 1 6.77322e-07 -9.45776e-08 1 6.70019e-07 -1.20896e-07 1 6.67276e-07 -1.34284e-07 1 6.62998e-07 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 3.11861e-06 1 1.09261e-06 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 1.34285e-07 1 6.62998e-07 9.45763e-08 1 6.70019e-07 5.15103e-08 1 6.7507e-07 8.63317e-08 1 6.72403e-07 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -3.11861e-06 1 1.09261e-06 1.20897e-07 1 6.67276e-07 5.45759e-08 1 6.75461e-07 1.66779e-08 1 6.77323e-07 1.42281e-08 1 6.76939e-07 -2.61081e-08 1 6.76189e-07 -6.6321e-08 1 6.73215e-07 -1.06302e-07 1 6.68025e-07 -1.45937e-07 1 6.60635e-07 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -1.82473e-08 1 6.77646e-07 -5.31514e-08 1 6.76037e-07 -8.79217e-08 1 6.72499e-07 -1.22442e-07 1 6.67044e-07 3.15092e-06 1 9.53071e-07 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.0636169 0.99797 0.00289862 0.0627945 0.998023 0.0025316 0.0638482 0.99793 0.00775501 0.0617973 0.998053 0.00849377 0.0634687 0.9979 0.0129591 0.206596 0.977516 0.0421909 0.365563 0.927787 0.0746688 0.513997 0.851341 0.105008 0.647589 0.750412 0.132324 0.762524 0.62791 0.155839 0.855492 0.487396 0.174871 0.923848 0.332912 0.188879 0.965657 0.168859 0.197465 0.978826 0.0402581 0.200695 0.978842 0.0424842 0.200161 0.991757 0.0423267 0.120938 0.991756 0.0424441 0.120911 0.978395 0.16891 0.119221 0.936031 0.33294 0.114007 0.866773 0.487411 0.105526 0.77257 0.627931 0.0940132 0.656119 0.750426 0.0798048 0.520765 0.851349 0.063312 0.370378 0.92779 0.0450093 0.209313 0.977518 0.0254233 0.209334 0.977513 0.0254388 0.206585 0.977519 0.0421805 0.0604257 0.998075 0.0139986 0.0624806 0.997882 0.0180977 0.197156 0.977516 0.0747724 0.337092 0.927785 0.159951 0.454329 0.85134 0.262308 0.543966 0.750413 0.375475 0.602864 0.627912 0.492221 0.629748 0.487379 0.604879 0.625296 0.33291 0.705815 0.592115 0.168863 0.787962 0.597259 0.0341044 0.801323 0.59999 0.0500126 0.798442 0.66004 0.0347564 0.750426 0.662314 0.0493604 0.747598 0.710846 0.16886 0.682777 0.730418 0.332914 0.596371 0.718619 0.487387 0.496025 0.674019 0.627907 0.389142 0.597158 0.750408 0.283356 0.490519 0.851342 0.18603 0.358387 0.927784 0.103806 0.202536 0.977516 0.058664 0.202534 0.977517 0.0586645 0.0587523 0.998084 0.019386 0.0608844 0.997878 0.0230904 0.190502 0.977516 0.0903946 0.323125 0.927786 0.186558 0.431749 0.85134 0.298015 0.511992 0.750411 0.418028 0.5613 0.627912 0.539137 0.579032 0.487384 0.653589 0.566477 0.332904 0.753843 0.526792 0.168861 0.833052 0.530565 0.0334509 0.846984 0.53378 0.0506755 0.844104 0.0567751 0.998082 0.0246607 0.0587081 0.997886 0.0278574 0.182611 0.977516 0.10543 0.307068 0.927785 0.211954 0.406369 0.85134 0.331789 0.476695 0.750411 0.457871 0.516097 0.627911 0.582556 0.524558 0.487391 0.698061 0.503978 0.332908 0.796981 0.458049 0.168868 0.87274 0.460397 0.0328049 0.887107 
0.46411 0.0513224 0.884289 0.0544915 0.998069 0.0298282 0.0559869 0.997908 0.0323242 0.173534 0.977516 0.119782 0.289018 0.927785 0.235975 0.378353 0.85134 0.363413 0.438306 0.750411 0.494744 0.467552 0.627907 0.622196 0.466688 0.487385 0.738009 0.438216 0.332904 0.83495 0.386338 0.168866 0.906768 0.387217 0.0321664 0.921427 0.391437 0.051965 0.918736 0.0518983 0.998043 0.034891 0.0527718 0.997942 0.0364257 0.163333 0.977516 0.133357 0.269092 0.927785 0.258466 0.347884 0.85134 0.392679 0.397073 0.750411 0.52841 0.415966 0.627911 0.657799 0.405791 0.487387 0.773167 0.369611 0.332902 0.867505 0.31212 0.168864 0.934915 0.311499 0.0315318 0.949723 0.31623 0.0526028 0.947223 0.0489892 0.998004 0.0398558 0.0491196 0.997987 0.0401051 0.152072 0.977516 0.146067 0.24742 0.927785 0.279281 0.315157 0.851341 0.419398 0.353267 0.75041 0.558648 0.361688 0.62791 0.689138 0.342259 0.487388 0.803313 0.298606 0.332907 0.894431 0.235879 0.168862 0.956999 0.233742 0.0309026 0.971807 0.238977 0.0532323 0.969565 0.0460889 0.997956 0.0442689 0.139826 0.977516 0.15783 0.224147 0.927784 0.298286 0.280388 0.851342 0.443396 0.307171 0.750409 0.585263 0.305061 0.62791 0.716008 0.276511 0.487387 0.828249 0.225663 0.332907 0.915559 0.158107 0.168866 0.972875 0.154453 0.0302851 0.987536 0.160178 0.0538545 0.985618 0.0403662 0.998061 0.0473718 0.0389836 0.997892 0.0518779 0.0357268 0.998079 0.0506132 0.0347906 0.997879 0.0550167 0.0979905 0.977517 0.186706 0.146249 0.927785 0.343259 0.166128 0.851341 0.497614 0.15818 0.750412 0.641764 0.124846 0.62791 0.768207 0.0702628 0.487386 0.870355 0 0.332904 0.942961 -0.0793125 0.168869 0.982442 -0.0873914 0.0284549 0.995768 -0.0803432 0.0556876 0.99521 -0.00664061 0.0290518 0.999556 0 0.0550789 0.998482 0.0793098 0.168869 0.982443 0.151261 0.332908 0.930748 0.208966 0.487388 0.847813 0.24646 0.627909 0.738232 0.259079 0.75041 0.608081 0.2438 0.85134 0.464522 0.199419 0.927784 0.315355 0.126671 0.977517 0.168569 0.112697 0.977517 0.178215 0.112697 0.977517 0.178216 0.0309289 0.998085 0.0535705 0.0302507 0.997879 0.0576377 0.0826497 0.977516 0.193987 0.118155 0.927785 0.353914 0.125549 0.85134 0.50937 0.106027 0.75041 0.652413 0.0626267 0.62791 0.775762 0 0.487385 0.873187 -0.0758756 0.332904 0.939903 -0.158107 0.168863 0.972876 -0.167572 0.0278551 0.985466 -0.160156 0.0562909 0.985485 0.0259689 0.998079 0.0562468 0.0254357 0.997892 0.0597001 0.0667728 0.977516 0.200008 0.0892925 0.927785 0.362273 0.0841537 0.85134 0.51782 0.0531869 0.75041 0.658829 0 0.627911 0.778286 -0.0702628 0.487385 0.870356 -0.151262 0.332908 0.930748 -0.235879 0.168867 0.956998 -0.246661 0.0272625 0.968718 -0.238929 0.0568812 0.96937 0.0208424 0.998061 0.0586445 0.0204234 0.997918 0.0611752 0.0504618 0.977516 0.204732 0.0598515 0.927785 0.368283 0.0422141 0.851341 0.522912 0 0.750411 0.660972 -0.0626267 0.62791 0.775762 -0.140069 0.487386 0.861879 -0.225664 0.332905 0.91556 -0.31212 0.168862 0.934915 -0.324136 0.0266811 0.945634 -0.316145 0.0574661 0.946969 0.0155423 0.998031 0.0607634 0.0152936 0.997956 0.062048 0.0338247 0.977516 0.20813 0.030024 0.927785 0.371906 0 0.85134 0.524614 -0.0531868 0.750411 0.658828 -0.124846 0.627911 0.768207 -0.208967 0.487387 0.847813 -0.298606 0.332907 0.894431 -0.386338 0.168864 0.906768 -0.399497 0.0260973 0.916363 -0.391306 0.0580518 0.918428 0.0101719 0.997987 0.0625908 0.0169672 0.977516 0.210175 0 0.927785 0.373116 -0.0422142 0.85134 0.522913 -0.106027 0.75041 0.652413 -0.186255 0.62791 0.755671 -0.27651 0.487386 0.828249 -0.36961 0.332906 0.867503 -0.458048 0.168867 
0.87274 -0.472245 0.0255191 0.881098 -0.463923 0.0586193 0.883934 0.00426762 0.998043 0.0623911 0 0.997908 0.0646488 -0.00141413 0.998069 0.0621052 -0.00522882 0.997886 0.0647712 -0.0338245 0.977516 0.208128 -0.0892923 0.927785 0.362273 -0.166128 0.85134 0.497616 -0.259078 0.75041 0.608081 -0.361688 0.62791 0.689138 -0.466688 0.487387 0.738008 -0.566477 0.332909 0.753841 -0.6536 0.168865 0.737761 -0.670169 0.0238478 0.741825 -0.661916 0.060313 0.747148 -0.608025 0.0244025 0.793543 -0.599669 0.0597568 0.798014 -0.526789 0.16886 0.833054 -0.438217 0.332904 0.83495 -0.342259 0.487388 0.803313 -0.246459 0.627911 0.738232 -0.158181 0.750412 0.641763 -0.0841535 0.85134 0.51782 -0.0300235 0.927785 0.371907 0 0.977517 0.210859 -0.0169671 0.977517 0.210175 -0.0169669 0.977517 0.210175 -0.00703047 0.998082 0.0614994 -0.0104453 0.997878 0.0642732 -0.050462 0.977516 0.204733 -0.118154 0.927785 0.353913 -0.205631 0.851341 0.482633 -0.307169 0.750412 0.58526 -0.415966 0.62791 0.6578 -0.52456 0.487385 0.698063 -0.625297 0.332904 0.705818 -0.710845 0.168861 0.682777 -0.727932 0.0232906 0.685254 -0.719865 0.0608621 0.691441 -0.0125876 0.998084 0.0605741 -0.0155673 0.997882 0.0631586 -0.0667726 0.977516 0.200008 -0.146248 0.927785 0.343259 -0.243799 0.85134 0.464523 -0.353268 0.750408 0.558649 -0.46755 0.627911 0.622194 -0.57903 0.487389 0.653588 -0.680065 0.33291 0.65321 -0.763483 0.168864 0.623361 -0.780934 0.0227505 0.624199 -0.773145 0.0613966 0.63125 -0.0180895 0.998075 0.0593297 -0.0205145 0.997899 0.061448 -0.0826496 0.977516 0.193986 -0.173396 0.927785 0.330378 -0.280387 0.851342 0.443395 -0.397074 0.75041 0.52841 -0.5161 0.627907 0.582558 -0.629746 0.487384 0.604877 -0.730419 0.332912 0.59637 -0.811166 0.168857 0.559908 -0.828826 0.0222239 0.559065 -0.821404 0.061934 0.566975 -0.0235423 0.998053 0.0577634 -0.0252105 0.997929 0.0591717 -0.097991 0.977516 0.186707 -0.199419 0.927784 0.315355 -0.315157 0.851342 0.419397 -0.438306 0.750412 0.494743 -0.5613 0.627912 0.539137 -0.676376 0.487379 0.552247 -0.776042 0.332908 0.535659 -0.853589 0.168867 0.492819 -0.871305 0.021693 0.490263 -0.864334 0.0624801 0.499022 -0.0289495 0.998018 0.05587 -0.0295883 0.997971 0.0563757 -0.112697 0.977517 0.178216 -0.224147 0.927784 0.298287 -0.347883 0.851341 0.392679 -0.476695 0.75041 0.457873 -0.602864 0.627912 0.492222 -0.71862 0.487386 0.496025 -0.816628 0.332905 0.471481 -0.851249 0.16883 0.496862 -0.904312 0.0670243 0.421577 -0.0340287 0.997971 0.053812 -0.126672 0.977517 0.168569 -0.247422 0.927784 0.279283 -0.378354 0.85134 0.363413 -0.511992 0.750412 0.418027 -0.640516 0.627913 0.442114 -0.756202 0.487387 0.436593 -0.831528 0.333139 0.444499 -0.877514 0.204898 0.433574 -0.0382533 0.998053 0.0492693 -0.0429579 0.9979 0.0484895 -0.0423361 0.998075 0.0453311 -0.0469139 0.997882 0.0450612 -0.163333 0.977516 0.133357 -0.307068 0.927785 0.211954 -0.454328 0.85134 0.262309 -0.597157 0.750408 0.283357 -0.661083 0.665995 0.345572 -0.680488 0.657779 0.322898 -0.696398 0.628135 0.347098 -0.744325 0.566193 0.354128 -0.775413 0.487681 0.401125 -0.756202 0.487386 0.436593 -0.674018 0.627906 0.389145 -0.640518 0.627906 0.442121 -0.543966 0.750413 0.375475 -0.43175 0.851339 0.298016 -0.431749 0.85134 0.298015 -0.0461654 0.998084 0.0411881 -0.0504391 0.997878 0.0411824 -0.173535 0.977516 0.119782 -0.323127 0.927785 0.186559 -0.473962 0.851341 0.224898 -0.605775 0.750667 0.263698 -0.683824 0.673514 0.28065 -0.0497443 0.998082 0.0368383 -0.0534794 0.997886 0.0369139 -0.18261 0.977516 0.105429 -0.33709 0.927786 0.15995 -0.487674 
0.851452 0.19288 -0.532794 0.822254 0.200071 -0.60674 0.75341 0.253456 -0.0530779 0.998069 0.0322774 -0.0559872 0.997908 0.0323245 -0.190502 0.977516 0.0903952 -0.34887 0.927784 0.132309 -0.473379 0.867676 0.151824 -0.470875 0.863936 0.17858 -0.0561658 0.998043 0.0274993 -0.0579306 0.997942 0.0274885 -0.197158 0.977516 0.0747723 -0.360441 0.927671 0.0975067 -0.34887 0.927784 0.13231 -0.0590099 0.998004 0.0224978 -0.0592913 0.997987 0.0224861 -0.202534 0.977517 0.0586638 -0.34447 0.935689 0.0763323 -0.341048 0.934841 0.0987843 -0.371749 0.920034 0.123855 -0.0613819 0.997956 0.0177796 -0.2066 0.977516 0.0421791 -0.312439 0.949254 0.036032 -0.299172 0.952242 0.0610786 -0.0612084 0.998061 0.0112724 -0.0644195 0.997892 0.00782197 -0.0616953 0.998079 0.00563331 -0.0650404 0.997879 0.00262091 -0.190974 0.981546 -0.00978738 -0.18757 0.982222 0.00755844 -0.1993 0.979843 0.0136958 -0.196923 0.980127 0.0239109 -0.208258 0.97755 0.0320045 -0.206597 0.977517 0.0421772 -0.0631903 0.997918 0.0129004 -0.0603932 0.998031 0.0169215 -0.0618574 0.998085 0 -0.0650404 0.997879 -0.00262091 -0.197458 0.979837 -0.030492 -0.325282 0.94303 -0.0699026 -0.283035 0.95855 -0.0327688 -0.0616953 0.998079 -0.00563331 -0.0644195 0.997892 -0.00782197 -0.206597 0.977517 -0.0421786 -0.341745 0.933568 -0.10799 -0.463494 0.874721 -0.141552 -0.374147 0.920651 -0.111429 -0.0612084 0.998061 -0.0112724 -0.0631903 0.997918 -0.0129004 -0.202536 0.977516 -0.0586644 -0.34887 0.927784 -0.132309 -0.473959 0.851342 -0.224897 -0.572422 0.750408 -0.330486 -0.640521 0.627906 -0.442117 -0.676373 0.487386 -0.552245 -0.680064 0.332912 -0.653209 -0.6536 0.168863 -0.737762 -0.608025 0.0244025 -0.793543 -0.661916 0.060313 -0.747148 -0.670169 0.0238478 -0.741825 -0.719865 0.0608621 -0.691441 -0.76348 0.168866 -0.623363 -0.776042 0.332908 -0.53566 -0.756201 0.487387 -0.436593 -0.696563 0.628131 -0.346772 -0.674819 0.662632 -0.324866 -0.677078 0.683859 -0.27185 -0.601778 0.75059 -0.272905 -0.60071 0.762057 -0.241694 -0.510647 0.837737 -0.193483 -0.473961 0.859992 -0.189141 -0.490519 0.851343 -0.18603 -0.0603932 0.998031 -0.0169215 -0.0613819 0.997956 -0.0177796 -0.197157 0.977516 -0.0747719 -0.337093 0.927784 -0.159952 -0.454327 0.851341 -0.262308 -0.543965 0.750414 -0.375474 -0.602863 0.627913 -0.492221 -0.629748 0.487379 -0.604879 -0.625296 0.33291 -0.705816 -0.592115 0.168868 -0.787962 -0.541905 0.0249627 -0.840069 -0.599669 0.0597568 -0.798014 -0.0592913 0.997987 -0.0224861 -0.190501 0.977516 -0.0903948 -0.323125 0.927786 -0.186558 -0.431749 0.85134 -0.298015 -0.511991 0.750413 -0.418026 -0.5613 0.627912 -0.539137 -0.579031 0.487384 -0.65359 -0.566478 0.332903 -0.753842 -0.526792 0.168863 -0.833052 -0.472245 0.0255191 -0.881098 -0.533531 0.0591836 -0.843708 -0.0561658 0.998043 -0.0274993 -0.0559872 0.997908 -0.0323245 -0.0530779 0.998069 -0.0322774 -0.0534794 0.997886 -0.0369139 -0.163333 0.977516 -0.133357 -0.269091 0.927785 -0.258465 -0.347884 0.85134 -0.39268 -0.397073 0.750412 -0.528409 -0.415966 0.627911 -0.6578 -0.405791 0.487387 -0.773167 -0.36961 0.332902 -0.867505 -0.312119 0.168867 -0.934915 -0.246662 0.027263 -0.968718 -0.316143 0.0574656 -0.946969 -0.324137 0.0266712 -0.945634 -0.391304 0.0580407 -0.918429 -0.458049 0.168872 -0.872739 -0.50398 0.332904 -0.796981 -0.524562 0.487385 -0.698062 -0.516102 0.627907 -0.582556 -0.476696 0.75041 -0.457871 -0.40637 0.851339 -0.33179 -0.307068 0.927785 -0.211954 -0.182609 0.977516 -0.10543 -0.173534 0.977516 -0.119782 -0.173535 0.977516 -0.119782 -0.0497443 0.998082 -0.0368382 -0.0504391 0.997878 
-0.0411824 -0.152072 0.977516 -0.146068 -0.24742 0.927786 -0.27928 -0.315158 0.851341 -0.419398 -0.353267 0.75041 -0.558647 -0.361688 0.62791 -0.689138 -0.34226 0.487388 -0.803313 -0.298607 0.332901 -0.894433 -0.235883 0.168862 -0.956998 -0.16757 0.0278434 -0.985467 -0.238932 0.0568721 -0.969369 -0.0461654 0.998084 -0.0411881 -0.0469139 0.997882 -0.0450612 -0.139827 0.977516 -0.15783 -0.224147 0.927784 -0.298286 -0.280388 0.851342 -0.443396 -0.307171 0.750409 -0.585263 -0.30506 0.62791 -0.716008 -0.276507 0.487393 -0.828246 -0.225666 0.332908 -0.915559 -0.158104 0.168869 -0.972875 -0.0873965 0.0284459 -0.995767 -0.160154 0.0562792 -0.985486 -0.0423361 0.998075 -0.0453312 -0.0429579 0.9979 -0.0484895 -0.126672 0.977517 -0.168569 -0.199419 0.927784 -0.315355 -0.243797 0.851342 -0.464521 -0.259077 0.750412 -0.608079 -0.246462 0.627908 -0.738234 -0.208968 0.487384 -0.847814 -0.15126 0.332907 -0.930749 -0.07931 0.168866 -0.982443 -0.00663677 0.0290724 -0.999555 -0.0803406 0.0557085 -0.99521 -0.0382533 0.998053 -0.0492693 -0.038639 0.997929 -0.0514193 -0.112697 0.977517 -0.178215 -0.173396 0.927784 -0.330379 -0.205631 0.85134 -0.482634 -0.20931 0.75041 -0.626956 -0.186256 0.62791 -0.755671 -0.140071 0.487385 -0.861879 -0.0758769 0.3329 -0.939904 0 0.168864 -0.985639 0.074148 0.0296665 -0.996806 0 0.0550844 -0.998482 -0.0339103 0.998018 -0.0530065 -0.0340287 0.997971 -0.053812 -0.0979906 0.977517 -0.186706 -0.146248 0.927785 -0.343259 -0.166128 0.851341 -0.497614 -0.15818 0.750412 -0.641764 -0.124844 0.627912 -0.768207 -0.0702625 0.487392 -0.870352 0 0.3329 -0.942962 0.0793101 0.168864 -0.982443 0.154452 0.0302856 -0.987536 0.0803461 0.0544769 -0.995277 -0.0295883 0.997971 -0.0563757 -0.0826497 0.977516 -0.193987 -0.118155 0.927785 -0.353914 -0.125548 0.85134 -0.50937 -0.106028 0.750412 -0.652411 -0.0626269 0.627907 -0.775764 0 0.487392 -0.873184 0.075877 0.3329 -0.939905 0.158104 0.168865 -0.972876 0.233743 0.0308969 -0.971807 0.160175 0.0538499 -0.985619 -0.0235423 0.998053 -0.0577634 -0.0205145 0.997899 -0.061448 -0.0180895 0.998075 -0.0593297 -0.0155673 0.997882 -0.0631586 -0.0338247 0.977516 -0.20813 -0.030024 0.927785 -0.371906 0 0.85134 -0.524614 0.0531867 0.750412 -0.658827 0.124844 0.627907 -0.76821 0.208968 0.487385 -0.847814 0.298606 0.332907 -0.894431 0.386336 0.168866 -0.906769 0.460397 0.0327952 -0.887107 0.391435 0.0519555 -0.918738 0.387217 0.0321664 -0.921427 0.316228 0.0526032 -0.947224 0.23588 0.168862 -0.956999 0.151263 0.332907 -0.930748 0.0702629 0.487392 -0.870352 0 0.627907 -0.778288 -0.0531855 0.750412 -0.658827 -0.0841533 0.851341 -0.517819 -0.0892919 0.927785 -0.362273 -0.0667728 0.977516 -0.200008 -0.0504618 0.977516 -0.204732 -0.0504625 0.977516 -0.204733 -0.0125876 0.998084 -0.0605741 -0.0104453 0.997878 -0.0642732 -0.0169672 0.977516 -0.210175 0 0.927785 -0.373116 0.0422135 0.85134 -0.522913 0.106028 0.75041 -0.652413 0.186256 0.627911 -0.755669 0.276509 0.487385 -0.828251 0.36961 0.332902 -0.867505 0.458049 0.168871 -0.872739 0.530564 0.0334465 -0.846985 0.464111 0.0513173 -0.884289 -0.00703047 0.998082 -0.0614994 -0.00522882 0.997886 -0.0647712 0 0.977517 -0.210859 0.030024 0.927785 -0.371906 0.0841549 0.851341 -0.517818 0.15818 0.750411 -0.641765 0.246461 0.62791 -0.738232 0.342258 0.487393 -0.80331 0.438216 0.332901 -0.834951 0.526791 0.168871 -0.833051 0.597259 0.0341043 -0.801323 0.53378 0.0506755 -0.844104 -0.00141413 0.998069 -0.0621052 0 0.997908 -0.0646488 0.0169671 0.977517 -0.210175 0.0598518 0.927785 -0.368285 0.125549 0.85134 -0.50937 0.209309 0.750412 
-0.626954 0.30506 0.627908 -0.71601 0.405791 0.487388 -0.773167 0.503978 0.332904 -0.796982 0.592115 0.168863 -0.787962 0.66004 0.0347564 -0.750426 0.59999 0.0500126 -0.798442 0.00426762 0.998043 -0.0623911 0.00515966 0.997942 -0.0639145 0.0338245 0.977516 -0.208128 0.0892923 0.927785 -0.362273 0.166128 0.85134 -0.497616 0.259078 0.75041 -0.608082 0.361688 0.62791 -0.689138 0.466688 0.487386 -0.738009 0.566476 0.332908 -0.753842 0.6536 0.168865 -0.737761 0.718504 0.0354296 -0.694619 0.662315 0.0493604 -0.747598 0.0100216 0.998004 -0.0623538 0.0101719 0.997987 -0.0625908 0.050462 0.977516 -0.204733 0.118154 0.927785 -0.353913 0.205631 0.851341 -0.482632 0.307169 0.750412 -0.585259 0.415966 0.62791 -0.6578 0.524559 0.487385 -0.698064 0.625298 0.332904 -0.705817 0.710845 0.168862 -0.682778 0.772267 0.0360972 -0.634272 0.720346 0.0486916 -0.691904 0.0152936 0.997956 -0.062048 0.0667728 0.977516 -0.200008 0.146249 0.927785 -0.343259 0.243799 0.85134 -0.464523 0.353268 0.750408 -0.558649 0.46755 0.627911 -0.622193 0.579029 0.487391 -0.653586 0.680064 0.33291 -0.65321 0.763482 0.168866 -0.623361 0.820973 0.0367794 -0.569781 0.773713 0.0480225 -0.631714 0.0208424 0.998061 -0.0586445 0.0254357 0.997892 -0.0597001 0.0259689 0.998079 -0.0562468 0.0302507 0.997879 -0.0576377 0.112697 0.977517 -0.178216 0.224147 0.927784 -0.298287 0.347883 0.851341 -0.392678 0.476695 0.750411 -0.457871 0.602863 0.627913 -0.492221 0.718619 0.487387 -0.496026 0.816629 0.332904 -0.471479 0.890476 0.168868 -0.422537 0.933766 0.0388388 -0.35577 0.902496 0.0459582 -0.42824 0.901989 0.038153 -0.43007 0.865082 0.0466594 -0.499455 0.811166 0.168866 -0.559905 0.730419 0.332907 -0.596373 0.629747 0.487379 -0.604881 0.516099 0.627911 -0.582554 0.397074 0.750411 -0.528409 0.280387 0.851342 -0.443396 0.173396 0.927784 -0.33038 0.0826496 0.977516 -0.193987 0.097991 0.977516 -0.186707 0.0979911 0.977517 -0.186706 0.0309289 0.998085 -0.0535705 0.0347906 0.997879 -0.0550167 0.126672 0.977517 -0.168569 0.247422 0.927784 -0.279283 0.378353 0.85134 -0.363413 0.511993 0.750411 -0.418028 0.640516 0.627912 -0.442115 0.756201 0.487388 -0.436592 0.851918 0.332903 -0.404242 0.921588 0.168866 -0.349515 0.959435 0.0395339 -0.279145 0.934057 0.0452539 -0.354244 0.0357268 0.998079 -0.0506132 0.0389836 0.997892 -0.0518779 0.139826 0.977517 -0.15783 0.269092 0.927785 -0.258465 0.406369 0.85134 -0.331789 0.543968 0.750411 -0.375476 0.674018 0.627907 -0.389144 0.788881 0.487387 -0.374328 0.881682 0.332907 -0.334381 0.946725 0.168865 -0.274219 0.978826 0.0402581 -0.200695 0.959565 0.0445662 -0.277938 0.0403662 0.998061 -0.0473718 0.0427673 0.997918 -0.0482744 0.152073 0.977516 -0.146068 0.289017 0.927785 -0.235975 0.431749 0.85134 -0.298015 0.572417 0.750413 -0.330483 0.703142 0.627911 -0.333645 0.816443 0.487385 -0.309639 0.90573 0.332907 -0.262348 0.965657 0.168859 -0.197465 0.991757 0.0423267 -0.120938 0.978842 0.0424842 -0.200161 0.044851 0.998031 -0.0438416 0.0460889 0.997956 -0.0442689 0.163333 0.977516 -0.133357 0.307069 0.927784 -0.211955 0.454329 0.85134 -0.262308 0.597157 0.750408 -0.283357 0.727712 0.627907 -0.275986 0.83871 0.487388 -0.242935 0.923848 0.332912 -0.188879 0.978409 0.168783 -0.119283 0.998285 0.042366 -0.0403985 0.991756 0.0424441 -0.120911 0.0491196 0.997987 -0.0401051 0.173534 0.977516 -0.119782 0.323127 0.927785 -0.186558 0.473962 0.85134 -0.224899 0.618021 0.75041 -0.234382 0.747557 0.627912 -0.21653 0.855491 0.487396 -0.174871 0.93606 0.332838 -0.114062 0.984839 0.168833 -0.0398435 0.998284 0.042413 0.0403875 0.998285 
0.042366 0.0403985 0.0518983 0.998043 -0.034891 0.0559869 0.997908 -0.0323242 0.0544915 0.998069 -0.0298282 0.0587081 0.997886 -0.0278574 0.197158 0.977516 -0.0747729 0.358386 0.927784 -0.103809 0.513996 0.851341 -0.105008 0.65617 0.750377 -0.0798486 0.777672 0.627885 -0.031423 0.872478 0.48738 0.0352537 0.872493 0.487353 0.0352694 0.942189 0.332909 0.0380868 0.942202 0.332871 0.0381066 0.984835 0.16886 0.0398308 0.984835 0.16886 -0.0398308 0.942202 0.332871 -0.0381066 0.936031 0.33294 -0.114007 0.866812 0.48733 -0.105576 0.77262 0.627862 -0.0940633 0.77257 0.627931 -0.0940132 0.0567751 0.998082 -0.0246607 0.0608844 0.997878 -0.0230904 0.202534 0.977516 -0.0586635 0.365563 0.927787 -0.0746688 0.520808 0.85132 -0.0633467 0.660453 0.750394 -0.0266733 0.777652 0.62791 0.0314065 0.777672 0.627885 0.031423 0.0587523 0.998084 -0.019386 0.0624806 0.997882 -0.0180977 0.206596 0.977516 -0.0421909 0.370409 0.927777 -0.0450325 0.524204 0.85133 -0.0211604 0.660434 0.750411 0.0266595 0.660453 0.750394 0.0266733 0.0604257 0.998075 -0.0139986 0.0634687 0.9979 -0.0129591 0.209334 0.977513 -0.0254388 0.372823 0.92778 -0.0150417 0.524186 0.851341 0.0211485 0.524204 0.85133 0.0211604 0.0617973 0.998053 -0.00849377 0.0638482 0.99793 -0.00775501 0.210694 0.977515 -0.00849682 0.372812 0.927785 0.0150347 0.372823 0.92778 0.0150417 0.0628616 0.998018 -0.00286421 0.0636169 0.997971 -0.00256476 0.21069 0.977516 0.0084941 0.210694 0.977515 0.00849682 -0.871305 0.021693 -0.490263 -0.853589 0.168865 -0.492819 -0.816627 0.332907 -0.47148 -0.756202 0.487386 -0.436593 -0.816628 0.332905 -0.47148 -0.776043 0.332905 -0.53566 -0.811168 0.168859 -0.559905 -0.821403 0.061934 -0.566975 -0.828826 0.0222239 -0.559065 -0.864334 0.0624801 -0.499022 -0.882507 0.0360776 -0.468913 -0.886584 0.105644 -0.450342 -0.877872 0.206405 -0.432132 -0.835612 0.343936 -0.428323 -0.794223 0.463287 -0.393159 -0.776006 0.487677 -0.399982 -0.751411 0.549099 -0.365886 -0.713126 0.608253 -0.348539 -0.851033 0.168828 -0.497231 -0.830858 0.333135 -0.445753 -0.677491 0.661559 -0.321476 -0.597154 0.750411 -0.283356 -0.597158 0.750408 -0.283355 -0.333062 0.94045 -0.0679975 -0.194975 0.980777 -0.00785684 -0.796952 0.45833 0.393448 -0.835469 0.341691 0.430393 -0.541905 0.0249627 0.840069 -0.533531 0.0591836 0.843708 -0.458049 0.168869 0.87274 -0.369609 0.332902 0.867506 -0.276511 0.487387 0.828248 -0.186255 0.62791 0.755671 -0.106027 0.75041 0.652413 -0.0422147 0.851341 0.522912 0 0.927785 0.373116 0.0169669 0.977517 0.210175 0.0100216 0.998004 0.0623538 0.00515967 0.997942 0.0639145 0.0741493 0.0296606 0.996806 0.0803487 0.054476 0.995277 0.158109 0.168863 0.972876 0.225665 0.332905 0.91556 0.276511 0.487387 0.828249 0.30506 0.627911 0.716007 0.307166 0.750412 0.585261 0.280387 0.851342 0.443396 0.224147 0.927784 0.298286 0.139825 0.977517 0.15783 0.044851 0.998031 0.0438416 0.0427673 0.997918 0.0482744 0.718504 0.0354296 0.694619 0.720346 0.0486916 0.691904 0.763481 0.168864 0.623363 0.776041 0.332907 0.535661 0.756201 0.487388 0.436593 0.703141 0.627911 0.333648 0.61802 0.75041 0.234383 0.503899 0.851341 0.145958 0.358381 0.927786 0.103807 0.772267 0.0360972 0.634272 0.773713 0.0480225 0.631714 0.811167 0.16886 0.559906 0.816628 0.332904 0.47148 0.78888 0.487387 0.374329 0.727714 0.627907 0.275982 0.634878 0.750408 0.183894 0.503903 0.851339 0.145957 0.820973 0.0367794 0.569781 0.822061 0.0473487 0.567427 0.853589 0.168865 0.492819 0.851919 0.332903 0.40424 0.816444 0.487385 0.309637 0.747556 0.627912 0.216533 0.634876 0.75041 0.183895 0.864309 0.0374685 
0.501563 0.865082 0.0466593 0.499455 0.890475 0.168867 0.422537 0.881682 0.332907 0.334381 0.838711 0.487388 0.242933 0.747561 0.627907 0.216531 0.901989 0.038153 0.43007 0.902495 0.0459582 0.42824 0.921587 0.168866 0.349515 0.90573 0.332907 0.262348 0.838708 0.487392 0.242935 0.933766 0.0388388 0.35577 0.934057 0.0452539 0.354244 0.946724 0.168865 0.274223 0.905729 0.332909 0.262348 0.959435 0.0395339 0.279145 0.959565 0.0445661 0.277938 0.946727 0.168857 0.274219 0.998284 0.042413 -0.0403875 0.864309 0.0374685 -0.501563 0.822061 0.0473488 -0.567427 0.763482 0.168862 -0.623363 0.680065 0.332913 -0.653208 0.579029 0.487384 -0.653592 0.46755 0.627907 -0.622198 0.353268 0.75041 -0.558647 0.243799 0.851342 -0.46452 0.146248 0.927785 -0.343259 0.0667728 0.977516 -0.200008 0.0155423 0.998031 -0.0607634 0.0204234 0.997918 -0.0611752 0.311499 0.031537 -0.949723 0.23898 0.0532365 -0.969564 0.158106 0.168869 -0.972875 0.0758774 0.332901 -0.939904 0 0.487392 -0.873184 -0.062627 0.627907 -0.775765 -0.106029 0.75041 -0.652413 -0.125549 0.85134 -0.50937 -0.118154 0.927785 -0.353913 -0.0826493 0.977516 -0.193986 -0.0289495 0.998018 -0.05587 -0.0252105 0.997929 -0.0591717 -0.399494 0.0260907 -0.916364 -0.463924 0.058615 -0.883933 -0.526788 0.168872 -0.833053 -0.566475 0.332909 -0.753842 -0.579028 0.487389 -0.653589 -0.5613 0.627912 -0.539137 -0.511992 0.750412 -0.418026 -0.43175 0.851339 -0.298015 -0.323128 0.927785 -0.186557 -0.190503 0.977516 -0.0903947 -0.0590099 0.998004 -0.0224978 -0.0579306 0.997942 -0.0274885 -0.727932 0.0232906 -0.685254 -0.773145 0.0613966 -0.63125 -0.763483 0.168859 -0.623362 -0.780934 0.0227505 -0.624199 0.21069 0.977516 -0.0084941 0.984839 0.168833 0.0398435 0.372812 0.927785 -0.0150347 0.524186 0.851341 -0.0211485 0.660434 0.750411 -0.0266595 0.777652 0.62791 -0.0314065 0.872478 0.48738 -0.0352537 0.872493 0.487353 -0.0352694 0.866772 0.487411 -0.105526 0.942189 0.332909 -0.0380868 0.163332 0.977516 0.133357 -0.0979911 0.977517 0.186706 -0.197157 0.977517 0.0747712 -0.112697 0.977517 -0.178216 0.033824 0.977516 -0.20813 0.0489892 0.998004 -0.0398558 0.0527718 0.997942 -0.0364257 0.18261 0.977516 -0.10543 0.33709 0.927786 -0.15995 0.49052 0.851342 -0.186029 0.634878 0.750408 -0.183896 0.762524 0.62791 -0.155839 0.855467 0.487451 -0.174835 0.370409 0.927777 0.0450324 0.365543 0.927796 0.0746509 0.520808 0.85132 0.0633467 0.513969 0.85136 0.104982 0.65617 0.750377 0.0798486 0.647559 0.750443 0.132294 0.77262 0.627862 0.0940633 0.762493 0.627957 0.155803 0.866812 0.48733 0.105576 0.855467 0.487451 0.174835 0.93606 0.332838 0.114062 0.923831 0.33298 0.18884 0.978409 0.168783 0.119283 0.965652 0.168938 0.197425 0.348871 0.927784 0.13231 0.348869 0.927785 0.13231 0.197158 0.977516 0.0747721 0.490521 0.851341 0.18603 0.618023 0.750408 0.234383 0.727708 0.627912 0.275984 0.816442 0.487388 0.309638 0.881681 0.332907 0.334381 0.921588 0.168865 0.349515 0.190502 0.977516 0.0903946 0.47396 0.851342 0.224897 0.473964 0.85134 0.224897 0.337089 0.927786 0.159951 0.597155 0.75041 0.283356 0.703145 0.627907 0.333647 0.788882 0.487385 0.374328 0.851917 0.332907 0.404241 0.890476 0.168866 0.422537 0.18261 0.977516 0.10543 0.323128 0.927785 0.186557 0.572422 0.750408 0.330486 0.572415 0.750413 0.330486 0.454329 0.85134 0.262308 0.674015 0.627911 0.389143 0.756202 0.487387 0.436593 0.816629 0.332903 0.47148 0.853588 0.168867 0.492819 0.173534 0.977516 0.119782 0.307069 0.927784 0.211954 0.431749 0.85134 0.298015 0.64052 0.627907 0.442117 0.640514 0.627912 0.442117 0.543968 0.750411 0.375475 
0.718618 0.487388 0.496025 0.776043 0.332904 0.535661 0.811165 0.168865 0.559907 0.289017 0.927785 0.235975 0.406369 0.85134 0.331789 0.511993 0.750411 0.418028 0.602863 0.627913 0.492221 0.676373 0.487387 0.552245 0.676379 0.487379 0.552244 0.730421 0.332907 0.59637 0.763483 0.168861 0.623362 0.152073 0.977516 0.146067 0.269091 0.927785 0.258466 0.247423 0.927784 0.279282 0.378354 0.85134 0.363413 0.347881 0.851341 0.392679 0.476695 0.750411 0.457871 0.438306 0.750411 0.494744 0.561301 0.627911 0.539137 0.516101 0.627907 0.582557 0.629745 0.487384 0.604879 0.579026 0.487391 0.653589 0.680063 0.332913 0.653209 0.680066 0.33291 0.653209 0.6253 0.332904 0.705815 0.710844 0.168864 0.682778 0.653599 0.168863 0.737763 0.6536 0.16886 0.737762 0.126672 0.977517 0.16857 0.315156 0.851342 0.419397 0.397074 0.75041 0.52841 0.467548 0.627911 0.622195 0.524562 0.487385 0.698062 0.566475 0.332908 0.753843 0.592116 0.168861 0.787962 0.199419 0.927784 0.315355 0.353269 0.750408 0.558649 0.415967 0.62791 0.6578 0.466688 0.487386 0.738009 0.50398 0.332904 0.796981 0.526788 0.168869 0.833053 0.173396 0.927784 0.330379 0.173395 0.927785 0.330378 0.0979916 0.977516 0.186707 0.243797 0.851342 0.464521 0.361688 0.62791 0.689138 0.40579 0.487388 0.773167 0.438218 0.332901 0.83495 0.458049 0.168867 0.87274 0.0826495 0.977516 0.193987 0.205631 0.85134 0.482634 0.20563 0.851341 0.482633 0.146248 0.927785 0.343259 0.259077 0.750412 0.608079 0.342259 0.487387 0.803313 0.369608 0.332907 0.867504 0.386339 0.168864 0.906767 0.0667728 0.977516 0.200008 0.118154 0.927785 0.353913 0.20931 0.75041 0.626956 0.209308 0.750412 0.626954 0.16613 0.85134 0.497616 0.246459 0.627911 0.738232 0.298606 0.332907 0.894431 0.312122 0.168861 0.934915 0.0504625 0.977516 0.204733 0.0892919 0.927785 0.362273 0.125548 0.851341 0.509369 0.186255 0.627909 0.755671 0.186255 0.62791 0.755671 0.158182 0.75041 0.641766 0.208967 0.487387 0.847813 0.235877 0.168866 0.956999 0.0338238 0.977516 0.208129 0.0598527 0.927785 0.368285 0.0841535 0.851341 0.51782 0.106027 0.75041 0.652413 0.140068 0.487387 0.861879 0.140069 0.487386 0.861879 0.124845 0.627911 0.768207 0.151262 0.332905 0.930749 0 0.977517 0.210859 0.0300234 0.927785 0.371906 0.0422148 0.85134 0.522913 0 0.85134 0.524614 0.0531865 0.750411 0.658828 0 0.750411 0.660972 0.0626268 0.62791 0.775762 0 0.627911 0.778285 0.0702632 0.487385 0.870356 0 0.487385 0.873187 0.0758756 0.332908 0.939902 0.0758772 0.332905 0.939903 0 0.332904 0.942961 0.0793126 0.168863 0.982443 0 0.168869 0.985639 0 0.168869 0.985639 -0.030024 0.927785 0.371906 -0.0531866 0.75041 0.658829 -0.0626268 0.627911 0.775762 -0.0702632 0.487386 0.870355 -0.0758772 0.332908 0.939902 -0.0793097 0.168863 0.982444 -0.0598518 0.927785 0.368285 -0.0598524 0.927785 0.368283 -0.033824 0.977516 0.20813 -0.0841536 0.851341 0.51782 -0.124845 0.62791 0.768208 -0.140069 0.487387 0.861879 -0.151261 0.332905 0.93075 -0.158109 0.168867 0.972875 -0.0504623 0.977516 0.204732 -0.125549 0.851341 0.509369 -0.125549 0.85134 0.50937 -0.0892921 0.927785 0.362273 -0.15818 0.75041 0.641766 -0.208966 0.487386 0.847814 -0.225665 0.332908 0.915559 -0.235877 0.168862 0.956999 -0.0667727 0.977516 0.200008 -0.118154 0.927785 0.353914 -0.209309 0.750412 0.626954 -0.209309 0.75041 0.626956 -0.166129 0.851341 0.497614 -0.246459 0.62791 0.738232 -0.298605 0.332906 0.894432 -0.312121 0.168864 0.934915 -0.0826495 0.977516 0.193987 -0.146249 0.927785 0.343259 -0.20563 0.85134 0.482635 -0.305061 0.627911 0.716007 -0.305061 0.62791 0.716008 -0.259078 0.750412 0.608079 
-0.342259 0.487388 0.803313 -0.386339 0.168867 0.906767 -0.173396 0.927784 0.330379 -0.243799 0.851342 0.46452 -0.307168 0.750409 0.585265 -0.361688 0.62791 0.689138 -0.405791 0.487388 0.773167 -0.405791 0.487387 0.773167 -0.438216 0.332901 0.834951 -0.112697 0.977517 0.178215 -0.199419 0.927784 0.315355 -0.280387 0.851341 0.443397 -0.224147 0.927784 0.298286 -0.353268 0.750411 0.558646 -0.315157 0.85134 0.419399 -0.415967 0.627911 0.657799 -0.397074 0.750412 0.528408 -0.466688 0.487385 0.73801 -0.46755 0.627907 0.622197 -0.503978 0.332904 0.796982 -0.503979 0.332908 0.79698 -0.524561 0.487389 0.69806 -0.526792 0.168869 0.833051 -0.566476 0.332903 0.753843 -0.592117 0.168866 0.787961 -0.592115 0.16886 0.787963 -0.038639 0.997929 0.0514193 -0.0339103 0.998018 0.0530065 -0.139826 0.977517 0.157829 -0.269091 0.927786 0.258465 -0.40637 0.851339 0.331789 -0.511992 0.750413 0.418026 -0.247421 0.927786 0.279279 -0.347883 0.85134 0.39268 -0.438306 0.75041 0.494745 -0.516099 0.627912 0.582553 -0.579029 0.487384 0.653591 -0.625298 0.33291 0.705814 -0.653599 0.168861 0.737763 -0.126672 0.977517 0.16857 -0.139826 0.977516 0.157831 -0.152073 0.977516 0.146068 -0.289017 0.927785 0.235974 -0.406371 0.851339 0.33179 -0.152073 0.977516 0.146067 -0.269091 0.927785 0.258466 -0.378354 0.85134 0.363413 -0.476694 0.750412 0.45787 -0.5613 0.627912 0.539137 -0.629746 0.487379 0.604881 -0.680065 0.332912 0.653209 -0.710846 0.168864 0.682776 -0.163333 0.977516 0.133358 -0.289017 0.927785 0.235975 -0.602863 0.627913 0.492221 -0.676375 0.487386 0.552242 -0.730419 0.332908 0.596373 -0.763482 0.168857 0.623364 -0.173534 0.977516 0.119781 -0.307068 0.927785 0.211954 -0.543965 0.750415 0.375473 -0.57242 0.750408 0.33049 -0.572416 0.750415 0.330482 -0.674017 0.62791 0.389141 -0.718619 0.487387 0.496024 -0.776042 0.332905 0.535661 -0.811167 0.168866 0.559904 -0.182611 0.977516 0.105431 -0.323126 0.927786 0.186556 -0.454327 0.851341 0.262308 -0.816627 0.332907 0.471479 -0.853589 0.168863 0.49282 -0.190502 0.977516 0.0903943 -0.337092 0.927784 0.159954 -0.47396 0.851342 0.224895 -0.597155 0.750411 0.283354 -0.202536 0.977516 0.0586656 -0.209321 0.977517 -0.0254163 -0.2066 0.977516 -0.0421777 -0.202534 0.977517 -0.058665 -0.358386 0.927784 -0.103806 -0.197158 0.977516 -0.0747716 -0.34887 0.927784 -0.13231 -0.182611 0.977516 -0.10543 -0.337089 0.927786 -0.159952 -0.473962 0.851341 -0.224896 -0.454329 0.85134 -0.262308 -0.572413 0.750415 -0.330486 -0.674016 0.62791 -0.389143 -0.67402 0.627906 -0.389143 -0.853588 0.168868 -0.49282 -0.811164 0.168868 -0.559907 -0.307068 0.927785 -0.211954 -0.543967 0.750413 -0.375474 -0.640514 0.627913 -0.442117 -0.718619 0.487387 -0.496025 -0.71862 0.487386 -0.496025 -0.289018 0.927785 -0.235974 -0.289017 0.927785 -0.235974 -0.163333 0.977516 -0.133357 -0.406371 0.851339 -0.33179 -0.602864 0.627912 -0.492221 -0.676378 0.487379 -0.552244 -0.730421 0.332908 -0.596371 -0.730418 0.332912 -0.596372 -0.152073 0.977516 -0.146068 -0.378354 0.85134 -0.363413 -0.378353 0.85134 -0.363413 -0.269091 0.927786 -0.258465 -0.476693 0.750412 -0.457871 -0.629745 0.487384 -0.604879 -0.680065 0.33291 -0.653209 -0.710845 0.168866 -0.682777 -0.710846 0.168863 -0.682776 -0.139825 0.977517 -0.15783 -0.247423 0.927784 -0.279281 -0.438307 0.75041 -0.494744 -0.438305 0.750412 -0.494744 -0.347882 0.851341 -0.392679 -0.516097 0.627912 -0.582555 -0.6253 0.332904 -0.705815 -0.653598 0.168867 -0.737763 -0.126671 0.977517 -0.168569 -0.224148 0.927784 -0.298286 -0.315156 0.851342 -0.419397 -0.467552 0.627908 -0.622196 -0.467548 
0.627911 -0.622195 -0.397074 0.75041 -0.528409 -0.524559 0.487389 -0.698062 -0.592117 0.168862 -0.787961 -0.199418 0.927784 -0.315355 -0.280387 0.851342 -0.443396 -0.353269 0.750408 -0.558649 -0.415967 0.62791 -0.6578 -0.466689 0.487385 -0.738009 -0.466687 0.487387 -0.738009 -0.503978 0.332908 -0.796981 -0.0979915 0.977516 -0.186707 -0.173395 0.927785 -0.330378 -0.146249 0.927785 -0.343259 -0.243801 0.85134 -0.464522 -0.20563 0.851341 -0.482633 -0.307166 0.750412 -0.585261 -0.259079 0.75041 -0.608081 -0.361688 0.62791 -0.689138 -0.305062 0.627907 -0.71601 -0.40579 0.487388 -0.773167 -0.342256 0.487394 -0.803311 -0.438216 0.332904 -0.83495 -0.438217 0.332901 -0.834951 -0.36961 0.332902 -0.867505 -0.458049 0.168871 -0.872739 -0.386338 0.168867 -0.906767 -0.386335 0.168872 -0.906768 -0.0667726 0.977516 -0.200008 -0.166129 0.85134 -0.497616 -0.209308 0.750412 -0.626955 -0.246459 0.62791 -0.738232 -0.276514 0.487384 -0.828249 -0.298603 0.332907 -0.894432 -0.312121 0.168862 -0.934915 -0.0892925 0.927785 -0.362273 -0.158181 0.750411 -0.641765 -0.186255 0.627911 -0.75567 -0.208968 0.487385 -0.847814 -0.225666 0.332907 -0.915559 -0.235879 0.168869 -0.956997 -0.0598515 0.927785 -0.368283 -0.0598527 0.927785 -0.368285 -0.0338238 0.977516 -0.208129 -0.0841553 0.85134 -0.517821 -0.124847 0.627907 -0.768209 -0.140066 0.487392 -0.861876 -0.151264 0.332901 -0.930751 -0.158106 0.168865 -0.972876 -0.0169669 0.977517 -0.210175 -0.0422133 0.851341 -0.522911 -0.0422148 0.85134 -0.522913 -0.0300234 0.927785 -0.371906 -0.0531869 0.75041 -0.658829 -0.0702629 0.487391 -0.870352 -0.0758774 0.3329 -0.939905 -0.079311 0.168864 -0.982443 0 0.977517 -0.210859 0 0.927785 -0.373116 0 0.750412 -0.660971 0 0.750412 -0.660971 0 0.85134 -0.524614 0 0.627907 -0.778288 0 0.3329 -0.942962 0 0.168864 -0.985639 0.0169669 0.977517 -0.210175 0.0300235 0.927785 -0.371907 0.0422146 0.851341 -0.522911 0.0626269 0.627907 -0.775765 0.062627 0.627907 -0.775764 0.0531856 0.75041 -0.658829 0.0702625 0.487391 -0.870352 0.0793111 0.168866 -0.982443 0.0598524 0.927785 -0.368283 0.0841537 0.85134 -0.517821 0.106029 0.750411 -0.652412 0.124846 0.627912 -0.768206 0.14007 0.487392 -0.861876 0.140067 0.487385 -0.86188 0.15126 0.332901 -0.930751 0.0504623 0.977516 -0.204732 0.0892921 0.927785 -0.362273 0.118154 0.927785 -0.353914 0.125549 0.85134 -0.50937 0.166129 0.851341 -0.497614 0.158181 0.750412 -0.641763 0.209308 0.75041 -0.626957 0.186255 0.627909 -0.755671 0.24646 0.627908 -0.738234 0.208968 0.487385 -0.847814 0.276512 0.487393 -0.828245 0.225666 0.332907 -0.915559 0.225666 0.332908 -0.915559 0.298604 0.332902 -0.894434 0.235883 0.168869 -0.956997 0.312121 0.168866 -0.934914 0.312119 0.168861 -0.934916 0.0826496 0.977516 -0.193987 0.205631 0.85134 -0.482634 0.259079 0.750412 -0.608079 0.305061 0.62791 -0.716008 0.342257 0.487388 -0.803314 0.369609 0.332902 -0.867505 0.386337 0.168871 -0.906767 0.173396 0.927785 -0.330378 0.307168 0.750409 -0.585265 0.361688 0.62791 -0.689138 0.405791 0.487387 -0.773168 0.438217 0.332904 -0.834949 0.458049 0.16887 -0.872739 0.199419 0.927784 -0.315355 0.199419 0.927784 -0.315355 0.112697 0.977517 -0.178215 0.280387 0.851342 -0.443395 0.415967 0.627911 -0.657799 0.466688 0.487385 -0.738009 0.503979 0.332908 -0.79698 0.526789 0.168863 -0.833054 0.126672 0.977517 -0.16857 0.315156 0.851342 -0.419397 0.315156 0.851341 -0.419398 0.224147 0.927784 -0.298286 0.397074 0.75041 -0.528411 0.52456 0.487391 -0.698059 0.566476 0.332904 -0.753844 0.592116 0.168865 -0.787962 0.139826 0.977516 -0.157831 0.247421 0.927785 
-0.27928 0.438306 0.750411 -0.494744 0.438306 0.750411 -0.494744 0.347883 0.85134 -0.39268 0.516099 0.627907 -0.582559 0.625299 0.33291 -0.705814 0.653599 0.168862 -0.737763 0.152072 0.977516 -0.146067 0.269092 0.927785 -0.258466 0.378353 0.85134 -0.363412 0.561301 0.627911 -0.539137 0.561301 0.627912 -0.539136 0.476695 0.750411 -0.457871 0.629747 0.487384 -0.604877 0.710846 0.168866 -0.682776 0.163333 0.977516 -0.133357 0.289018 0.927785 -0.235976 0.406369 0.85134 -0.331789 0.511992 0.750411 -0.418027 0.676376 0.487379 -0.552248 0.676375 0.487387 -0.552242 0.602863 0.627912 -0.492222 0.73042 0.332914 -0.596369 0.173534 0.977516 -0.119782 0.307068 0.927785 -0.211953 0.431749 0.85134 -0.298015 0.323126 0.927786 -0.186556 0.543967 0.750413 -0.375474 0.454328 0.85134 -0.262308 0.640518 0.627907 -0.44212 0.57242 0.750408 -0.330489 0.718618 0.487388 -0.496025 0.674016 0.627911 -0.389141 0.776042 0.332907 -0.535661 0.776042 0.332904 -0.535662 0.756201 0.487387 -0.436593 0.811166 0.168862 -0.559907 0.816629 0.332903 -0.47148 0.853589 0.168868 -0.492818 0.853589 0.168866 -0.492819 0.182611 0.977516 -0.105431 0.190502 0.977516 -0.0903946 0.348869 0.927785 -0.132309 0.5039 0.851341 -0.145956 0.647589 0.750412 -0.132324 0.762493 0.627957 -0.155803 0.190502 0.977516 -0.0903946 0.337091 0.927785 -0.159952 0.47396 0.851342 -0.224895 0.597155 0.75041 -0.283355 0.703144 0.627907 -0.333649 0.788881 0.487385 -0.374329 0.851918 0.332907 -0.404239 0.890475 0.168866 -0.422537 0.197157 0.977516 -0.0747715 0.348871 0.927784 -0.132311 0.490521 0.851341 -0.18603 0.618023 0.750408 -0.234384 0.727709 0.627912 -0.275981 0.816442 0.487388 -0.309637 0.881682 0.332907 -0.334381 0.921588 0.168865 -0.349515 0.202536 0.977516 -0.058665 0.358381 0.927786 -0.103804 0.503902 0.851339 -0.145959 0.634876 0.75041 -0.183894 0.74756 0.627907 -0.216534 0.838709 0.487392 -0.242933 0.905729 0.332909 -0.262347 0.946725 0.168857 -0.274223 0.206585 0.977519 -0.0421805 0.365543 0.927796 -0.0746509 0.513969 0.85136 -0.104982 0.647559 0.750443 -0.132294 0.923831 0.33298 -0.18884 0.965652 0.168938 -0.197425 0.209313 0.977518 -0.0254233 0.370378 0.92779 -0.0450093 0.520765 0.851349 -0.063312 0.656119 0.750426 -0.0798047 0.978395 0.16891 -0.119221 0.998964 6.35858e-09 -0.0455164 0.990686 1.90218e-08 -0.136163 0.990686 1.78415e-08 -0.136163 0.974199 3.15287e-08 -0.225691 0.94964 4.29153e-08 -0.313344 0.974199 3.09104e-08 -0.225691 0.974199 3.1485e-08 -0.225691 0.990686 1.86488e-08 -0.136163 0.990686 1.77164e-08 -0.136163 0.998964 5.96876e-09 -0.0455164 0.998964 -5.9222e-09 0.0455164 0.990686 -1.86488e-08 0.136163 0.974199 -3.09104e-08 0.225691 0.94964 -3.43322e-08 0.313344 0.917212 -4.36517e-08 0.3984 0.87718 -5.261e-08 0.480161 0.829889 -6.11308e-08 0.557929 0.77571 -6.91468e-08 0.631089 0.715108 -7.65891e-08 0.699014 0.648586 -8.33963e-08 0.761142 0.576681 -8.95132e-08 0.81697 0.500002 -9.48879e-08 0.866024 0.419175 -9.94768e-08 0.907905 0.334876 -1.03241e-07 0.942262 0.24781 -1.0615e-07 0.968809 0.158682 -1.08179e-07 0.98733 0.0682419 -1.09312e-07 0.997669 -0.022764 -1.09539e-07 0.999741 -0.113579 -1.08858e-07 0.993529 -0.203457 -1.07276e-07 0.979084 -0.291646 -1.04804e-07 0.956526 -0.37742 -1.01464e-07 0.926042 -0.460063 -9.72833e-08 0.887886 -0.538895 -9.22966e-08 0.842373 -0.613271 -8.65442e-08 0.789873 -0.682553 -8.00758e-08 0.730836 -0.746186 -7.29431e-08 0.665738 -0.80363 -6.52067e-08 0.595129 -0.854416 -5.693e-08 0.519589 -0.89813 -4.818e-08 0.43973 -0.934395 -3.90322e-08 0.356239 -0.962915 -3.69524e-08 0.269806 -0.983463 
-2.35641e-08 0.181107 -0.995857 -1.18314e-08 0.0909333 -1 0 0 -0.995857 1.18314e-08 -0.0909333 -0.983463 2.48043e-08 -0.181107 -0.962915 2.95619e-08 -0.269806 -0.934395 3.90322e-08 -0.356239 -0.89813 4.818e-08 -0.43973 -0.854416 5.693e-08 -0.519589 -0.80363 6.52067e-08 -0.595129 -0.746186 7.29431e-08 -0.665738 -0.682553 8.00758e-08 -0.730836 -0.613271 8.65442e-08 -0.789873 -0.538895 9.22966e-08 -0.842373 -0.460063 9.72833e-08 -0.887886 -0.37742 1.01464e-07 -0.926042 -0.291646 1.04804e-07 -0.956526 -0.203457 1.07276e-07 -0.979084 -0.113579 1.08858e-07 -0.993529 -0.022764 1.09539e-07 -0.999741 0.0682419 1.09312e-07 -0.997669 0.158682 1.08179e-07 -0.98733 0.24781 1.0615e-07 -0.968809 0.334876 1.03241e-07 -0.942262 0.419175 9.94768e-08 -0.907905 0.500002 9.48879e-08 -0.866024 0.576681 8.95132e-08 -0.81697 0.648586 8.33963e-08 -0.761142 0.715108 7.65891e-08 -0.699014 0.77571 6.91468e-08 -0.631089 0.829889 6.11308e-08 -0.557929 0.87718 5.261e-08 -0.480161 0.917212 4.36517e-08 -0.3984 0.94964 3.41087e-08 -0.313344 0.94964 4.37736e-08 -0.313344 0.974199 3.15287e-08 -0.225691 0.94964 6.24452e-08 -0.313344 0.917212 7.42079e-08 -0.3984 0.917212 4.36517e-08 -0.3984 0.917212 8.0287e-08 -0.3984 0.87718 8.94371e-08 -0.480161 0.87718 5.261e-08 -0.480161 0.87718 8.92073e-08 -0.480161 0.829889 1.03923e-07 -0.557929 0.829889 6.11308e-08 -0.557929 0.829889 1.07049e-07 -0.557929 0.77571 1.1755e-07 -0.631089 0.77571 6.91468e-08 -0.631089 0.77571 1.2489e-07 -0.631089 0.715108 1.30202e-07 -0.699014 0.715108 7.65891e-08 -0.699014 0.715108 1.33811e-07 -0.699014 0.648586 1.41774e-07 -0.761142 0.648586 8.33963e-08 -0.761142 0.648586 1.42733e-07 -0.761142 0.576681 1.52173e-07 -0.81697 0.576681 8.95132e-08 -0.81697 0.576681 1.51653e-07 -0.81697 0.500002 1.6131e-07 -0.866024 0.500002 9.48879e-08 -0.866024 0.500002 1.60575e-07 -0.866024 0.419175 1.69111e-07 -0.907905 0.419175 9.94768e-08 -0.907905 0.419175 1.69494e-07 -0.907905 0.334876 1.7551e-07 -0.942262 0.334876 1.03241e-07 -0.942262 0.334876 1.78413e-07 -0.942262 0.24781 1.80455e-07 -0.968809 0.24781 1.0615e-07 -0.968809 0.24781 1.78417e-07 -0.968809 0.158682 1.83905e-07 -0.98733 0.158682 1.08179e-07 -0.98733 0.158682 1.82873e-07 -0.98733 0.0682419 1.8583e-07 -0.997669 0.0682419 1.09312e-07 -0.997669 0.0682419 1.86221e-07 -0.997669 -0.022764 1.86216e-07 -0.999741 -0.022764 1.09539e-07 -0.999741 -0.022764 1.86221e-07 -0.999741 -0.113579 1.85059e-07 -0.993529 -0.113579 1.08858e-07 -0.993529 -0.113579 1.85106e-07 -0.993529 -0.203457 1.82369e-07 -0.979084 -0.203457 1.07276e-07 -0.979084 -0.203457 1.78415e-07 -0.979084 -0.291646 1.78167e-07 -0.956526 -0.291646 1.04804e-07 -0.956526 -0.291646 1.78416e-07 -0.956526 -0.37742 1.72489e-07 -0.926042 -0.37742 1.01464e-07 -0.926042 -0.37742 1.78415e-07 -0.926042 -0.460063 1.65382e-07 -0.887886 -0.460063 9.72833e-08 -0.887886 -0.460063 1.69494e-07 -0.887886 -0.538895 1.56904e-07 -0.842373 -0.538895 9.22966e-08 -0.842373 -0.538895 1.51652e-07 -0.842373 -0.613271 1.47125e-07 -0.789873 -0.613271 8.65442e-08 -0.789873 -0.613271 1.51653e-07 -0.789873 -0.682553 1.36129e-07 -0.730836 -0.682553 8.00758e-08 -0.730836 -0.682553 1.33812e-07 -0.730836 -0.746186 1.24003e-07 -0.665738 -0.746186 7.29431e-08 -0.665738 -0.746186 1.24891e-07 -0.665738 -0.80363 1.10852e-07 -0.595129 -0.80363 6.52067e-08 -0.595129 -0.80363 1.07049e-07 -0.595129 -0.854416 9.67811e-08 -0.519589 -0.854416 5.693e-08 -0.519589 -0.854416 9.81281e-08 -0.519589 -0.89813 8.19061e-08 -0.43973 -0.89813 4.818e-08 -0.43973 -0.89813 8.02869e-08 -0.43973 -0.934395 6.63548e-08 
-0.356239 -0.934395 3.90322e-08 -0.356239 -0.934395 6.24454e-08 -0.356239 -0.962915 5.02554e-08 -0.269806 -0.962915 3.69524e-08 -0.269806 -0.962915 3.56829e-08 -0.269806 -0.983463 2.53004e-08 -0.181107 -0.983463 2.35641e-08 -0.181107 -0.983463 2.67624e-08 -0.181107 -0.995857 1.27032e-08 -0.0909333 -0.995857 1.18314e-08 -0.0909333 -0.995857 1.33811e-08 -0.0909333 -1 0 0 -1 0 0 -1 0 0 -0.995857 -1.16446e-08 0.0909333 -0.995857 -1.18314e-08 0.0909333 -0.995857 -1.33811e-08 0.0909333 -0.983463 -2.53004e-08 0.181107 -0.983463 -2.48043e-08 0.181107 -0.983463 -2.67624e-08 0.181107 -0.962915 -3.76915e-08 0.269806 -0.962915 -2.95619e-08 0.269806 -0.962915 -4.90641e-08 0.269806 -0.934395 -6.63548e-08 0.356239 -0.934395 -3.90322e-08 0.356239 -0.934395 -6.24454e-08 0.356239 -0.89813 -8.19061e-08 0.43973 -0.89813 -4.818e-08 0.43973 -0.89813 -8.02869e-08 0.43973 -0.854416 -9.67811e-08 0.519589 -0.854416 -5.693e-08 0.519589 -0.854416 -9.81281e-08 0.519589 -0.80363 -1.10852e-07 0.595129 -0.80363 -6.52067e-08 0.595129 -0.80363 -1.07049e-07 0.595129 -0.746186 -1.24003e-07 0.665738 -0.746186 -7.29431e-08 0.665738 -0.746186 -1.24891e-07 0.665738 -0.682553 -1.36129e-07 0.730836 -0.682553 -8.00758e-08 0.730836 -0.682553 -1.33812e-07 0.730836 -0.613271 -1.47125e-07 0.789873 -0.613271 -8.65442e-08 0.789873 -0.613271 -1.51653e-07 0.789873 -0.538895 -1.56904e-07 0.842373 -0.538895 -9.22966e-08 0.842373 -0.538895 -1.51652e-07 0.842373 -0.460063 -1.65382e-07 0.887886 -0.460063 -9.72833e-08 0.887886 -0.460063 -1.69494e-07 0.887886 -0.37742 -1.72489e-07 0.926042 -0.37742 -1.01464e-07 0.926042 -0.37742 -1.78415e-07 0.926042 -0.291646 -1.78167e-07 0.956526 -0.291646 -1.04804e-07 0.956526 -0.291646 -1.78416e-07 0.956526 -0.203457 -1.82369e-07 0.979084 -0.203457 -1.07276e-07 0.979084 -0.203457 -1.78415e-07 0.979084 -0.113579 -1.85059e-07 0.993529 -0.113579 -1.08858e-07 0.993529 -0.113579 -1.85106e-07 0.993529 -0.022764 -1.86216e-07 0.999741 -0.022764 -1.09539e-07 0.999741 -0.022764 -1.86221e-07 0.999741 0.0682419 -1.8583e-07 0.997669 0.0682419 -1.09312e-07 0.997669 0.0682419 -1.86221e-07 0.997669 0.158682 -1.83905e-07 0.98733 0.158682 -1.08179e-07 0.98733 0.158682 -1.82873e-07 0.98733 0.24781 -1.80455e-07 0.968809 0.24781 -1.0615e-07 0.968809 0.24781 -1.78417e-07 0.968809 0.334876 -1.7551e-07 0.942262 0.334876 -1.03241e-07 0.942262 0.334876 -1.78413e-07 0.942262 0.419175 -1.69111e-07 0.907905 0.419175 -9.94768e-08 0.907905 0.419175 -1.69494e-07 0.907905 0.500002 -1.6131e-07 0.866024 0.500002 -9.48879e-08 0.866024 0.500002 -1.60575e-07 0.866024 0.576681 -1.52173e-07 0.81697 0.576681 -8.95132e-08 0.81697 0.576681 -1.51653e-07 0.81697 0.648586 -1.41774e-07 0.761142 0.648586 -8.33963e-08 0.761142 0.648586 -1.42733e-07 0.761142 0.715108 -1.30202e-07 0.699014 0.715108 -7.65891e-08 0.699014 0.715108 -1.33811e-07 0.699014 0.77571 -1.1755e-07 0.631089 0.77571 -6.91468e-08 0.631089 0.77571 -1.2489e-07 0.631089 0.829889 -1.03923e-07 0.557929 0.829889 -6.11308e-08 0.557929 0.829889 -1.07049e-07 0.557929 0.87718 -8.94371e-08 0.480161 0.87718 -5.261e-08 0.480161 0.87718 -8.92073e-08 0.480161 0.917212 -7.42079e-08 0.3984 0.917212 -4.36517e-08 0.3984 0.917212 -8.0287e-08 0.3984 0.94964 -5.83648e-08 0.313344 0.94964 -4.29153e-08 0.313344 0.94964 -4.46037e-08 0.313344 0.974199 -3.15287e-08 0.225691 0.974199 -3.09104e-08 0.225691 0.974199 -3.12227e-08 0.225691 0.990686 -1.90218e-08 0.136163 0.990686 -1.77164e-08 0.136163 0.990686 -1.78415e-08 0.136163 0.998964 -6.35858e-09 0.0455164 0.998964 -5.96876e-09 0.0455164 0.998964 -6.69058e-09 
0.0455164 0.998964 5.96875e-09 -0.0455164 0.998964 5.9222e-09 -0.0455164 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -0.872633 0 0.488377 -0.871509 0.00157532 0.490377 -0.832868 -0.00148413 0.55347 -0.788252 -0.0027724 0.615346 -0.739052 -0.00387672 0.673638 -0.68555 -0.0047901 0.72801 -0.62806 -0.00551299 0.778146 -0.566917 -0.00604571 0.823753 -0.502476 -0.00638733 0.864567 -0.435112 -0.00653909 0.900353 -0.390285 0.00357177 0.920687 -0.370423 -0.00017169 0.928863 -0.350418 0.000202186 0.936594 -0.329278 0.00452149 0.944222 -0.310709 0.00220781 0.950503 -0.293412 -8.17567e-05 0.955986 -0.275889 8.99923e-05 0.96119 -0.257632 0.00231585 0.966241 -0.241333 0.0031181 0.970437 -0.22959 0.00117706 0.973287 -0.218826 0.00024007 0.975764 -0.209104 -4.05749e-05 0.977894 -0.200562 6.60695e-05 0.979681 -0.193022 0.000366479 0.981194 -0.184838 0.000823402 0.982769 -0.16424 0.00121591 0.98642 -0.157641 -7.07444e-08 0.987496 -0.148084 -0.000329864 0.988975 -0.0874268 0.000334708 0.996171 -0.068602 -0.000318948 0.997644 0.0113172 -0.000307533 0.999936 0.0911639 -0.000295689 0.995836 0.170427 -0.000283281 0.98537 0.248603 -0.000270476 0.968605 0.325187 -0.00025715 0.94565 0.399696 -0.000243378 0.916648 0.471645 -0.000229094 0.881788 0.540583 -0.000214317 0.84129 0.606064 -0.000199014 0.795416 0.667671 -0.00018336 0.744456 0.725011 -0.000167206 0.688738 0.777716 -0.000150565 0.628615 0.825447 -0.000133377 0.564479 0.867906 -0.000115767 0.496728 0.904814 -9.76277e-05 0.425806 0.935938 -7.89684e-05 0.352165 0.961081 -5.9978e-05 0.276265 0.980081 -4.05275e-05 0.1986 0.992813 -2.06272e-05 0.119673 0.999201 -5.24245e-09 0.0399778 0.999201 5.24587e-09 -0.0399778 0.999182 2.05777e-05 -0.0404348 0.992813 -2.05959e-05 -0.119673 0.992647 4.09225e-05 -0.121047 0.980081 -4.04755e-05 -0.1986 0.961081 -5.99056e-05 -0.276265 0.935938 -7.88762e-05 -0.352165 0.904814 -9.75162e-05 -0.425806 0.867906 -0.000115652 -0.496728 0.825447 -0.000133245 -0.564479 0.777716 -0.000150419 -0.628615 0.725011 -0.000167046 -0.688738 0.667671 -0.000183186 -0.744456 0.606064 -0.000198828 -0.795416 0.540583 -0.000214121 -0.84129 0.471644 -0.000228845 -0.881789 0.399698 -0.000243147 -0.916647 0.325186 -0.000256829 -0.94565 0.248603 -0.000270194 -0.968605 0.170428 -0.00028304 -0.98537 0.0911626 -0.000295353 -0.995836 0.0113133 -0.000307222 -0.999936 -0.0685994 -0.000318922 -0.997644 -0.148084 -0.000329519 -0.988975 -0.157636 7.07445e-08 -0.987497 -0.164214 0.00121225 -0.986424 -0.167634 0.00189942 -0.985847 -0.184755 0.00082939 -0.982784 -0.192994 0.000369053 -0.9812 -0.200566 6.67821e-05 -0.97968 -0.209139 -4.0341e-05 -0.977886 -0.218774 0.000236933 -0.975776 -0.229565 0.00117373 -0.973293 -0.241314 0.00311279 -0.970442 -0.257594 0.00232309 -0.96625 -0.275826 9.64387e-05 -0.961208 -0.293351 -8.39863e-05 -0.956005 -0.31069 0.00220343 -0.950509 -0.329247 0.0045341 -0.944233 -0.350327 0.000216025 -0.936627 -0.370354 -0.000177207 -0.928891 -0.390256 0.00356028 -0.920699 -0.43511 -0.00654074 -0.900354 -0.502475 -0.00638767 -0.864568 -0.566917 -0.00604571 -0.823753 -0.62806 -0.00551299 -0.778146 -0.68555 -0.0047901 -0.72801 -0.739052 -0.00387672 -0.673638 -0.788252 -0.0027724 -0.615346 -0.832868 
-0.00148413 -0.55347 -0.872633 0 -0.488377 -0.871509 0.00157532 -0.490377 -0.829027 0.00296667 0.559201 -0.78113 0.00416256 0.624355 -0.72812 0.00516914 0.685431 -0.670348 0.00598631 0.742023 -0.608193 0.00661324 0.793762 -0.542061 0.00705115 0.84031 -0.472386 0.00729971 0.881362 -0.399623 0.00735735 0.91665 -0.324245 0.00634203 0.945952 -0.24675 0.00456483 0.969069 -0.167637 0.00189838 0.985847 -0.00664341 0.000323521 0.999978 0.074182 0.000311854 0.997245 0.154524 0.000299693 0.987989 0.233854 0.000287145 0.972272 0.311654 0.000274005 0.950196 0.387417 0.000260489 0.921904 0.460645 0.000246393 0.887584 0.530862 0.000231914 0.847458 0.597607 0.000216959 0.801789 0.660439 0.000201557 0.750879 0.718956 0.00018557 0.695056 0.772771 0.000169129 0.634685 0.821529 0.000152203 0.570167 0.864917 0.000134902 0.501916 0.902646 0.000117046 0.430384 0.934471 9.87658e-05 0.356039 0.960185 8.006e-05 0.279363 0.97962 6.07221e-05 0.200858 0.992647 4.08907e-05 0.121047 0.999182 2.05671e-05 0.0404348 0.97962 6.07747e-05 -0.200858 0.960185 8.0133e-05 -0.279363 0.934471 9.88592e-05 -0.356039 0.902646 0.000117146 -0.430383 0.864917 0.000135019 -0.501916 0.821529 0.000152336 -0.570167 0.772771 0.000169275 -0.634685 0.718956 0.000185732 -0.695056 0.660439 0.000201732 -0.750879 0.597607 0.000217145 -0.801789 0.530861 0.000232153 -0.847459 0.460645 0.0002466 -0.887584 0.387417 0.000260812 -0.921904 0.311654 0.000274281 -0.950196 0.233855 0.000287382 -0.972272 0.154523 0.000300026 -0.987989 0.0741807 0.00031219 -0.997245 -0.00663957 0.000323561 -0.999978 -0.0874319 0.000335 -0.99617 -0.246751 0.00456453 -0.969068 -0.324246 0.00634302 -0.945952 -0.399619 0.00735604 -0.916652 -0.472386 0.00729889 -0.881362 -0.542061 0.00705114 -0.84031 -0.608193 0.00661324 -0.793762 -0.670348 0.00598631 -0.742023 -0.72812 0.00516914 -0.685431 -0.78113 0.00416257 -0.624355 -0.829027 0.00296667 -0.559201 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.920312 0.00204237 0.39118 0.899753 0.168939 0.402373 0.860752 0.333062 0.384936 0.79701 0.48758 0.356427 0.710339 0.628096 0.317668 0.603214 0.750575 0.269759 0.478733 0.851457 0.214093 0.34045 0.927854 0.152252 0.214986 0.971873 0.0961438 0.138698 0.989148 0.0484622 0.0396872 0.998694 0.0321597 0.0572237 0.998052 0.0248718 0.0590721 0.997904 0.0264173 0.187906 0.977538 0.0954501 0.323745 0.927854 0.185139 0.441809 0.851458 0.282532 0.538399 0.75058 0.383088 0.610936 0.628094 0.481929 0.657897 0.487578 0.573968 0.678988 0.333058 0.654253 0.675062 0.168949 0.718155 0.662844 0.00123762 0.748757 0.682513 0.0835543 0.726082 0.69898 0.00132291 0.71514 0.71759 0.0834889 0.691444 0.742706 0.168937 0.647956 0.740301 0.333053 0.583978 0.711378 0.487581 0.506168 0.655556 0.628093 0.41922 0.573609 0.75058 0.328028 0.467557 0.851459 0.237504 0.340449 0.927855 0.152252 0.332503 0.927855 0.168901 0.332505 0.927854 0.168902 0.926074 
0.0491776 0.374124 0.91488 0.146838 0.376076 0.900316 0.243341 0.360854 0.868281 0.336216 0.364755 0.83889 0.427785 0.336547 0.801662 0.48767 0.345711 0.793672 0.51388 0.325594 0.744102 0.595585 0.302639 0.687544 0.671539 0.276259 0.619616 0.740109 0.261372 0.604118 0.750601 0.267656 0.553944 0.803289 0.218801 0.477697 0.851425 0.216522 0.472677 0.856976 0.205348 0.398032 0.903905 0.156611 0.334839 0.927636 0.165453 0.312261 0.941417 0.127388 0.224479 0.969999 0.0933301 0.907172 0.168987 0.385334 0.868526 0.33315 0.366979 0.712533 0.628154 0.312601 0.166573 0.982532 0.0829734 0.192397 0.977538 0.0860406 0.0560174 0.998041 0.0278619 0.0575239 0.997916 0.0292205 0.182954 0.977538 0.104625 0.314191 0.927854 0.200922 0.427296 0.851459 0.304033 0.518797 0.750578 0.409247 0.586352 0.628097 0.511552 0.628706 0.487582 0.6058 0.645807 0.333051 0.68703 0.63872 0.168944 0.750663 0.625089 0.00117028 0.780553 0.645764 0.083646 0.758941 0.0546758 0.998029 0.0308056 0.0558229 0.99793 0.0319232 0.177557 0.977538 0.113545 0.303875 0.927853 0.216216 0.411735 0.851461 0.324791 0.497926 0.750575 0.434404 0.560341 0.628092 0.539925 0.59798 0.487578 0.636151 0.611037 0.333052 0.718129 0.600817 0.168947 0.78133 0.585813 0.0010905 0.810446 0.607439 0.0837236 0.789942 0.053199 0.998015 0.0336969 0.0539779 0.997945 0.0345181 0.171723 0.977538 0.122187 0.292809 0.927853 0.230978 0.395169 0.85146 0.344758 0.47583 0.750579 0.458495 0.53295 0.628098 0.566972 0.565784 0.487584 0.664944 0.574779 0.333049 0.747468 0.561442 0.168945 0.810087 0.545108 0.00101058 0.838365 0.567627 0.0837995 0.81901 0.0515899 0.998 0.0365328 0.051995 0.997962 0.036996 0.165472 0.977538 0.13053 0.281026 0.927854 0.245176 0.377638 0.851459 0.363879 0.452578 0.750575 0.481468 0.504262 0.628094 0.592636 0.532209 0.487578 0.692113 0.537108 0.333058 0.774976 0.520692 0.168945 0.836861 0.503077 0.000937444 0.864241 0.526425 0.083868 0.846075 0.049849 0.997983 0.0393075 0.0498826 0.99798 0.0393493 0.158814 0.977538 0.138554 0.268558 0.927854 0.258773 0.35918 0.851459 0.38211 0.428209 0.750579 0.503258 0.474332 0.628101 0.616845 0.497334 0.487575 0.717586 0.498123 0.333057 0.800591 0.478669 0.168951 0.861587 0.459819 0.000870737 0.888012 0.483936 0.0839479 0.871068 0.0480479 0.997965 0.0419184 0.151768 0.977538 0.146238 0.255431 0.927854 0.271737 0.339846 0.851457 0.399406 0.402802 0.750574 0.523822 0.443249 0.628097 0.63955 0.461237 0.487575 0.741304 0.457924 0.333051 0.824247 0.435475 0.168948 0.884205 0.415441 0.000790893 0.90962 0.440265 0.0840136 0.893929 0.0452055 0.998013 0.0439033 0.0440153 0.997933 0.0468252 0.0427334 0.998026 0.0460118 0.041787 0.997919 0.0491102 0.128473 0.977538 0.167073 0.21244 0.927854 0.306522 0.277045 0.851458 0.44527 0.32091 0.750577 0.577625 0.343802 0.628094 0.698068 0.346544 0.487575 0.80136 0.331004 0.333051 0.882901 0.299934 0.168941 0.938882 0.27668 0.000596954 0.960962 0.303227 0.0842174 0.94919 0.323759 0.000661599 0.946139 0.349798 0.0841547 0.933038 0.391215 0.168947 0.904659 0.4166 0.333053 0.845884 0.424009 0.487583 0.763204 0.411077 0.6281 0.660686 0.376403 0.750576 0.543099 0.319678 0.851458 0.415724 0.241683 0.927853 0.284038 0.144349 0.977539 0.153564 0.136578 0.977539 0.160514 0.136579 0.977538 0.160515 0.0401791 0.998039 0.0480005 0.0394251 0.997906 0.0512702 0.120055 0.977538 0.173223 0.197021 0.927854 0.316653 0.254687 0.851457 0.458427 0.291951 0.750577 0.592789 0.308858 0.628096 0.714215 0.30649 0.487577 0.817516 0.286933 0.333054 0.89819 0.253137 0.168946 0.952564 0.228925 0.000520283 0.973444 
0.255915 0.0842778 0.963019 0.037549 0.99805 0.0498684 0.036939 0.997895 0.0532983 0.11134 0.977538 0.178947 0.181119 0.927855 0.326008 0.231704 0.851458 0.470459 0.262279 0.750574 0.606505 0.273159 0.628095 0.728616 0.265684 0.487582 0.831671 0.242166 0.333047 0.911282 0.205717 0.168949 0.963917 0.180616 0.000468474 0.983554 0.207973 0.0843346 0.974492 0.0348468 0.998059 0.0516132 0.0343365 0.997886 0.0551858 0.102355 0.977538 0.184235 0.164776 0.927854 0.334568 0.20815 0.85146 0.481342 0.231961 0.750579 0.618728 0.236794 0.628098 0.74123 0.22423 0.487577 0.843795 0.196802 0.333054 0.922141 0.157797 0.168946 0.972912 0.131861 0.000415039 0.991268 0.159527 0.0844114 0.983578 0.0320764 0.998067 0.0532318 0.0316255 0.997878 0.0569245 0.0931187 0.977538 0.189071 0.148029 0.927854 0.342309 0.184098 0.851454 0.491054 0.201084 0.750575 0.629446 0.199845 0.628098 0.752034 0.182228 0.487577 0.853851 0.150958 0.333055 0.930745 0.109489 0.168946 0.979525 0.0827876 0.000355097 0.996567 0.110689 0.0844687 0.990259 0.0292425 0.998073 0.0547221 0.0288153 0.997871 0.0585081 0.0836536 0.977539 0.193444 0.130917 0.927854 0.349211 0.159586 0.851459 0.49955 0.169707 0.750576 0.638619 0.162411 0.628096 0.760998 0.139779 0.487576 0.861818 0.104744 0.333053 0.937072 0.0609156 0.168947 0.983741 0.033515 0.000291332 0.999438 0.0615829 0.0845165 0.994517 0.026351 0.998078 0.0560838 0.0259172 0.997866 0.0599315 0.0739827 0.977538 0.197347 0.11349 0.927853 0.355258 0.134685 0.85146 0.506829 0.137918 0.750576 0.646231 0.124579 0.628096 0.768099 0.0969891 0.487574 0.867678 0.0582758 0.333046 0.941108 0.0121902 0.168946 0.98555 -0.0158433 0.000244489 0.999874 0.0123236 0.0845807 0.99634 0.0234046 0.998082 0.0573132 0.0229393 0.997863 0.0611874 0.0641385 0.977536 0.200772 0.095783 0.927853 0.360436 0.109456 0.85146 0.51287 0.10579 0.750576 0.652261 0.0864404 0.628098 0.773318 0.0539577 0.487578 0.87141 0.0116616 0.333054 0.942836 -0.0365628 0.168947 0.984947 -0.0651627 0.000180643 0.997875 -0.036963 0.0846315 0.995727 0.0204096 0.998084 0.05841 0.0198925 0.99786 0.0622803 0.0541278 0.977538 0.203688 0.0778396 0.927854 0.364729 0.0839589 0.85146 0.517655 0.0734044 0.750575 0.656696 0.0480905 0.628098 0.776646 0.0107996 0.487576 0.873014 -0.0349787 0.333049 0.942261 -0.0852294 0.168948 0.981933 -0.114321 0.000129717 0.993444 -0.0861619 0.0846753 0.992676 0.0173737 0.998084 0.0593851 0.0167938 0.99786 0.0631958 0.0439896 0.977538 0.206118 0.0597084 0.927854 0.368134 0.0582554 0.851459 0.521176 0.0408378 0.750577 0.65952 0.00962428 0.628099 0.778074 -0.0323903 0.487578 0.872478 -0.0815354 0.333056 0.939375 -0.133684 0.168948 0.976517 -0.1632 8.22226e-05 0.986593 -0.135146 0.0847258 0.987197 0.0142888 0.998084 0.0602046 0.0136399 0.997862 0.0639119 0.0337418 0.977538 0.208039 0.0414286 0.927854 0.370635 0.0324113 0.851459 0.523419 0.00817397 0.750575 0.660735 -0.0288669 0.628096 0.7776 -0.0754949 0.487575 0.869811 -0.127889 0.333047 0.934197 -0.181812 0.168944 0.968712 -0.211686 2.64527e-05 0.977338 -0.183799 0.0847789 0.979301 0.0111761 0.998082 0.0608847 0.0104551 0.997865 0.0644615 0.0234122 0.977538 0.209453 0.0230487 0.927854 0.37223 0.00648592 0.85146 0.524379 -0.0245142 0.750578 0.660328 -0.0672873 0.628099 0.775219 -0.118419 0.487583 0.865008 -0.173934 0.333052 0.926727 -0.229499 0.16895 0.958533 -0.259651 -6.36562e-06 0.965703 -0.232007 0.0848219 0.969009 0.00803164 0.998079 0.0614336 0.00724627 0.99787 0.064828 0.0130255 0.977538 0.210355 0.00461246 0.927854 0.372915 -0.0194545 0.85146 0.524058 -0.0571391 0.750575 
0.65831 -0.105541 0.628096 0.770945 -0.161051 0.48758 0.858096 -0.21955 0.333048 0.916993 -0.276617 0.168938 0.946014 -0.306988 -7.05878e-05 0.951713 -0.279638 0.0848699 0.956347 0.00486221 0.998074 0.0618429 0.00402559 0.997877 0.0650106 0.00260672 0.977538 0.210742 -0.0138343 0.927854 0.372688 -0.0453475 0.851458 0.522459 -0.0896239 0.750575 0.654679 -0.143537 0.628095 0.764783 -0.203293 0.487576 0.849083 -0.264631 0.333057 0.90501 -0.323065 0.168952 0.931174 -0.353572 -0.000108993 0.935408 -0.326593 0.084897 0.941344 0.00167249 0.998068 0.0621108 0.000804113 0.997884 0.0650091 -0.00781806 0.977538 0.210613 -0.0322491 0.927854 0.371547 -0.0711295 0.851458 0.519577 -0.121891 0.750577 0.649443 -0.181186 0.628099 0.756745 -0.245032 0.487579 0.837989 -0.309064 0.333048 0.890819 -0.36872 0.168952 0.914057 -0.399301 -0.000138912 0.91682 -0.372745 0.0849608 0.924036 -0.00153238 0.99806 0.0622373 -0.0024064 0.997894 0.0648249 -0.0182244 0.977538 0.209968 -0.0505841 0.927854 0.369498 -0.0967373 0.851458 0.515423 -0.153861 0.750578 0.64262 -0.218385 0.628099 0.74686 -0.286174 0.487579 0.824846 -0.352738 0.333044 0.874447 -0.41347 0.168941 0.894708 -0.444051 -0.000199458 0.896002 -0.417982 0.084982 0.904472 -0.00474772 0.998051 0.0622214 -0.00559496 0.997905 0.0644596 -0.028586 0.977539 0.208808 -0.0687947 0.927854 0.366543 -0.122109 0.851458 0.510008 -0.185451 0.750574 0.634229 -0.255055 0.628097 0.735148 -0.326619 0.487583 0.809681 -0.395551 0.333064 0.855925 -0.457212 0.168953 0.873162 -0.487724 -0.000235387 0.872998 -0.462201 0.0850224 0.88269 -0.00796865 0.99804 0.0620631 -0.00874959 0.997917 0.0639137 -0.0388775 0.977538 0.207142 -0.0868376 0.927854 0.362693 -0.14718 0.851458 0.503346 -0.21659 0.750576 0.62428 -0.291098 0.628098 0.721634 -0.366257 0.487573 0.792546 -0.437397 0.333051 0.835321 -0.499837 0.168959 0.84948 -0.530204 -0.000240337 0.84787 -0.505289 0.0850656 0.858747 -0.0111896 0.998028 0.06176 -0.0118605 0.997931 0.0631929 -0.0490736 0.977539 0.204964 -0.104668 0.927854 0.357955 -0.171894 0.851459 0.495449 -0.247198 0.750578 0.612802 -0.326427 0.628101 0.706353 -0.405003 0.487579 0.773459 -0.478171 0.333045 0.812671 -0.541233 0.168936 0.823728 -0.571396 -0.000270468 0.820675 -0.547132 0.085124 0.832707 -0.0144036 0.998014 0.0613167 -0.0149193 0.997945 0.0623134 -0.0591497 0.977538 0.202287 -0.122243 0.927854 0.352341 -0.196183 0.851458 0.486345 -0.277198 0.75057 0.599838 -0.360963 0.62809 0.689354 -0.442761 0.48758 0.752482 -0.517774 0.33306 0.788023 -0.581307 0.168941 0.795953 -0.611199 -0.000309512 0.791477 -0.587642 0.0851639 0.804627 -0.0176179 0.997998 0.0607381 -0.017903 0.997963 0.0612386 -0.069082 0.977536 0.199124 -0.139517 0.927851 0.345872 -0.219997 0.851457 0.47605 -0.306524 0.750579 0.585384 -0.394611 0.628098 0.670653 -0.479432 0.487575 0.729669 -0.556112 0.333059 0.761453 -0.619956 0.168942 0.766233 -0.649506 -0.000382786 0.760357 -0.626713 0.0851547 0.774584 -0.0208066 0.997982 0.0599896 -0.0208209 0.99798 0.0600125 -0.0788438 0.977538 0.195456 -0.156451 0.927854 0.338541 -0.243268 0.851459 0.464583 -0.335099 0.750576 0.569513 -0.427297 0.628095 0.650319 -0.514926 0.487582 0.705064 -0.59309 0.333051 0.733022 -0.657096 0.168955 0.734629 -0.68623 -0.000358737 0.727385 -0.664257 0.0851908 0.742634 -0.0238585 0.997964 0.0591456 -0.0884136 0.977538 0.191316 -0.173002 0.927854 0.33039 -0.265945 0.85146 0.451983 -0.362855 0.750577 0.552242 -0.458933 0.628095 0.628392 -0.549165 0.487587 0.678731 -0.628613 0.333057 0.702794 -0.69262 0.168934 0.701241 -0.72129 -0.000381938 
0.692633 -0.70016 0.0852654 0.708876 -0.0266617 0.998013 0.0570865 -0.0298162 0.997932 0.0569414 -0.0294637 0.998027 0.0554426 -0.0327052 0.997918 0.0555836 -0.115732 0.977538 0.176138 -0.219956 0.927855 0.301173 -0.329859 0.851461 0.407684 -0.440527 0.750581 0.492509 -0.54681 0.628101 0.553614 -0.643499 0.487575 0.590067 -0.725631 0.333047 0.602112 -0.7887 0.168951 0.591108 -0.81559 -0.000505853 0.578629 -0.797288 0.0852782 0.597545 -0.786047 -0.000479402 0.618167 -0.766765 0.0852566 0.636241 -0.726448 0.168957 0.666128 -0.662603 0.333049 0.670847 -0.582061 0.487577 0.650748 -0.489446 0.628094 0.60493 -0.389724 0.750572 0.533627 -0.287972 0.85146 0.438279 -0.189129 0.927854 0.321431 -0.097766 0.977539 0.186708 -0.10688 0.977539 0.181645 -0.10688 0.977538 0.181646 -0.03218 0.998039 0.0536833 -0.0355202 0.997906 0.0540601 -0.124302 0.977538 0.1702 -0.234582 0.927853 0.28993 -0.349621 0.851457 0.390879 -0.464348 0.750574 0.470127 -0.57352 0.628097 0.525898 -0.671892 0.487578 0.557521 -0.75452 0.333047 0.56549 -0.816969 0.168956 0.551376 -0.843152 -0.000503876 0.537676 -0.825863 0.0853222 0.557378 -0.0348068 0.99805 0.0518111 -0.0382507 0.997895 0.0523745 -0.132567 0.977538 0.163844 -0.248633 0.927855 0.277971 -0.368519 0.851462 0.373105 -0.487027 0.750575 0.446588 -0.598827 0.628094 0.496894 -0.698644 0.487577 0.523608 -0.781558 0.333061 0.527482 -0.843238 0.16894 0.510302 -0.868655 -0.000534146 0.495418 -0.852415 0.0853342 0.515856 -0.037341 0.998059 0.0498308 -0.0408865 0.997885 0.0505332 -0.140507 0.977538 0.157088 -0.262078 0.927852 0.265341 -0.386522 0.851459 0.354426 -0.508517 0.750578 0.421952 -0.622663 0.628106 0.466663 -0.723681 0.487576 0.488421 -0.806691 0.333057 0.488183 -0.867442 0.168946 0.467976 -0.892038 -0.000531174 0.451959 -0.876882 0.085346 0.473069 -0.0397785 0.998067 0.0477449 -0.0434175 0.997877 0.048541 -0.148103 0.977539 0.149945 -0.274876 0.927854 0.252051 -0.403576 0.851461 0.334875 -0.528761 0.750576 0.396292 -0.644987 0.628093 0.435305 -0.746944 0.48759 0.452029 -0.829847 0.33305 0.447695 -0.889524 0.168943 0.424505 -0.913257 -0.000524244 0.407384 -0.899199 0.0854131 0.429122 -0.0421161 0.998074 0.0455571 -0.0458339 0.997871 0.0464044 -0.155338 0.977538 0.14244 -0.287007 0.927853 0.238151 -0.419641 0.85146 0.314511 -0.547717 0.750573 0.369657 -0.665725 0.628092 0.402878 -0.768387 0.487586 0.414537 -0.850971 0.333053 0.406109 -0.90943 0.168933 0.379999 -0.932246 -0.00056166 0.361826 -0.91932 0.0854074 0.384131 -0.0443499 0.998079 0.0432714 -0.0481273 0.997866 0.0441311 -0.162193 0.977538 0.134584 -0.298435 0.927852 0.223668 -0.43468 0.851463 0.293366 -0.565321 0.750578 0.342117 -0.684834 0.628091 0.369464 -0.787951 0.487579 0.376032 -0.870013 0.333059 0.363524 -0.927108 0.168949 0.334556 -0.948963 -0.000571279 0.315387 -0.937192 0.0854088 0.338196 -0.0464777 0.998082 0.040892 -0.0502896 0.997863 0.0417292 -0.168651 0.977538 0.126398 -0.309129 0.927853 0.208633 -0.44866 0.851459 0.271518 -0.581553 0.750575 0.313741 -0.702263 0.628099 0.335138 -0.80558 0.487585 0.336603 -0.886926 0.333054 0.320058 -0.942519 0.168944 0.288299 -0.96337 -0.00058486 0.268175 -0.952769 0.0854279 0.291434 -0.0484956 0.998084 0.0384213 -0.0523116 0.997861 0.0392054 -0.174693 0.977539 0.117901 -0.319068 0.927853 0.193091 -0.461537 0.851462 0.248992 -0.596351 0.750581 0.284594 -0.717975 0.6281 0.300003 -0.821248 0.487569 0.296358 -0.90167 0.33305 0.275806 -0.955624 0.168938 0.241335 -0.975429 -0.00059469 0.220315 -0.966016 0.0854266 0.243959 -0.0504018 0.998085 0.0358664 -0.0541876 0.997861 
0.036572 -0.180313 0.977538 0.10912 -0.328224 0.927854 0.177075 -0.473295 0.851453 0.225874 -0.609709 0.750566 0.254765 -0.731935 0.628101 0.264123 -0.83489 0.487588 0.255373 -0.914201 0.333071 0.23087 -0.966387 0.168953 0.193781 -0.985112 -0.000602612 0.171914 -0.976897 0.0854396 0.195888 -0.052191 0.998084 0.0332273 -0.0559093 0.997862 0.0338343 -0.185486 0.977538 0.100069 -0.336582 0.927853 0.160626 -0.483877 0.851461 0.202182 -0.621552 0.750577 0.224294 -0.744107 0.62809 0.227611 -0.846509 0.487567 0.213779 -0.924503 0.333057 0.185384 -0.974789 0.168946 0.14575 -0.992394 -0.000590128 0.123098 -0.985389 0.0854476 0.147335 -0.0538632 0.998082 0.0305113 -0.0574708 0.997866 0.0310053 -0.190208 0.977538 0.0907717 -0.344111 0.927854 0.143784 -0.493287 0.851459 0.178008 -0.631884 0.750577 0.193279 -0.754443 0.628105 0.190526 -0.85604 0.487577 0.171655 -0.93254 0.333057 0.139435 -0.980805 0.16894 0.0973671 -0.99726 -0.0006027 0.073974 -0.991467 0.0854702 0.0984255 -0.0554121 0.998079 0.0277203 -0.0588675 0.99787 0.0280931 -0.194465 0.977538 0.0812552 -0.350799 0.927855 0.126588 -0.501482 0.851462 0.153392 -0.640669 0.750576 0.161796 -0.762947 0.628098 0.152986 -0.863477 0.487585 0.129108 -0.938295 0.333057 0.0931415 -0.984417 0.16896 0.048739 -0.999695 -0.000588272 0.02468 -0.995123 0.0854582 0.0492691 -0.0568386 0.998074 0.0248604 -0.0600955 0.997877 0.0251102 -0.198245 0.977538 0.0715391 -0.356635 0.927853 0.109089 -0.508463 0.851456 0.128408 -0.647885 0.750577 0.129918 -0.769591 0.628084 0.11507 -0.868805 0.487586 0.0862468 -0.941757 0.333046 0.0466377 -0.98563 0.168921 0 -0.999695 -0.000588254 -0.02468 -0.99634 0.0854775 0 -0.0581388 0.998067 0.0219348 -0.061148 0.997885 0.022066 -0.20154 0.977539 0.0616463 -0.361586 0.927856 0.0913145 -0.514187 0.851458 0.103107 -0.653513 0.750582 0.0977107 -0.774332 0.628093 0.0768685 -0.872007 0.487586 0.0431787 -0.942905 0.333063 0 -0.984424 0.168921 -0.0487393 -0.99726 -0.000602665 -0.073974 -0.995123 0.0854583 -0.0492691 -0.0593086 0.99806 0.0189483 -0.0620247 0.997894 0.0189723 -0.204346 0.977538 0.0516068 -0.365667 0.927853 0.0733251 -0.518658 0.851458 0.0775494 -0.65755 0.750578 0.0652766 -0.777189 0.62809 0.0384852 -0.873078 0.48758 0 -0.941751 0.333063 -0.0466374 -0.980802 0.16896 -0.0973667 -0.992394 -0.000590093 -0.123098 -0.991467 0.0854702 -0.0984255 -0.0603481 0.998051 0.0159053 -0.062724 0.997905 0.0158403 -0.206644 0.977538 0.0414363 -0.368842 0.927854 0.0551481 -0.521854 0.85146 0.0518076 -0.65998 0.750572 0.0326769 -0.77813 0.628103 0 -0.87201 0.48758 -0.0431788 -0.938299 0.333046 -0.093142 -0.97479 0.16894 -0.14575 -0.985112 -0.000602612 -0.171914 -0.985389 0.0854476 -0.147335 -0.0612555 0.99804 0.0128104 -0.0632433 0.997918 0.0126813 -0.208438 0.977539 0.0311657 -0.371122 0.927853 0.0368435 -0.523778 0.85146 0.0259353 -0.660795 0.750567 0 -0.777178 0.628103 -0.0384847 -0.868805 0.487586 -0.0862468 -0.93254 0.333057 -0.139435 -0.966388 0.168946 -0.193781 -0.975429 -0.00059469 -0.220315 -0.976897 0.0854396 -0.195888 -0.062028 0.998028 0.00966791 -0.0635801 0.997931 0.00950677 -0.209727 0.977538 0.02082 -0.372486 0.927854 0.0184436 -0.524418 0.851461 0 -0.659986 0.750567 -0.0326772 -0.774335 0.62809 -0.0768688 -0.863476 0.487586 -0.129108 -0.924503 0.333058 -0.185384 -0.955622 0.168953 -0.241335 -0.96337 -0.00058486 -0.268175 -0.966016 0.0854266 -0.243959 -0.0626613 0.998014 0.00648181 -0.0637342 0.997947 0.00632725 -0.210502 0.977538 0.0104228 -0.372941 0.927855 0 -0.523776 0.851461 -0.0259352 -0.657556 0.750572 -0.0652772 -0.769583 
0.628093 -0.115069 -0.856035 0.487585 -0.171655 -0.914205 0.333057 -0.230871 -0.94252 0.168939 -0.288299 -0.948963 -0.000570359 -0.315387 -0.952769 0.0854279 -0.291434 -0.0631541 0.997998 0.00325795 -0.063711 0.997963 0.00315454 -0.210759 0.977538 0 -0.372485 0.927855 -0.0184436 -0.521854 0.85146 -0.0518076 -0.653518 0.750578 -0.0977115 -0.762958 0.628084 -0.152988 -0.846503 0.487577 -0.213778 -0.901663 0.333071 -0.275804 -0.927108 0.168944 -0.334557 -0.932246 -0.000558618 -0.361826 -0.937192 0.0854106 -0.338196 -0.0635086 0.997981 0 -0.0635086 0.997981 0 -0.210501 0.977538 -0.0104227 -0.371118 0.927854 -0.0368431 -0.518654 0.85146 -0.0775488 -0.64788 0.750582 -0.129917 -0.754448 0.628098 -0.190527 -0.834902 0.487567 -0.255377 -0.886927 0.33305 -0.320058 -0.909428 0.168948 -0.379997 -0.913257 -0.000526508 -0.407384 -0.91932 0.0854086 -0.38413 -0.063711 0.997963 -0.00315454 -0.209729 0.977538 -0.0208202 -0.368847 0.927853 -0.0551487 -0.514188 0.851458 -0.103107 -0.640668 0.750577 -0.161795 -0.744096 0.628105 -0.227608 -0.821238 0.487588 -0.296355 -0.870013 0.333057 -0.363524 -0.889525 0.168937 -0.424507 -0.892038 -0.000526223 -0.451959 -0.899198 0.0854141 -0.429123 -0.0626613 0.998014 -0.00648181 -0.0635801 0.997931 -0.00950677 -0.062028 0.998028 -0.00966791 -0.0632433 0.997918 -0.0126813 -0.204342 0.977538 -0.0516058 -0.356627 0.927856 -0.109087 -0.493282 0.851462 -0.178006 -0.6097 0.750575 -0.254761 -0.702264 0.628099 -0.335138 -0.768391 0.487578 -0.414539 -0.806691 0.333055 -0.488184 -0.816972 0.168937 -0.551378 -0.815589 -0.000499772 -0.578631 -0.825862 0.0853343 -0.557378 -0.843152 -0.000496236 -0.537675 -0.852414 0.0853355 -0.515856 -0.867442 0.168944 -0.467977 -0.85097 0.333057 -0.406107 -0.805577 0.487586 -0.336608 -0.731933 0.628102 -0.264126 -0.631884 0.750577 -0.193279 -0.508463 0.851456 -0.128407 -0.365668 0.927853 -0.0733238 -0.208438 0.977539 -0.0311665 -0.206641 0.977539 -0.0414358 -0.206644 0.977538 -0.0414355 -0.0612555 0.99804 -0.0128104 -0.062724 0.997905 -0.0158403 -0.201544 0.977538 -0.0616475 -0.350805 0.927853 -0.12659 -0.48388 0.851459 -0.202184 -0.596365 0.750567 -0.2846 -0.68483 0.628098 -0.369461 -0.746947 0.487585 -0.45203 -0.781559 0.333059 -0.527482 -0.7887 0.168951 -0.591107 -0.786047 -0.00047926 -0.618166 -0.797288 0.0852767 -0.597544 -0.0603481 0.998051 -0.0159053 -0.0620247 0.997894 -0.0189723 -0.198244 0.977539 -0.0715389 -0.344109 0.927855 -0.143783 -0.473285 0.851461 -0.225869 -0.581547 0.750581 -0.313739 -0.665724 0.628093 -0.402878 -0.723674 0.487591 -0.488417 -0.754515 0.333063 -0.565487 -0.758505 0.168953 -0.629385 -0.754589 -0.000457901 -0.656198 -0.766766 0.0852554 -0.63624 -0.0593086 0.99806 -0.0189483 -0.061148 0.997885 -0.022066 -0.194464 0.977538 -0.0812548 -0.33658 0.927854 -0.160625 -0.461548 0.851454 -0.248998 -0.565326 0.750574 -0.342119 -0.644988 0.628092 -0.435305 -0.698645 0.487576 -0.523608 -0.725632 0.333046 -0.602112 -0.726446 0.168961 -0.66613 -0.72129 -0.000389577 -0.692633 -0.734359 0.0852546 -0.673386 -0.0581388 0.998067 -0.0219348 -0.0600955 0.997877 -0.0251102 -0.190208 0.977538 -0.0907719 -0.328226 0.927853 -0.177077 -0.448655 0.851462 -0.271515 -0.547713 0.750577 -0.369655 -0.62267 0.628094 -0.466669 -0.671891 0.48758 -0.55752 -0.694966 0.333051 -0.637259 -0.692619 0.168948 -0.701239 -0.68623 -0.000370194 -0.727384 -0.700162 0.085254 -0.708876 -0.0568387 0.998074 -0.0248605 -0.0588676 0.99787 -0.0280932 -0.185485 0.977539 -0.100069 -0.319067 0.927854 -0.19309 -0.434684 0.85146 -0.29337 -0.528763 0.750574 -0.396293 -0.598821 
0.628104 -0.496889 -0.643497 0.48758 -0.590066 -0.6626 0.333059 -0.670845 -0.657097 0.168942 -0.73463 -0.649506 -0.000391273 -0.760357 -0.664257 0.0851826 -0.742635 -0.0554122 0.998079 -0.0277203 -0.0574708 0.997866 -0.0310053 -0.18031 0.977538 -0.109118 -0.309128 0.927853 -0.208633 -0.419638 0.851463 -0.314508 -0.508518 0.750576 -0.421953 -0.573521 0.628096 -0.525899 -0.613533 0.487575 -0.621166 -0.628614 0.333056 -0.702793 -0.619953 0.168959 -0.766232 -0.611199 -0.000309512 -0.791477 -0.626712 0.0851501 -0.774585 -0.0538631 0.998082 -0.0305113 -0.0559093 0.997862 -0.0338344 -0.174695 0.977538 -0.117903 -0.298432 0.927853 -0.223666 -0.403577 0.85146 -0.334876 -0.487027 0.750576 -0.446587 -0.546813 0.628095 -0.553617 -0.582058 0.48758 -0.650749 -0.593087 0.333068 -0.733017 -0.581307 0.168938 -0.795954 -0.571396 -0.000261556 -0.820675 -0.587641 0.085168 -0.804627 -0.0521911 0.998084 -0.0332273 -0.0541874 0.997861 -0.0365717 -0.168649 0.977538 -0.126397 -0.28701 0.927852 -0.238153 -0.386519 0.851462 -0.354424 -0.464346 0.750577 -0.470124 -0.518765 0.628096 -0.579982 -0.54917 0.487573 -0.678738 -0.556108 0.333066 -0.761453 -0.541233 0.168941 -0.823727 -0.530203 -0.000245571 -0.847871 -0.547133 0.0851289 -0.832706 -0.0504015 0.998085 -0.0358661 -0.0523115 0.997861 -0.0392054 -0.162193 0.977538 -0.134584 -0.27488 0.927853 -0.252053 -0.368523 0.851458 -0.373108 -0.440529 0.750576 -0.492514 -0.489447 0.628094 -0.604929 -0.514926 0.487584 -0.705062 -0.517775 0.333063 -0.788022 -0.49984 0.168937 -0.849483 -0.487724 -0.000254484 -0.872998 -0.505291 0.085052 -0.858747 -0.0484957 0.998084 -0.0384214 -0.0502897 0.997863 -0.0417292 -0.155339 0.977538 -0.14244 -0.262074 0.927854 -0.265337 -0.349616 0.851461 -0.390874 -0.41563 0.750579 -0.513696 -0.458933 0.628095 -0.628392 -0.479429 0.487582 -0.729665 -0.478166 0.333068 -0.812665 -0.457209 0.168967 -0.873161 -0.444052 -0.000193937 -0.896001 -0.462199 0.0850101 -0.882692 -0.0464778 0.998082 -0.040892 -0.0481275 0.997866 -0.0441314 -0.148104 0.977538 -0.149947 -0.248637 0.927852 -0.277976 -0.329864 0.851457 -0.40769 -0.389723 0.750572 -0.533627 -0.427297 0.628095 -0.650319 -0.442762 0.487575 -0.752484 -0.437401 0.33305 -0.835319 -0.413471 0.168949 -0.894706 -0.3993 -0.000144571 -0.91682 -0.417983 0.0849875 -0.904471 -0.0443503 0.998078 -0.0432717 -0.0458342 0.997871 -0.0464047 -0.140505 0.977539 -0.157086 -0.23458 0.927854 -0.289928 -0.309294 0.851461 -0.423499 -0.362858 0.750571 -0.552247 -0.394612 0.628095 -0.670655 -0.405005 0.48758 -0.773458 -0.395552 0.33306 -0.855926 -0.368721 0.168942 -0.914059 -0.353573 -0.000113591 -0.935407 -0.372746 0.0849506 -0.924037 -0.0421161 0.998074 -0.0455573 -0.0434175 0.997877 -0.048541 -0.132567 0.977538 -0.163844 -0.219957 0.927853 -0.301176 -0.28797 0.851462 -0.438275 -0.335098 0.750577 -0.569512 -0.36096 0.628098 -0.689348 -0.366254 0.487588 -0.792538 -0.352735 0.333072 -0.874438 -0.323063 0.168951 -0.931174 -0.306988 -6.01198e-05 -0.951713 -0.326592 0.0849016 -0.941345 -0.0397784 0.998067 -0.0477448 -0.0408862 0.997885 -0.0505327 -0.124302 0.977538 -0.1702 -0.204794 0.927853 -0.311685 -0.265945 0.851461 -0.451982 -0.306526 0.750576 -0.585387 -0.326433 0.62809 -0.70636 -0.326619 0.487576 -0.809685 -0.309064 0.333059 -0.890815 -0.276618 0.168941 -0.946013 -0.259653 -4.24374e-07 -0.965702 -0.27964 0.0848757 -0.956346 -0.0373407 0.998059 -0.0498302 -0.0382507 0.997895 -0.0523748 -0.115732 0.977538 -0.176138 -0.189129 0.927854 -0.32143 -0.243269 0.851459 -0.464584 -0.2772 0.750579 -0.599826 -0.2911 0.628095 -0.721635 
-0.286175 0.487579 -0.824846 -0.264629 0.333063 -0.905008 -0.229499 0.168939 -0.958535 -0.211686 3.72035e-05 -0.977338 -0.232007 0.0848325 -0.969008 -0.034807 0.99805 -0.0518114 -0.0355204 0.997906 -0.0540605 -0.10688 0.977539 -0.181645 -0.173001 0.927854 -0.33039 -0.219994 0.851459 -0.476046 -0.247195 0.750586 -0.612794 -0.255054 0.628094 -0.73515 -0.245031 0.487583 -0.837987 -0.219547 0.333065 -0.916988 -0.181813 0.168955 -0.96871 -0.1632 8.21872e-05 -0.986593 -0.183801 0.0847849 -0.9793 -0.0321802 0.998039 -0.0536836 -0.0327053 0.997918 -0.0555836 -0.0977659 0.977539 -0.186709 -0.156452 0.927853 -0.338543 -0.19619 0.851456 -0.486347 -0.216587 0.75058 -0.624275 -0.218387 0.628093 -0.746864 -0.203292 0.487579 -0.849081 -0.173935 0.333054 -0.926726 -0.133681 0.168949 -0.976517 -0.114318 0.000139195 -0.993444 -0.135143 0.0847351 -0.987196 -0.0294636 0.998027 -0.0554426 -0.029816 0.997932 -0.0569411 -0.0884125 0.977539 -0.191316 -0.139518 0.927855 -0.345862 -0.171894 0.85146 -0.495449 -0.18545 0.750576 -0.634228 -0.181186 0.628099 -0.756746 -0.161053 0.487576 -0.858098 -0.127889 0.333057 -0.934193 -0.085231 0.168943 -0.981934 -0.0651627 0.000169698 -0.997875 -0.0861635 0.0846706 -0.992677 -0.0266615 0.998013 -0.0570861 -0.0268636 0.997948 -0.0581295 -0.0788444 0.977538 -0.195456 -0.122241 0.927857 -0.352333 -0.14718 0.851458 -0.503347 -0.15386 0.750576 -0.642621 -0.143535 0.628103 -0.764778 -0.118417 0.487588 -0.865006 -0.0815353 0.333057 -0.939375 -0.0365628 0.168949 -0.984946 -0.0158433 0.000233508 -0.999874 -0.036963 0.0846207 -0.995727 -0.0237774 0.997998 -0.0586112 -0.0238587 0.997964 -0.0591464 -0.0690844 0.977539 -0.199112 -0.104667 0.927854 -0.357954 -0.122111 0.851457 -0.51001 -0.121892 0.750573 -0.649448 -0.105543 0.628095 -0.770945 -0.0754947 0.487579 -0.869809 -0.0349785 0.333066 -0.942255 0.0121902 0.168947 -0.98555 0.0335135 0.000286637 -0.999438 0.0123237 0.0845698 -0.996341 -0.0208213 0.99798 -0.0600133 -0.0591468 0.977541 -0.202276 -0.086838 0.927853 -0.362694 -0.0967353 0.85146 -0.515421 -0.0896266 0.750577 -0.654676 -0.0672876 0.628093 -0.775223 -0.032389 0.487571 -0.872483 0.0116616 0.333064 -0.942832 0.060914 0.168946 -0.983741 0.0827891 0.000337875 -0.996567 0.0615813 0.0845119 -0.994518 -0.0176096 0.998 -0.0607099 -0.0149124 0.997947 -0.0622844 -0.0144095 0.998015 -0.0613124 -0.0118605 0.997931 -0.0631929 -0.0285864 0.977538 -0.208811 -0.0322497 0.927854 -0.371548 -0.0194557 0.851455 -0.524067 0.00817286 0.750578 -0.660731 0.0480907 0.628096 -0.776649 0.0969889 0.487578 -0.867675 0.150959 0.333063 -0.930742 0.205717 0.168951 -0.963917 0.228925 0.000520283 -0.973444 0.207973 0.0843346 -0.974492 0.180614 0.000463948 -0.983554 0.159528 0.0843944 -0.983579 0.10949 0.168952 -0.979524 0.0582746 0.333061 -0.941103 0.0107972 0.48758 -0.873011 -0.0288668 0.628093 -0.777603 -0.0571374 0.75058 -0.658305 -0.0711289 0.851458 -0.519577 -0.0687944 0.927855 -0.366541 -0.0490734 0.977539 -0.204964 -0.0388772 0.977538 -0.20714 -0.0388779 0.977538 -0.207142 -0.0111898 0.998028 -0.0617604 -0.00874966 0.997917 -0.0639142 -0.0182237 0.977539 -0.209967 -0.0138335 0.927856 -0.372682 0.00648592 0.85146 -0.524379 0.0408399 0.750577 -0.65952 0.0864413 0.628102 -0.773315 0.139779 0.487578 -0.861818 0.196802 0.333059 -0.922139 0.253137 0.168954 -0.952563 0.276679 0.000603037 -0.960962 0.255915 0.0842778 -0.963019 -0.00796841 0.99804 -0.0620628 -0.00559492 0.997905 -0.0644591 -0.00781917 0.977538 -0.210614 0.0046125 0.927853 -0.372918 0.0324103 0.85146 -0.523417 0.0734037 0.750572 -0.656699 
0.124578 0.628095 -0.768099 0.182228 0.487576 -0.853851 0.242162 0.333059 -0.911279 0.299931 0.168942 -0.938882 0.323761 0.000645189 -0.946139 0.303224 0.0842191 -0.94919 -0.00474769 0.998051 -0.062221 -0.00240649 0.997894 -0.0648243 0.00260707 0.977538 -0.210742 0.0230482 0.927854 -0.372231 0.0582563 0.851459 -0.521176 0.105791 0.750575 -0.652262 0.16241 0.628096 -0.760998 0.22423 0.487573 -0.843797 0.286934 0.333061 -0.898186 0.345998 0.168956 -0.922897 0.370053 0.000714789 -0.929011 0.3498 0.0841385 -0.933038 -0.00153247 0.99806 -0.0622367 0.00080433 0.997884 -0.0650089 0.0130251 0.977539 -0.210354 0.0414292 0.927854 -0.370635 0.08396 0.851459 -0.517657 0.137918 0.750577 -0.646229 0.199847 0.628102 -0.75203 0.265685 0.487581 -0.831671 0.331002 0.333058 -0.882899 0.391217 0.168952 -0.904657 0.415441 0.000800795 -0.90962 0.395518 0.0840762 -0.914602 0.0016725 0.998068 -0.0621113 0.00402538 0.997876 -0.0650107 0.0234121 0.977538 -0.209453 0.059709 0.927854 -0.368134 0.109456 0.85146 -0.512869 0.169706 0.750575 -0.63862 0.236792 0.628094 -0.741233 0.306489 0.487578 -0.817516 0.37426 0.333055 -0.86545 0.435474 0.168947 -0.884206 0.459819 0.000861063 -0.888012 0.440263 0.0840169 -0.89393 0.00486218 0.998074 -0.0618425 0.00724647 0.99787 -0.0648279 0.0337423 0.977539 -0.208037 0.0778396 0.927854 -0.364731 0.134685 0.851458 -0.506832 0.201083 0.750577 -0.629445 0.273159 0.628101 -0.728611 0.346545 0.48758 -0.801356 0.416601 0.333061 -0.84588 0.478671 0.168953 -0.861585 0.503077 0.00094678 -0.864241 0.483938 0.0839461 -0.871066 0.00803177 0.998079 -0.0614337 0.0104548 0.997865 -0.0644611 0.0439893 0.977538 -0.206118 0.0957815 0.927855 -0.360433 0.159586 0.851459 -0.49955 0.231963 0.750575 -0.618733 0.308859 0.628095 -0.714215 0.385749 0.487575 -0.783242 0.457923 0.333052 -0.824247 0.520693 0.168942 -0.836862 0.545109 0.0010144 -0.838365 0.526425 0.0838771 -0.846074 0.0111759 0.998082 -0.0608841 0.0136399 0.997862 -0.0639116 0.0541283 0.977538 -0.20369 0.11349 0.927854 -0.355257 0.184096 0.851458 -0.491049 0.262281 0.75058 -0.606497 0.343802 0.628104 -0.698059 0.42401 0.48759 -0.763198 0.498122 0.333063 -0.800589 0.561442 0.168936 -0.810089 0.585812 0.00108683 -0.810446 0.567626 0.0837959 -0.819011 0.0142916 0.998085 -0.0601883 0.0167861 0.997862 -0.0631668 0.0641353 0.977538 -0.200762 0.130922 0.927854 -0.349208 0.208153 0.851462 -0.481336 0.291952 0.750574 -0.592793 0.377902 0.628091 -0.680215 0.461237 0.487574 -0.741304 0.537107 0.33307 -0.774972 0.600817 0.168946 -0.78133 0.62509 0.00116519 -0.780552 0.607439 0.0837236 -0.789942 0.0173658 0.998086 -0.0593579 0.0198946 0.997861 -0.0622647 0.0739846 0.977541 -0.197334 0.14803 0.927854 -0.342306 0.231702 0.851461 -0.470455 0.32091 0.750577 -0.577626 0.411079 0.628097 -0.660688 0.497333 0.487578 -0.717585 0.574775 0.333066 -0.747463 0.63872 0.168947 -0.750662 0.662843 0.00123437 -0.748758 0.645765 0.0836378 -0.758942 0.0204096 0.998084 -0.05841 0.0229395 0.997863 -0.0611876 0.0836539 0.977538 -0.193445 0.164777 0.927853 -0.334571 0.254685 0.85146 -0.458423 0.349082 0.750579 -0.561046 0.44325 0.628096 -0.639551 0.532209 0.487578 -0.692113 0.611037 0.333056 -0.718127 0.675062 0.168948 -0.718156 0.69898 0.00132291 -0.71514 0.682513 0.0835543 -0.726082 0.0234048 0.998082 -0.0573137 0.0259174 0.997866 -0.0599321 0.0931182 0.977539 -0.18907 0.18112 0.927854 -0.32601 0.277046 0.851457 -0.445271 0.376402 0.750576 -0.5431 0.474335 0.628095 -0.61685 0.565787 0.487578 -0.664946 0.645805 0.333056 -0.68703 0.709752 0.168949 -0.683892 0.733418 0.00141599 
-0.679776 0.71759 0.0834889 -0.691444 0.0263511 0.998078 -0.0560842 0.0288155 0.997871 -0.0585085 0.102355 0.977538 -0.184236 0.19702 0.927855 -0.316652 0.298728 0.851458 -0.431023 0.402798 0.75058 -0.523816 0.504259 0.628101 -0.592632 0.597978 0.487584 -0.636148 0.678988 0.333059 -0.654253 0.742706 0.168937 -0.647957 0.766066 0.00150299 -0.64276 0.750911 0.0834078 -0.655115 0.0292427 0.998073 -0.0547223 0.0316255 0.997878 -0.0569245 0.111341 0.977538 -0.178948 0.212439 0.927855 -0.306521 0.319678 0.851456 -0.415726 0.428212 0.750576 -0.50326 0.532951 0.628096 -0.566973 0.628708 0.487578 -0.605801 0.710513 0.333067 -0.619869 0.773841 0.168942 -0.610434 0.796851 0.00158518 -0.604174 0.782395 0.0833304 -0.617182 0.0320764 0.998067 -0.0532318 0.0343365 0.997886 -0.0551858 0.120054 0.977538 -0.173223 0.227339 0.927854 -0.295642 0.339844 0.851458 -0.399405 0.452576 0.750576 -0.481468 0.560338 0.628098 -0.539921 0.657896 0.487579 -0.573968 0.740301 0.33306 -0.583974 0.80308 0.168947 -0.571419 0.82569 0.00165959 -0.564121 0.811966 0.0832259 -0.577741 0.0348468 0.998059 -0.0516132 0.036939 0.997895 -0.0532983 0.128474 0.977538 -0.167073 0.241682 0.927853 -0.284039 0.359181 0.851458 -0.38211 0.475834 0.750574 -0.458498 0.586354 0.628094 -0.511554 0.685478 0.487574 -0.540733 0.768276 0.333048 -0.546655 0.830358 0.168947 -0.531002 0.852521 0.00175818 -0.52269 0.839551 0.083141 -0.536881 0.037549 0.99805 -0.0498684 0.0394251 0.997906 -0.0512702 0.136579 0.977538 -0.160515 0.255431 0.927855 -0.271737 0.377638 0.851459 -0.363878 0.497924 0.750577 -0.434403 0.610933 0.6281 -0.481925 0.711378 0.487583 -0.506166 0.794367 0.333059 -0.507988 0.855601 0.168943 -0.489291 0.877273 0.00183684 -0.479987 0.86508 0.0830502 -0.494712 0.0401791 0.998039 -0.0480005 0.041787 0.997919 -0.0491102 0.144349 0.977538 -0.153564 0.268558 0.927854 -0.258774 0.395171 0.851458 -0.34476 0.518801 0.750572 -0.409252 0.634023 0.628092 -0.451127 0.73554 0.487579 -0.470369 0.818517 0.333058 -0.468083 0.878751 0.168945 -0.446379 0.899888 0.0019302 -0.436118 0.888494 0.0829467 -0.451329 0.0427334 0.998026 -0.0460117 0.044015 0.997933 -0.0468249 0.151766 0.977539 -0.146237 0.281025 0.927854 -0.245175 0.411736 0.85146 -0.324792 0.538402 0.750577 -0.383089 0.655555 0.628094 -0.41922 0.757902 0.487578 -0.433419 0.840665 0.333059 -0.427029 0.899751 0.168957 -0.402371 0.920312 0.00204619 -0.391179 0.909734 0.0828727 -0.406836 0.0452052 0.998013 -0.0439032 0.0461046 0.997948 -0.0444249 0.158813 0.977538 -0.138554 0.292809 0.927854 -0.230977 0.427294 0.851461 -0.304031 0.556686 0.750579 -0.355995 0.675487 0.628092 -0.386288 0.778409 0.487578 -0.395407 0.860755 0.333053 -0.384937 0.907153 0.168987 -0.385379 0.914941 0.146549 -0.37604 0.926066 0.0491023 -0.374153 0.0475926 0.997997 -0.0416784 0.0480478 0.997965 -0.0419183 0.165471 0.977539 -0.130529 0.303872 0.927854 -0.216214 0.441808 0.851459 -0.282531 0.573609 0.750581 -0.328026 0.693761 0.628096 -0.352407 0.797009 0.487582 -0.356427 0.868556 0.33315 -0.366908 0.900397 0.242981 -0.360894 0.0498825 0.99798 -0.0393492 0.171724 0.977538 -0.122188 0.314196 0.927853 -0.200925 0.455241 0.851458 -0.260337 0.589131 0.750579 -0.29926 0.710343 0.628092 -0.31767 0.801671 0.48767 -0.345689 0.838925 0.4277 -0.336566 0.868328 0.336034 -0.364812 0.0515902 0.998 -0.036533 0.0539782 0.997945 -0.0345183 0.053199 0.998015 -0.0336968 0.0558228 0.99793 -0.0319232 0.187905 0.977538 -0.0954495 0.340449 0.927855 -0.152251 0.477739 0.851426 -0.216422 0.554166 0.803105 -0.218912 0.604104 0.750603 -0.267684 0.619677 
0.740052 -0.261387 0.687676 0.671382 -0.276312 0.744291 0.595356 -0.302626 0.79377 0.513637 -0.32574 0.0546756 0.998029 -0.0308056 0.0575238 0.997916 -0.0292204 0.192397 0.977538 -0.0860417 0.312343 0.941372 -0.12752 0.322242 0.935624 -0.14411 0.352429 0.921517 -0.163097 0.398201 0.903832 -0.156599 0.472896 0.856796 -0.205597 0.0560172 0.998041 -0.0278618 0.0590718 0.997904 -0.0264171 0.184327 0.977222 -0.105167 0.22489 0.969902 -0.0933553 0.0572236 0.998052 -0.0248717 0.0398816 0.998689 -0.0320789 0.139047 0.98909 -0.0486459 0.712536 0.628154 -0.312596 0.71034 0.628096 -0.317667 0.603211 0.750578 -0.269758 0.589131 0.750578 -0.299261 0.467557 0.851459 -0.237505 0.332505 0.927854 -0.168902 0.332503 0.927855 -0.168901 0.131861 0.000393255 -0.991268 0.110689 0.0844471 -0.990261 0.0609142 0.168947 -0.983741 0.0116603 0.333059 -0.942834 -0.0323863 0.487578 -0.872478 -0.0672852 0.628099 -0.775219 -0.0896252 0.75058 -0.654674 -0.0967389 0.851456 -0.515427 -0.0868371 0.927854 -0.362693 -0.0591529 0.977539 -0.202285 -0.0208065 0.997982 -0.0599897 -0.0179093 0.997963 -0.0612359 -0.868654 -0.000531601 -0.495419 -0.876881 0.085351 -0.473069 -0.889523 0.168942 -0.424508 -0.870011 0.333061 -0.363525 -0.82125 0.487568 -0.296353 -0.744109 0.62809 -0.227606 -0.640669 0.750576 -0.161795 -0.514187 0.851458 -0.103107 -0.368842 0.927854 -0.0551494 -0.209727 0.977538 -0.0208207 -0.0631541 0.997998 -0.00325795 -0.0637342 0.997947 -0.00632725 -0.754589 -0.000450687 0.656198 -0.734359 0.0852619 0.673385 -0.692621 0.168948 0.701237 -0.628613 0.333053 0.702795 -0.549166 0.487573 0.678741 -0.458933 0.628095 0.628392 -0.362856 0.750571 0.552249 -0.265945 0.851461 0.451982 -0.173002 0.927853 0.330392 -0.0884135 0.977538 0.191316 -0.0237774 0.997998 0.0586116 -0.0268637 0.997948 0.0581299 0.370053 0.000724974 0.929011 0.395515 0.0840781 0.914603 0.435476 0.168947 0.884205 0.457926 0.333047 0.824247 0.461237 0.487574 0.741304 0.443248 0.628099 0.639549 0.402798 0.750578 0.523819 0.339843 0.85146 0.399404 0.255431 0.927855 0.271737 0.151767 0.977538 0.146238 0.0475926 0.997997 0.0416784 0.0461046 0.997948 0.0444249 0.733418 0.00141599 0.679776 0.75091 0.0834021 0.655117 0.77384 0.168947 0.610433 0.768278 0.333048 0.546651 0.735542 0.487575 0.470371 0.675485 0.628094 0.386286 0.589132 0.750578 0.29926 0.478732 0.851458 0.214093 0.467559 0.851458 0.237505 0.766066 0.00149097 0.64276 0.782396 0.0833216 0.617182 0.80308 0.168947 0.571419 0.794366 0.33306 0.507989 0.757903 0.487577 0.433419 0.693761 0.628096 0.352409 0.603208 0.75058 0.269759 0.589129 0.75058 0.299259 0.79685 0.00158263 0.604175 0.811966 0.0832247 0.577742 0.830357 0.168948 0.531003 0.818519 0.333055 0.468082 0.778411 0.487576 0.395406 0.710344 0.628091 0.317668 0.693765 0.628091 0.352409 0.82569 0.00165647 0.564121 0.839552 0.0831404 0.536881 0.855601 0.168942 0.48929 0.840664 0.33306 0.427031 0.79701 0.487581 0.356428 0.778408 0.487581 0.395407 0.852521 0.00176285 0.522691 0.865079 0.0830556 0.494711 0.878753 0.168944 0.446376 0.860756 0.333052 0.384935 0.840667 0.333052 0.42703 0.877274 0.00183867 0.479987 0.888494 0.0829476 0.451328 0.899749 0.168956 0.402376 0.878749 0.168956 0.446378 0.899888 0.00192949 0.436117 0.909734 0.0828688 0.406837 0.165472 0.977538 0.13053 -0.0690817 0.977538 0.199116 -0.210759 0.977538 0 0.0498491 0.997983 -0.0393077 0.0519952 0.997962 -0.0369962 0.177556 0.977539 -0.113544 0.323745 0.927854 -0.185138 0.45524 0.851459 -0.260336 0.187904 0.977538 0.0954498 0.182955 0.977538 0.104626 0.455239 0.851459 0.260336 0.455241 0.851458 
0.260336 0.323744 0.927855 0.185138 0.573611 0.750578 0.328028 0.675484 0.628096 0.386286 0.757904 0.487576 0.433419 0.818517 0.33306 0.468083 0.855601 0.168944 0.489291 0.177556 0.977538 0.113545 0.314196 0.927853 0.200923 0.556685 0.75058 0.355994 0.556685 0.75058 0.355994 0.441807 0.851459 0.282531 0.655555 0.628094 0.41922 0.735541 0.487577 0.470371 0.794369 0.333055 0.507989 0.830359 0.168942 0.531002 0.171725 0.977538 0.122187 0.303873 0.927853 0.216216 0.427293 0.851461 0.304032 0.634022 0.628093 0.451126 0.634022 0.628093 0.451126 0.538402 0.750578 0.383088 0.711382 0.487575 0.506168 0.768272 0.33306 0.546652 0.80308 0.168948 0.571419 0.292808 0.927854 0.230977 0.411736 0.85146 0.324792 0.5188 0.750575 0.409248 0.610933 0.628097 0.481928 0.685476 0.487581 0.540729 0.685477 0.487578 0.540729 0.740304 0.333048 0.583978 0.77384 0.168947 0.610433 0.158814 0.977538 0.138554 0.281026 0.927854 0.245175 0.268558 0.927854 0.258773 0.395171 0.851459 0.344759 0.377638 0.851459 0.363879 0.497922 0.750579 0.434403 0.475835 0.750574 0.458497 0.586357 0.628091 0.511554 0.560336 0.628098 0.539923 0.657894 0.487582 0.573968 0.628708 0.487578 0.6058 0.710516 0.333053 0.619873 0.710514 0.333058 0.619873 0.678991 0.333052 0.654253 0.742703 0.168947 0.647958 0.70975 0.168949 0.683894 0.709754 0.168938 0.683893 0.144349 0.977538 0.153564 0.359182 0.851457 0.382111 0.452573 0.750579 0.481465 0.532953 0.628094 0.566974 0.597976 0.487584 0.63615 0.645806 0.333052 0.68703 0.675064 0.168944 0.718155 0.24168 0.927855 0.284037 0.428214 0.750574 0.50326 0.504257 0.628101 0.592634 0.565788 0.487578 0.664945 0.611039 0.333049 0.71813 0.638719 0.168947 0.750663 0.22734 0.927853 0.295643 0.227338 0.927854 0.295642 0.128474 0.977538 0.167074 0.319675 0.851459 0.415722 0.474335 0.628097 0.616847 0.532212 0.487574 0.692114 0.574774 0.333058 0.747467 0.600817 0.168946 0.78133 0.120053 0.977538 0.173222 0.298728 0.851458 0.431024 0.298728 0.851458 0.431024 0.21244 0.927854 0.306522 0.376401 0.750578 0.543097 0.497334 0.487575 0.717586 0.537108 0.333058 0.774976 0.561442 0.168945 0.810087 0.111341 0.977538 0.178948 0.19702 0.927855 0.316653 0.349083 0.750576 0.56105 0.349082 0.750577 0.561049 0.277046 0.851457 0.445271 0.411078 0.628099 0.660687 0.498126 0.333051 0.800592 0.52069 0.168951 0.836861 0.102355 0.977538 0.184235 0.181121 0.927854 0.326009 0.254686 0.851458 0.458425 0.3779 0.6281 0.680208 0.377904 0.628093 0.680211 0.32091 0.750577 0.577625 0.424013 0.487575 0.763206 0.478669 0.168949 0.861587 0.0931175 0.977539 0.18907 0.164777 0.927854 0.334569 0.231702 0.85146 0.470457 0.291954 0.750574 0.592792 0.385747 0.487582 0.783239 0.385751 0.487575 0.783241 0.3438 0.628096 0.698067 0.416603 0.333047 0.845885 0.0836549 0.977538 0.193445 0.148029 0.927854 0.342308 0.208157 0.851454 0.481349 0.262274 0.75058 0.6065 0.308859 0.628095 0.714215 0.374259 0.333053 0.865451 0.37426 0.333051 0.865452 0.346543 0.487577 0.801359 0.391215 0.168947 0.904659 0.0739886 0.977536 0.197355 0.130917 0.927853 0.349211 0.184092 0.851459 0.491047 0.231965 0.750575 0.618732 0.273157 0.628098 0.728614 0.306487 0.487582 0.817514 0.345997 0.168947 0.922899 0.345999 0.168941 0.922899 0.331003 0.333053 0.8829 0.0641321 0.977538 0.200763 0.113491 0.927853 0.355258 0.159585 0.85146 0.499548 0.201082 0.750577 0.629445 0.236794 0.628098 0.74123 0.265687 0.487577 0.831673 0.286935 0.333047 0.898191 0.299932 0.168947 0.938881 0.054129 0.977538 0.20369 0.095781 0.927855 0.360433 0.134686 0.85146 0.506829 0.169708 0.750575 0.63862 0.199846 0.628096 
0.752035 0.22423 0.487577 0.843795 0.242163 0.333054 0.911281 0.253136 0.168949 0.952564 0.0439885 0.977538 0.206116 0.077841 0.927853 0.364731 0.109455 0.85146 0.512869 0.137918 0.750575 0.646232 0.162411 0.628096 0.760998 0.182228 0.487576 0.853851 0.196802 0.333055 0.922141 0.205718 0.168945 0.963918 0.0337421 0.977538 0.208039 0.0597075 0.927854 0.368132 0.0839598 0.851459 0.517657 0.105791 0.750575 0.652262 0.124578 0.628098 0.768097 0.13978 0.487574 0.86182 0.150958 0.333053 0.930746 0.157797 0.168946 0.972912 0.023412 0.977538 0.209453 0.0414286 0.927854 0.370635 0.0582557 0.851459 0.521176 0.0734032 0.750577 0.656693 0.0864402 0.628099 0.773318 0.0969876 0.487578 0.867676 0.104746 0.333046 0.937075 0.109489 0.168947 0.979525 0.0130256 0.977538 0.210355 0.0230492 0.927854 0.372231 0.0324102 0.85146 0.523417 0.0408392 0.750574 0.659523 0.0480903 0.628099 0.776646 0.0539584 0.487576 0.871411 0.0582732 0.333054 0.941105 0.0609158 0.168946 0.983741 0.00260672 0.977538 0.210742 0.00461269 0.927854 0.372916 0.00648587 0.85146 0.524379 0.00817243 0.750578 0.660731 0.0096256 0.628096 0.778077 0.0107986 0.487579 0.873012 0.0116633 0.333049 0.942837 0.0121899 0.168947 0.98555 -0.00781824 0.977538 0.210612 -0.0138344 0.927854 0.372688 -0.0194534 0.851458 0.524062 -0.0245133 0.750575 0.66033 -0.0288678 0.628099 0.777598 -0.0323891 0.487575 0.87248 -0.0349808 0.333056 0.942258 -0.0365633 0.168949 0.984946 -0.0182247 0.977539 0.209967 -0.032249 0.927854 0.371548 -0.0453476 0.851458 0.522458 -0.057139 0.750575 0.65831 -0.0672866 0.628096 0.775221 -0.0754972 0.487583 0.869806 -0.0815329 0.333048 0.939378 -0.0852293 0.168948 0.981933 -0.0285856 0.977538 0.208811 -0.0505844 0.927854 0.369497 -0.0711296 0.851458 0.519576 -0.0896245 0.750577 0.654676 -0.10554 0.628095 0.770946 -0.118418 0.487579 0.865011 -0.127891 0.333053 0.934195 -0.133683 0.168944 0.976518 -0.0388776 0.977538 0.20714 -0.0687947 0.927854 0.366543 -0.0967372 0.851458 0.515423 -0.121891 0.750577 0.649443 -0.143538 0.628099 0.76478 -0.161051 0.487576 0.858098 -0.173933 0.333049 0.926729 -0.181813 0.16895 0.96871 -0.0490735 0.977538 0.204965 -0.0868376 0.927854 0.362692 -0.122109 0.851458 0.510008 -0.15386 0.750574 0.642624 -0.181185 0.628099 0.756746 -0.203293 0.487579 0.849081 -0.219551 0.333056 0.91699 -0.229496 0.168938 0.958536 -0.0591495 0.977536 0.202297 -0.104668 0.927854 0.357956 -0.14718 0.851459 0.503344 -0.185451 0.750576 0.634227 -0.218385 0.628097 0.746862 -0.245032 0.487579 0.837989 -0.26463 0.333049 0.905014 -0.27662 0.168952 0.946011 -0.122243 0.927851 0.352348 -0.171894 0.851458 0.495451 -0.21659 0.750578 0.624277 -0.255055 0.628097 0.735147 -0.286174 0.487583 0.824844 -0.309063 0.333045 0.890821 -0.323064 0.168951 0.931174 -0.0788437 0.977538 0.195455 -0.139516 0.927854 0.345864 -0.156451 0.927854 0.338541 -0.196183 0.851457 0.486347 -0.219996 0.851459 0.476045 -0.247198 0.75057 0.612811 -0.277198 0.750579 0.599827 -0.291098 0.628101 0.721631 -0.326427 0.62809 0.706363 -0.326618 0.487573 0.809687 -0.366258 0.487579 0.792541 -0.352741 0.333063 0.874438 -0.39555 0.333051 0.855931 -0.368718 0.168942 0.91406 -0.413472 0.168954 0.894704 -0.0977663 0.977538 0.18671 -0.243268 0.85146 0.464582 -0.306525 0.750576 0.585387 -0.360963 0.628098 0.689347 -0.405003 0.48758 0.773459 -0.437397 0.333046 0.835323 -0.457212 0.168958 0.873161 -0.18913 0.927853 0.321432 -0.335099 0.750577 0.569512 -0.394611 0.628095 0.670656 -0.442761 0.487575 0.752485 -0.478172 0.333059 0.812664 -0.499833 0.168937 0.849486 -0.204793 0.927854 0.311685 
-0.204792 0.927855 0.311683 -0.115733 0.977538 0.176139 -0.287971 0.851461 0.438277 -0.427297 0.628095 0.650318 -0.479431 0.487582 0.729664 -0.517774 0.333058 0.788024 -0.541233 0.168941 0.823727 -0.124302 0.977538 0.170199 -0.309295 0.85146 0.423501 -0.309294 0.851461 0.423498 -0.219957 0.927853 0.301176 -0.389724 0.750571 0.533628 -0.514926 0.487587 0.70506 -0.556112 0.33305 0.761457 -0.581307 0.168942 0.795952 -0.132567 0.977538 0.163844 -0.234581 0.927855 0.289927 -0.415635 0.750571 0.513703 -0.415631 0.750581 0.513692 -0.329862 0.851457 0.407691 -0.489446 0.628096 0.604929 -0.59309 0.333056 0.73302 -0.619958 0.168955 0.766229 -0.140506 0.977539 0.157086 -0.248635 0.927852 0.277977 -0.349618 0.851461 0.390872 -0.518765 0.628094 0.579983 -0.518764 0.628101 0.579978 -0.44053 0.750574 0.492516 -0.582062 0.487573 0.650751 -0.657094 0.168935 0.734635 -0.148104 0.977538 0.149948 -0.262076 0.927854 0.265336 -0.36852 0.851459 0.373108 -0.464347 0.750575 0.470126 -0.613532 0.487577 0.621166 -0.613532 0.487575 0.621167 -0.546811 0.628097 0.553617 -0.662603 0.333053 0.670845 -0.155338 0.977538 0.14244 -0.274878 0.927853 0.252053 -0.386521 0.851461 0.354425 -0.487026 0.750578 0.446585 -0.573521 0.628094 0.5259 -0.694966 0.333049 0.63726 -0.694966 0.333047 0.637261 -0.643499 0.487578 0.590065 -0.726448 0.168948 0.666131 -0.162193 0.977538 0.134584 -0.287009 0.927852 0.238153 -0.403576 0.85146 0.334876 -0.508518 0.750576 0.421954 -0.598823 0.628105 0.496885 -0.671892 0.487577 0.557521 -0.758503 0.168958 0.629386 -0.758503 0.168951 0.629388 -0.725631 0.333047 0.602112 -0.168649 0.977539 0.126395 -0.298433 0.927853 0.223665 -0.419638 0.851463 0.314507 -0.528763 0.750573 0.396295 -0.622668 0.628093 0.466673 -0.698644 0.487576 0.523608 -0.754519 0.33306 0.565484 -0.7887 0.168956 0.591106 -0.174695 0.977538 0.117904 -0.309129 0.927853 0.208633 -0.434685 0.851459 0.293372 -0.547713 0.750578 0.369652 -0.644988 0.628092 0.435306 -0.723677 0.48759 0.488413 -0.781559 0.333057 0.527484 -0.816969 0.16894 0.551381 -0.180311 0.977538 0.109118 -0.319066 0.927854 0.193089 -0.448657 0.851462 0.271514 -0.565324 0.750575 0.34212 -0.665725 0.628091 0.402879 -0.746946 0.487586 0.452031 -0.806692 0.33305 0.488186 -0.843238 0.168946 0.5103 -0.185485 0.977538 0.100069 -0.328225 0.927853 0.177077 -0.461547 0.851453 0.249002 -0.581548 0.750581 0.313736 -0.68483 0.628099 0.369458 -0.768389 0.487579 0.414541 -0.829847 0.333053 0.447694 -0.867442 0.168943 0.467977 -0.190209 0.977538 0.0907726 -0.33658 0.927855 0.160624 -0.473286 0.851461 0.225865 -0.596363 0.750567 0.284606 -0.702263 0.6281 0.335137 -0.787949 0.487585 0.376029 -0.850969 0.333059 0.406107 -0.889525 0.168934 0.424509 -0.194465 0.977538 0.0812548 -0.344109 0.927855 0.143783 -0.48388 0.851459 0.202185 -0.6097 0.750577 0.254756 -0.717975 0.6281 0.300003 -0.805586 0.487569 0.336611 -0.870014 0.333054 0.363526 -0.909429 0.168949 0.379993 -0.198244 0.977538 0.0715387 -0.350804 0.927853 0.126592 -0.493283 0.851462 0.178004 -0.621552 0.750577 0.224293 -0.731942 0.62809 0.264129 -0.82124 0.487588 0.296349 -0.886927 0.33305 0.320059 -0.927108 0.168944 0.334558 -0.201543 0.977538 0.0616488 -0.356628 0.927856 0.109084 -0.50149 0.851456 0.153398 -0.631885 0.750576 0.19328 -0.744098 0.628105 0.227603 -0.8349 0.487567 0.255384 -0.901665 0.333071 0.275798 -0.942519 0.168939 0.288301 -0.204342 0.977538 0.0516047 -0.361594 0.927853 0.0913193 -0.50846 0.851458 0.128406 -0.640668 0.750577 0.161795 -0.754448 0.628098 0.190529 -0.846504 0.487577 0.213775 -0.914204 0.333057 0.230875 
-0.955623 0.168953 0.24133 -0.206642 0.977539 0.0414349 -0.365664 0.927854 0.0733229 -0.514188 0.851458 0.103107 -0.64788 0.750582 0.129915 -0.762957 0.628084 0.152994 -0.856036 0.487585 0.171652 -0.924503 0.333058 0.185384 -0.966388 0.168946 0.193783 -0.20844 0.977538 0.0311669 -0.368846 0.927853 0.05515 -0.518654 0.85146 0.0775474 -0.653518 0.750578 0.0977135 -0.769584 0.628093 0.115065 -0.863476 0.487586 0.129108 -0.93254 0.333057 0.139435 -0.97479 0.16894 0.145752 -0.209729 0.977538 0.0208209 -0.371118 0.927855 0.0368418 -0.521854 0.85146 0.0518075 -0.657556 0.750572 0.0652796 -0.774335 0.62809 0.0768701 -0.868805 0.487586 0.0862469 -0.938299 0.333046 0.0931455 -0.980802 0.16896 0.0973611 -0.210501 0.977538 0.0104226 -0.372485 0.927855 0.0184432 -0.523776 0.851461 0.0259347 -0.659986 0.750567 0.0326798 -0.777178 0.628103 0.0384796 -0.87201 0.48758 0.0431806 -0.941751 0.333063 0.0466321 -0.984423 0.168921 0.0487506 -0.372941 0.927855 0 -0.524418 0.851461 0 -0.660795 0.750567 0 -0.77813 0.628103 0 -0.873078 0.48758 0 -0.942905 0.333063 0 -0.98563 0.168921 0 -0.210502 0.977538 -0.0104226 -0.372486 0.927854 -0.0184432 -0.371122 0.927853 -0.0368422 -0.523778 0.85146 -0.0259348 -0.521854 0.85146 -0.0518075 -0.65998 0.750573 -0.0326795 -0.65755 0.750578 -0.0652789 -0.777189 0.628089 -0.0384801 -0.774332 0.628093 -0.0768699 -0.872007 0.487586 -0.0431804 -0.868805 0.487586 -0.0862469 -0.941757 0.333046 -0.0466324 -0.938295 0.333058 -0.0931451 -0.984417 0.16896 -0.0487503 -0.980806 0.16894 -0.0973615 -0.20844 0.977538 -0.031166 -0.518658 0.851458 -0.077548 -0.653513 0.750582 -0.0977127 -0.769591 0.628083 -0.115066 -0.863477 0.487585 -0.129108 -0.93254 0.333058 -0.139435 -0.974789 0.168946 -0.145752 -0.365663 0.927854 -0.0733243 -0.647885 0.750577 -0.129916 -0.762946 0.628098 -0.152991 -0.85604 0.487576 -0.171653 -0.924503 0.333057 -0.185384 -0.966387 0.168953 -0.193783 -0.361594 0.927853 -0.0913166 -0.361585 0.927856 -0.0913172 -0.204346 0.977538 -0.0516056 -0.50846 0.851458 -0.128407 -0.754442 0.628105 -0.190528 -0.84651 0.487567 -0.213776 -0.9142 0.333071 -0.230874 -0.955625 0.168938 -0.241331 -0.201539 0.977538 -0.0616476 -0.501491 0.851456 -0.153395 -0.501481 0.851462 -0.153395 -0.356636 0.927853 -0.109087 -0.631885 0.750576 -0.193279 -0.834888 0.487588 -0.25538 -0.901672 0.33305 -0.2758 -0.942518 0.168944 -0.288301 -0.198244 0.977538 -0.0715389 -0.350798 0.927855 -0.12659 -0.621552 0.750577 -0.224294 -0.621554 0.750575 -0.224294 -0.493287 0.851459 -0.178006 -0.731943 0.628091 -0.264125 -0.886925 0.333057 -0.320059 -0.927107 0.168948 -0.334558 -0.194465 0.977538 -0.0812548 -0.344111 0.927854 -0.143783 -0.483877 0.851461 -0.202184 -0.717974 0.628102 -0.300003 -0.717976 0.628099 -0.300003 -0.60971 0.750567 -0.254761 -0.805589 0.487568 -0.336607 -0.909431 0.168936 -0.379995 -0.190207 0.977539 -0.0907718 -0.336583 0.927853 -0.160625 -0.473297 0.851453 -0.22587 -0.596349 0.750581 -0.284599 -0.787947 0.487586 -0.376031 -0.787952 0.487578 -0.376031 -0.702265 0.628098 -0.335138 -0.850968 0.333061 -0.406108 -0.185486 0.977538 -0.100069 -0.328224 0.927854 -0.177076 -0.461533 0.851462 -0.248996 -0.581555 0.750574 -0.31374 -0.684834 0.628093 -0.369461 -0.829845 0.333057 -0.447694 -0.829847 0.333054 -0.447694 -0.768386 0.487585 -0.414539 -0.867442 0.168942 -0.467977 -0.180313 0.977538 -0.109119 -0.319068 0.927853 -0.19309 -0.448659 0.85146 -0.271516 -0.565322 0.750577 -0.342119 -0.665725 0.628092 -0.402878 -0.746943 0.487591 -0.45203 -0.843238 0.168943 -0.510302 -0.843239 0.168937 -0.510301 -0.80669 
0.333059 -0.488184 -0.174693 0.977538 -0.117902 -0.309129 0.927853 -0.208633 -0.434679 0.851463 -0.293368 -0.547717 0.750574 -0.369655 -0.644987 0.628094 -0.435305 -0.723684 0.487576 -0.488417 -0.781557 0.333063 -0.527483 -0.816967 0.168952 -0.55138 -0.168651 0.977538 -0.126397 -0.298436 0.927852 -0.223667 -0.419642 0.85146 -0.314509 -0.52876 0.750576 -0.396293 -0.622661 0.628105 -0.466667 -0.698642 0.48758 -0.523608 -0.754523 0.333045 -0.565487 -0.788699 0.168953 -0.591108 -0.162193 0.977538 -0.134584 -0.287007 0.927853 -0.238152 -0.403574 0.851462 -0.334875 -0.508518 0.750576 -0.421953 -0.598829 0.628095 -0.496891 -0.671891 0.487579 -0.55752 -0.72563 0.33305 -0.602112 -0.758502 0.168961 -0.629386 -0.155338 0.977538 -0.14244 -0.274876 0.927854 -0.252052 -0.386524 0.851458 -0.354427 -0.487026 0.750577 -0.446587 -0.573521 0.628095 -0.525899 -0.643499 0.487575 -0.590067 -0.694963 0.333058 -0.637259 -0.72645 0.168948 -0.666128 -0.148102 0.977539 -0.149945 -0.262081 0.927851 -0.26534 -0.368518 0.851462 -0.373106 -0.464347 0.750576 -0.470125 -0.546812 0.628096 -0.553617 -0.613531 0.48758 -0.621165 -0.662602 0.333056 -0.670845 -0.692621 0.168942 -0.701238 -0.140507 0.977538 -0.157088 -0.248631 0.927855 -0.277972 -0.349623 0.851457 -0.390878 -0.440526 0.750579 -0.492512 -0.518766 0.628094 -0.579983 -0.582063 0.487573 -0.65075 -0.628609 0.333067 -0.702792 -0.657092 0.168959 -0.734631 -0.132567 0.977538 -0.163844 -0.234583 0.927853 -0.28993 -0.329858 0.851461 -0.407686 -0.415638 0.750571 -0.5137 -0.489446 0.628096 -0.604928 -0.549163 0.487584 -0.678735 -0.593088 0.333066 -0.733017 -0.61996 0.168937 -0.766231 -0.124301 0.977538 -0.1702 -0.219957 0.927853 -0.301176 -0.309292 0.851462 -0.423498 -0.389724 0.750571 -0.533628 -0.458933 0.628095 -0.628392 -0.514928 0.487582 -0.705063 -0.55611 0.333063 -0.761453 -0.581306 0.168941 -0.795954 -0.115731 0.977539 -0.176138 -0.204793 0.927854 -0.311685 -0.287972 0.851461 -0.438277 -0.362853 0.750577 -0.552243 -0.427297 0.628095 -0.650319 -0.479433 0.487575 -0.729667 -0.517772 0.333068 -0.788021 -0.541235 0.168936 -0.823727 -0.10688 0.977539 -0.181645 -0.189129 0.927854 -0.32143 -0.265947 0.851459 -0.451984 -0.335099 0.750576 -0.569512 -0.39461 0.628098 -0.670653 -0.442759 0.48758 -0.752483 -0.478174 0.333049 -0.812667 -0.499829 0.168968 -0.849483 -0.0977662 0.977539 -0.186709 -0.173002 0.927853 -0.330392 -0.243268 0.851459 -0.464583 -0.306523 0.750579 -0.585384 -0.360966 0.62809 -0.689352 -0.405 0.487589 -0.773455 -0.437396 0.33306 -0.835318 -0.457215 0.168949 -0.873161 -0.0884141 0.977538 -0.191317 -0.156449 0.927855 -0.33854 -0.219999 0.851455 -0.476051 -0.277194 0.750586 -0.599821 -0.32643 0.628095 -0.706357 -0.36626 0.487577 -0.792542 -0.395546 0.333073 -0.855924 -0.413473 0.168941 -0.894706 -0.0788429 0.977539 -0.195454 -0.139514 0.927857 -0.345857 -0.196186 0.85146 -0.486341 -0.247199 0.75058 -0.612799 -0.2911 0.628094 -0.721636 -0.326618 0.487579 -0.809684 -0.352741 0.333058 -0.874441 -0.368717 0.168952 -0.914058 -0.0690779 0.977541 -0.199103 -0.122245 0.927854 -0.352338 -0.171896 0.851458 -0.495451 -0.216591 0.750576 -0.624279 -0.255054 0.628093 -0.73515 -0.286173 0.487583 -0.824844 -0.309062 0.333062 -0.890814 -0.323066 0.168942 -0.931175 -0.0490731 0.977539 -0.204964 -0.104669 0.927853 -0.357956 -0.147181 0.851457 -0.503348 -0.122108 0.85146 -0.510006 -0.18545 0.750576 -0.634227 -0.153862 0.750573 -0.642625 -0.218384 0.628099 -0.74686 -0.181184 0.628103 -0.756743 -0.245033 0.487579 -0.837989 -0.203294 0.487576 -0.849083 -0.264628 0.333066 -0.905008 
-0.219552 0.333053 -0.916991 -0.276619 0.168938 -0.946014 -0.229494 0.168955 -0.958534 -0.0687953 0.927854 -0.366543 -0.121889 0.750577 -0.649444 -0.143539 0.628095 -0.764783 -0.161048 0.487588 -0.858092 -0.173934 0.333057 -0.926726 -0.181815 0.168949 -0.96871 -0.050583 0.927855 -0.369495 -0.0505846 0.927854 -0.369498 -0.0285852 0.977539 -0.208809 -0.0711309 0.851455 -0.519581 -0.105544 0.628093 -0.770947 -0.118421 0.487579 -0.86501 -0.127889 0.333057 -0.934194 -0.133683 0.168943 -0.976518 -0.018225 0.977538 -0.20997 -0.0453467 0.851458 -0.522458 -0.0453489 0.851455 -0.522463 -0.0322473 0.927856 -0.371542 -0.0571377 0.75058 -0.658305 -0.0754978 0.487571 -0.869813 -0.0815324 0.333066 -0.939372 -0.0852293 0.168948 -0.981933 -0.00781862 0.977538 -0.210613 -0.0138363 0.927853 -0.37269 -0.0245131 0.75058 -0.660325 -0.0245143 0.750578 -0.660328 -0.0194524 0.85146 -0.524058 -0.0288643 0.628099 -0.777598 -0.0349792 0.333063 -0.942255 -0.0365634 0.168947 -0.984947 0.00260742 0.977539 -0.210741 0.0046133 0.927854 -0.372915 0.00648587 0.85146 -0.524379 0.0096231 0.628092 -0.778079 0.00962435 0.628096 -0.778077 0.00817244 0.750577 -0.660733 0.0107968 0.487579 -0.873012 0.0121899 0.168946 -0.98555 0.0130249 0.977538 -0.210355 0.0230485 0.927854 -0.37223 0.0324095 0.851459 -0.523419 0.0408382 0.750572 -0.659525 0.053959 0.48758 -0.871409 0.0539584 0.487578 -0.87141 0.0480927 0.628102 -0.776644 0.058274 0.333059 -0.941104 0.0234125 0.977539 -0.209451 0.0414288 0.927853 -0.370637 0.0582564 0.851459 -0.521175 0.0734045 0.750575 -0.656696 0.0864393 0.628095 -0.773321 0.104745 0.33306 -0.93707 0.104746 0.333063 -0.937069 0.0969889 0.487578 -0.867675 0.109489 0.168947 -0.979525 0.0337417 0.977538 -0.208041 0.059709 0.927854 -0.368134 0.0839603 0.85146 -0.517655 0.105791 0.750578 -0.652258 0.124578 0.628096 -0.768099 0.139778 0.487576 -0.861819 0.157798 0.168951 -0.972911 0.157798 0.168951 -0.972911 0.150958 0.33306 -0.930743 0.0439893 0.977538 -0.206118 0.0778398 0.927854 -0.364729 0.109455 0.851458 -0.512873 0.137918 0.750576 -0.646231 0.162411 0.628102 -0.760993 0.182227 0.487573 -0.853854 0.196801 0.333058 -0.92214 0.205718 0.168955 -0.963916 0.0541284 0.977538 -0.203688 0.0957814 0.927854 -0.360435 0.134685 0.851459 -0.506831 0.169707 0.750576 -0.638619 0.199846 0.628094 -0.752037 0.224232 0.487581 -0.843792 0.242163 0.333061 -0.911278 0.253134 0.168941 -0.952566 0.0641352 0.977541 -0.200751 0.11349 0.927854 -0.355256 0.159586 0.851458 -0.499552 0.201083 0.750575 -0.629446 0.236793 0.628101 -0.741227 0.265684 0.487578 -0.831673 0.286934 0.333058 -0.898188 0.299934 0.168956 -0.938879 0.0739853 0.977538 -0.197345 0.130922 0.927854 -0.349207 0.184096 0.851462 -0.49104 0.231963 0.75058 -0.618727 0.273158 0.628095 -0.728616 0.306489 0.487581 -0.817514 0.331001 0.333056 -0.8829 0.345997 0.168952 -0.922898 0.0836538 0.977539 -0.193444 0.14803 0.927853 -0.34231 0.208153 0.851461 -0.481338 0.262281 0.750574 -0.606504 0.30886 0.628103 -0.714207 0.346545 0.487575 -0.80136 0.374261 0.333062 -0.865447 0.391216 0.168947 -0.904658 0.0931187 0.977538 -0.189073 0.164777 0.927854 -0.334568 0.231702 0.85146 -0.470458 0.291952 0.750577 -0.592789 0.343802 0.628091 -0.698071 0.38575 0.48759 -0.783233 0.4166 0.333052 -0.845885 0.435475 0.168953 -0.884204 0.102355 0.977538 -0.184235 0.181119 0.927855 -0.326008 0.254686 0.851457 -0.458427 0.320909 0.750579 -0.577623 0.377902 0.628097 -0.68021 0.42401 0.487574 -0.763208 0.457924 0.333064 -0.824242 0.478669 0.168941 -0.861589 0.111341 0.977538 -0.178948 0.19702 0.927855 -0.316652 
0.277046 0.851458 -0.44527 0.349083 0.750576 -0.56105 0.411079 0.628096 -0.660688 0.461237 0.487578 -0.741302 0.498123 0.33307 -0.800586 0.520692 0.168936 -0.836864 0.120054 0.977538 -0.173223 0.21244 0.927854 -0.306524 0.298729 0.851456 -0.431026 0.376401 0.75058 -0.543095 0.44325 0.628095 -0.639552 0.497333 0.487578 -0.717585 0.537107 0.333066 -0.774973 0.561443 0.168945 -0.810086 0.128474 0.977538 -0.167073 0.227339 0.927853 -0.295643 0.319677 0.851458 -0.415723 0.402799 0.750576 -0.523821 0.474333 0.628101 -0.616845 0.53221 0.487578 -0.692113 0.574775 0.333056 -0.747467 0.600817 0.168947 -0.781329 0.136579 0.977539 -0.160514 0.241681 0.927855 -0.284036 0.339844 0.851458 -0.399405 0.428212 0.750575 -0.50326 0.50426 0.628096 -0.592636 0.565786 0.487584 -0.664942 0.611037 0.333056 -0.718128 0.63872 0.168948 -0.750662 0.144348 0.977539 -0.153563 0.255432 0.927854 -0.271739 0.35918 0.851459 -0.382109 0.452577 0.750575 -0.481469 0.53295 0.628098 -0.566972 0.597978 0.487578 -0.636152 0.645805 0.333059 -0.687029 0.675062 0.168949 -0.718155 0.151767 0.977538 -0.146238 0.268557 0.927855 -0.258772 0.377639 0.851458 -0.36388 0.475832 0.750577 -0.458495 0.560339 0.628094 -0.539924 0.628708 0.487579 -0.6058 0.678988 0.333068 -0.654249 0.709751 0.168938 -0.683895 0.158813 0.977539 -0.138553 0.281026 0.927854 -0.245177 0.395169 0.85146 -0.344757 0.497927 0.750572 -0.434408 0.586351 0.6281 -0.511549 0.657897 0.487574 -0.573971 0.710513 0.333061 -0.619873 0.742706 0.168942 -0.647956 0.165472 0.977538 -0.13053 0.292808 0.927854 -0.230977 0.411735 0.851461 -0.324791 0.518798 0.750577 -0.409247 0.610936 0.628092 -0.481932 0.685475 0.487584 -0.540727 0.740302 0.333048 -0.58398 0.773841 0.168947 -0.610432 0.171723 0.977539 -0.122186 0.303874 0.927853 -0.216218 0.427295 0.851459 -0.304034 0.314193 0.927854 -0.200922 0.538401 0.750578 -0.383087 0.441809 0.851458 -0.282532 0.634022 0.628094 -0.451125 0.556684 0.750581 -0.355993 0.711379 0.487579 -0.506168 0.655556 0.628092 -0.419222 0.768275 0.333059 -0.546649 0.735541 0.487578 -0.47037 0.80308 0.168947 -0.571419 0.794368 0.333058 -0.507989 0.830358 0.168943 -0.531003 0.177556 0.977538 -0.113545 0.182954 0.977538 -0.104625 0.323746 0.927854 -0.185139 0.182955 0.977538 -0.104626 0.573611 0.750579 -0.328028 0.675484 0.628096 -0.386285 0.757902 0.487578 -0.433419 0.818517 0.333058 -0.468083 0.855601 0.168945 -0.48929 0.187906 0.977538 -0.0954509 0.467557 0.851459 -0.237505 0.478733 0.851457 -0.214093 0.478731 0.851459 -0.214091 0.603212 0.750577 -0.269759 0.693763 0.628092 -0.35241 0.778408 0.487582 -0.395405 0.840666 0.333053 -0.427032 0.878751 0.168957 -0.446375 0.192394 0.977539 -0.0860391 0.79701 0.48758 -0.356428 0.860753 0.333062 -0.384934 0.899751 0.168939 -0.402377 0.996908 -0.00426838 -0.0784612 0.972267 -0.164019 -0.166717 0.932561 -0.323664 -0.159911 0.867535 -0.474614 -0.148744 0.778852 -0.612826 -0.133546 0.668839 -0.734509 -0.114681 0.540449 -0.836259 -0.092659 0.397118 -0.915238 -0.0680976 0.242764 -0.969192 -0.0416269 0.0816931 -0.996559 -0.0140082 0.97236 -0.233437 -0.0047451 0.942364 -0.324711 -0.0806991 0.923873 -0.382692 -0.00232208 0.875982 -0.475938 -0.0783484 0.852645 -0.522489 0.00129019 0.760389 -0.64944 0.00602264 0.649408 -0.76035 0.011727 0.5224 -0.852501 0.0184413 0.382564 -0.923566 0.025904 0.233304 -0.971809 0.0340005 0.0783547 -0.995555 -0.0522525 0.785571 -0.614185 -0.0751992 0.67359 -0.735657 -0.0713147 0.543116 -0.836986 -0.0669221 0.397707 -0.915411 -0.0620701 0.241334 -0.968759 -0.0571345 0.0376816 -0.996209 -0.0784062 
0.0174646 -0.996559 -0.0810243 0.0309029 -0.996559 -0.0769089 0.0434553 -0.996559 -0.0705803 0.0547595 -0.996559 -0.0622197 0.0644874 -0.996559 -0.0520699 0.0723608 -0.996559 -0.0404208 0.0781513 -0.996559 -0.0276108 -0.00374137 -0.92387 -0.382688 0.0555626 -0.915762 -0.39786 0.00620562 -0.852628 -0.522482 0.0639168 -0.837157 -0.543215 0.115523 -0.836262 -0.536023 0.204445 -0.836262 -0.50879 0.287498 -0.836262 -0.466917 0.362271 -0.836262 -0.411614 0.426616 -0.836262 -0.344479 0.478701 -0.836262 -0.267416 0.517004 -0.836263 -0.182675 0.540438 -0.836264 -0.0926741 0.015149 -0.760318 -0.649375 0.0714681 -0.735645 -0.673587 0.142974 -0.734503 -0.663373 0.253023 -0.734501 -0.629673 0.355789 -0.734508 -0.577852 0.448346 -0.734505 -0.5094 0.527971 -0.734505 -0.426319 0.59244 -0.734501 -0.330943 0.639836 -0.734504 -0.226083 0.668845 -0.734504 -0.114672 0.0229018 -0.649275 -0.760209 0.0780011 -0.614051 -0.785403 0.166482 -0.612828 -0.77248 0.294631 -0.612831 -0.733233 0.414326 -0.61282 -0.672894 0.522077 -0.612828 -0.593193 0.614823 -0.61282 -0.496432 0.689861 -0.612835 -0.385391 0.74508 -0.612825 -0.263252 0.778853 -0.612825 -0.133545 0.0293153 -0.522273 -0.852274 0.0832959 -0.475753 -0.875626 0.185436 -0.474621 -0.860435 0.328183 -0.474618 -0.816721 0.461488 -0.474625 -0.749506 0.58152 -0.474624 -0.660732 0.68482 -0.474623 -0.55295 0.76842 -0.474616 -0.429267 0.82991 -0.474621 -0.293231 0.86753 -0.47462 -0.148751 0.0342379 -0.382463 -0.923336 0.0871995 -0.324523 -0.94185 0.199334 -0.323658 -0.924939 0.352793 -0.323651 -0.877945 0.49609 -0.323648 -0.805696 0.625111 -0.323653 -0.710271 0.736156 -0.323658 -0.594407 0.826025 -0.323655 -0.461443 0.89213 -0.323649 -0.315207 0.93257 -0.323647 -0.159894 0.0375766 -0.233275 -0.971685 0.089581 -0.164486 -0.982303 0.207817 -0.164019 -0.964319 0.367817 -0.164013 -0.91532 0.517206 -0.164016 -0.839998 0.651726 -0.164013 -0.740508 0.7675 -0.164011 -0.619713 0.861196 -0.164006 -0.481085 0.930104 -0.16402 -0.328639 0.972266 -0.164021 -0.166719 0.0392614 -0.0783955 -0.996149 0.0784427 -0.0184184 -0.996749 0.233355 -0.0268823 -0.97202 0.382421 -0.0373124 -0.923234 0.521961 -0.0453316 -0.851764 0.523665 -0.0494527 -0.850488 0.649078 -0.033628 -0.759978 0.659475 -0.0602041 -0.749312 0.760189 -0.0238596 -0.649264 0.852535 -0.0160357 -0.522425 0.923829 -0.0101718 -0.382671 0.972352 -0.00624036 -0.233435 0.982168 -0.0835561 -0.168415 0.210207 -0.0662616 -0.975409 0.372264 -0.0568243 -0.926386 0.776182 -0.0689833 -0.626724 0.870506 -0.0757971 -0.486286 0.939801 -0.0806648 -0.332065 0.242763 -0.969192 -0.0416274 0.232241 -0.969191 -0.0820505 0.215035 -0.969191 -0.120119 0.191638 -0.969191 -0.154737 0.16273 -0.969191 -0.1849 0.129135 -0.969192 -0.209742 0.0918332 -0.969192 -0.228548 0.0518997 -0.969191 -0.24078 -0.0144546 -0.97227 -0.233415 0.046722 -0.969284 -0.241467 0.39712 -0.915237 -0.0680944 0.379896 -0.915238 -0.134232 0.351748 -0.915239 -0.196498 0.313481 -0.915237 -0.253121 0.266188 -0.915239 -0.302457 0.211254 -0.915239 -0.343089 0.150231 -0.915238 -0.373858 0.084885 -0.915237 -0.393873 0.0848785 -0.915238 -0.393872 0.115511 -0.836265 -0.536021 0.996899 -0.0784605 -0.00596967 0.982937 -0.164596 -0.0821164 0.20782 -0.164015 -0.964318 0.199338 -0.323654 -0.924939 0.185438 -0.474619 -0.860435 0.16649 -0.612823 -0.772482 0.142969 -0.734505 -0.663372 0.0518914 -0.969192 -0.24078 0.367812 -0.164017 -0.915321 0.352785 -0.323658 -0.877945 0.328178 -0.474622 -0.816721 0.294637 -0.612827 -0.733234 0.253018 -0.734503 -0.629672 0.150227 -0.915239 -0.373858 0.204446 
-0.836262 -0.50879 0.0918394 -0.969191 -0.228547 0.51721 -0.164012 -0.839996 0.496086 -0.323652 -0.805697 0.461498 -0.474617 -0.749505 0.414308 -0.61283 -0.672895 0.355804 -0.734501 -0.57785 0.21125 -0.915239 -0.343089 0.287495 -0.836263 -0.466917 0.129145 -0.969191 -0.209739 0.651724 -0.164016 -0.74051 0.625118 -0.323647 -0.710268 0.581519 -0.474625 -0.660732 0.522092 -0.612819 -0.593189 0.448337 -0.734509 -0.509402 0.2662 -0.915237 -0.302453 0.362275 -0.836261 -0.411613 0.162728 -0.969191 -0.184901 0.767499 -0.164013 -0.619714 0.73616 -0.323654 -0.594404 0.68482 -0.474623 -0.55295 0.614809 -0.612829 -0.496438 0.527971 -0.734506 -0.42632 0.313476 -0.915238 -0.253124 0.426614 -0.836262 -0.34448 0.191637 -0.969191 -0.154737 0.861193 -0.164011 -0.481089 0.826022 -0.323659 -0.461446 0.768412 -0.474624 -0.429273 0.689883 -0.612819 -0.385376 0.592431 -0.734506 -0.330949 0.351751 -0.915238 -0.196496 0.478704 -0.836261 -0.267414 0.215031 -0.969191 -0.120123 0.930111 -0.164006 -0.328627 0.892125 -0.323655 -0.315214 0.829915 -0.474616 -0.293226 0.745066 -0.612836 -0.263266 0.639842 -0.734501 -0.226077 0.3799 -0.915237 -0.134227 0.517008 -0.836262 -0.182671 0.232236 -0.969192 -0.0820577 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0.000411846 -0.078456 -0.996918 -0.000658331 -0.105931 -0.994373 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0 -0.962164 -0.272469 0 -0.902282 -0.431146 0 -0.816437 -0.577434 0 -0.70711 -0.707104 0 -0.577432 -0.816439 0 -0.431153 -0.902279 0 -0.272463 -0.962166 0 -0.105931 -0.994374 -0.000324218 -0.0487508 -0.998811 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.105931 -0.994374 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0.000597161 -0.233439 -0.972371 -0.000820796 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000735962 -0.382688 -0.923877 -0.000936536 -0.431152 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000828606 -0.522498 -0.85264 -0.00100611 -0.577432 -0.816438 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000874936 -0.649445 -0.760408 -0.00102949 -0.707109 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.999944 -0.0106181 0.000874668 -0.760405 -0.649449 -0.00100642 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000828574 -0.852644 -0.522492 -0.000936535 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000735657 -0.923876 -0.382691 -0.000821035 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000597162 -0.972371 -0.233439 -0.000658331 -0.994373 
-0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000411616 -0.996917 -0.0784619 -0.000233408 -0.998794 -0.0490951 0.00102111 -0.994373 -0.105931 -7.10303e-05 -0.989177 -0.146727 7.86155e-05 -0.970034 -0.242968 0.000992058 -0.962164 -0.272469 -0.00030734 -0.94151 -0.336985 0.000601694 -0.904104 -0.427313 -0.000345316 -0.857636 -0.514257 0.000226575 -0.803289 -0.595589 -0.000172195 -0.740873 -0.671645 0.00112351 -0.707109 -0.707104 -0.000179319 -0.67145 -0.74105 0.000234557 -0.595864 -0.803086 -0.000386242 -0.513812 -0.857903 0.000912404 -0.428295 -0.903639 0.000761071 -0.431152 -0.902279 -0.000370411 -0.336562 -0.941661 0.00122892 -0.272463 -0.962166 0.00010185 -0.243083 -0.970006 -9.18887e-05 -0.146625 -0.989192 0.00137373 -0.105931 -0.994373 0.000764696 -0.902282 -0.431146 0.000975876 -0.816437 -0.577434 0.00106411 -0.577432 -0.816438 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 
-0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0.078456 0.000430563 -0.996917 0.105931 -0.000687532 -0.994373 0.233439 0.00062417 -0.972371 0.272466 -0.000857391 -0.962165 0.431148 2.5084e-07 -0.902281 0.577432 0 -0.816439 0.70711 0 -0.707104 0.816437 1.60531e-07 -0.577434 0.902282 0 -0.431146 0.962165 7.57483e-08 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.0106181 0.996917 -0.000613045 -0.0784619 0.994373 0.000980492 -0.105931 0.972371 -0.000889389 -0.233439 0.962164 0.00122282 -0.272469 0.902282 0 -0.431146 0.816437 1.60531e-07 -0.577434 0.70711 1.9658e-07 -0.707104 0.577432 0 -0.816439 0.431148 2.5084e-07 -0.902281 0.272466 0 -0.962166 0.105931 0 -0.994374 0.0105742 2.77991e-07 -0.999944 0.105931 2.76443e-07 -0.994374 0.105931 2.76678e-07 -0.994374 0.272466 2.67489e-07 -0.962166 0.431148 0 -0.902281 0.577432 0 -0.816439 0.70711 1.9658e-07 -0.707104 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.962165 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.962164 0 -0.272469 0.962164 0 -0.272469 0.382688 0.000768959 -0.923877 0.431148 -0.000978303 -0.902281 0.577432 2.26976e-07 -0.816439 0.70711 0 -0.707104 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.962164 0 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 2.95168e-09 -0.0106173 0.994374 2.94495e-08 -0.105931 0.522498 0.000865944 -0.85264 0.577432 -0.00105093 -0.816438 0.70711 1.9658e-07 -0.707104 0.816437 0 -0.577434 0.902282 0 -0.431146 0.962164 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.999944 2.95168e-09 -0.0106173 0.994374 2.94495e-08 -0.105931 0.649445 0.000914323 -0.760408 0.707109 -0.00107539 -0.707104 0.816437 1.60531e-07 -0.577434 0.902282 0 -0.431146 0.962164 0 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.010619 0.994374 0 -0.105931 0.760405 0.000914012 -0.649449 0.816437 -0.00105132 -0.577434 0.902282 1.19861e-07 -0.431146 0.962164 0 -0.272469 0.994374 0 -0.105931 0.999944 2.95216e-09 -0.010619 0.999944 1.52134e-09 -0.0106179 0.852644 0.00086582 -0.522492 0.902282 -0.000978349 -0.431146 0.962164 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.999944 0 -0.0106172 0.994374 0 -0.105931 0.923876 0.000768703 -0.382691 0.962164 -0.000857721 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.0106172 0.994374 0 -0.105931 0.972371 0.000623964 -0.233439 0.994373 -0.000687779 -0.105931 0.999944 2.9519e-09 -0.0106181 0.994374 2.94495e-08 -0.105931 0.996917 0.000430068 -0.0784619 0.923876 -0.00109566 -0.382691 0.902281 0.00139484 -0.431146 0.816437 0 -0.577434 0.70711 1.9658e-07 -0.707104 0.577432 2.26976e-07 -0.816439 0.431148 0 -0.902281 0.272466 2.67489e-07 -0.962166 0.105931 0 -0.994374 0.0104865 0 -0.999945 0.105931 0 -0.994374 0.852644 -0.00123405 -0.522491 0.816436 0.00149892 -0.577434 0.70711 0 -0.707104 0.577432 2.26976e-07 -0.816439 0.431148 2.5084e-07 -0.902281 0.272466 0 -0.962166 0.105931 2.76443e-07 -0.994374 0.0104865 0 -0.999945 0.105931 0 -0.994374 0.760404 -0.00130269 -0.649449 0.707109 0.00153329 -0.707103 0.577432 0 -0.816439 0.431148 2.5084e-07 -0.902281 0.272466 2.67489e-07 -0.962166 0.105931 0 -0.994374 0.010662 2.77991e-07 -0.999943 0.105931 2.76443e-07 -0.994374 0.649445 -0.0013031 -0.760408 0.577431 0.00149846 -0.816438 0.431148 0 
-0.902281 0.272466 2.67489e-07 -0.962165 0.105931 2.76443e-07 -0.994374 0.010662 0 -0.999943 0.0105557 1.43272e-07 -0.999944 0.522497 -0.00123409 -0.85264 0.431148 0.00139496 -0.90228 0.272466 0 -0.962166 0.105931 2.76443e-07 -0.994374 0.0104996 2.77991e-07 -0.999945 0.105931 2.76443e-07 -0.994374 0.382688 -0.00109581 -0.923877 0.272465 0.00122262 -0.962165 0.105931 0 -0.994374 0.0104996 2.77991e-07 -0.999945 0.105931 2.76443e-07 -0.994374 0.233439 -0.000889389 -0.972371 0.105931 0.000980492 -0.994373 0.0105742 0 -0.999944 0.105931 0 -0.994374 0.078456 -0.000613387 -0.996917 0.105931 0 -0.994374 0.272466 0 -0.962166 0.272466 0 -0.962166 0.272466 2.67489e-07 -0.962166 0.272466 0 -0.962166 0.272466 2.67299e-07 -0.962166 0.272466 0 -0.962166 0.272466 2.67299e-07 -0.962166 0.272466 2.67299e-07 -0.962165 0.272466 0 -0.962166 0.431148 2.5084e-07 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 0 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 0 -0.902281 0.577432 2.26976e-07 -0.816439 0.577432 0 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 2.26976e-07 -0.816439 0.577432 0 -0.816439 0.577432 0 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 0 -0.816439 0.70711 1.9658e-07 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 1.9658e-07 -0.707104 0.70711 1.96955e-07 -0.707104 0.70711 1.96955e-07 -0.707104 0.70711 0 -0.707104 0.816437 1.60531e-07 -0.577434 0.816437 0 -0.577434 0.816437 0 -0.577434 0.816437 1.60531e-07 -0.577434 0.816437 1.61004e-07 -0.577434 0.816437 1.60531e-07 -0.577434 0.816437 0 -0.577434 0.816437 1.61004e-07 -0.577434 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.902282 0 -0.431146 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.902282 1.18799e-07 -0.431146 0.902282 1.19862e-07 -0.431146 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.962164 7.57483e-08 -0.272469 0.962164 0 -0.272469 0.962164 0 -0.272469 0.962164 7.57483e-08 -0.272469 0.962164 0 -0.272469 0.962165 7.57483e-08 -0.272469 0.962165 7.57483e-08 -0.272469 0.994374 2.94495e-08 -0.105931 0.994374 0 -0.105931 0.996899 -0.0784605 0.00596967 0.972267 -0.164019 0.16672 0.892128 -0.323647 0.315216 0.768416 -0.474614 0.429276 0.614817 -0.61282 0.49644 0.448335 -0.734507 0.509407 0.287497 -0.83626 0.46692 0.150231 -0.915239 0.373856 0.0518917 -0.969191 0.240781 0.972353 -0.00623935 0.233434 0.939802 -0.0806635 0.332064 0.923829 -0.0101675 0.382671 0.870507 -0.0757917 0.486286 0.767499 -0.164011 0.619714 0.625119 -0.323657 0.710262 0.461492 -0.474625 0.749504 0.294637 -0.612828 0.733233 0.142969 -0.734505 0.663372 0.0639156 -0.837155 0.543218 0.00619757 -0.852628 0.522482 0.055557 -0.915763 0.397858 -0.00374137 -0.92387 0.382688 0.046722 -0.969284 0.241467 0.0518917 -0.969191 0.240781 0.0848849 -0.915237 0.393872 0.150224 -0.915238 0.373863 0.20444 -0.836262 0.508792 0.253018 -0.734503 0.629672 0.253021 -0.734505 0.629668 0.852535 -0.0160298 0.522425 0.77618 -0.0689777 0.626727 0.651725 -0.164009 0.74051 0.496079 -0.323648 0.805703 0.32818 -0.474614 0.816725 0.166484 -0.612824 0.772482 0.0714611 -0.735644 0.673588 0.0151476 -0.760315 0.649378 0.760186 -0.0238533 0.649267 0.659478 -0.0601946 0.74931 0.517206 -0.164016 0.839998 0.352795 -0.323656 0.877942 0.185443 -0.474626 0.860431 0.0779959 -0.614054 0.7854 0.0228961 -0.649278 0.760206 0.649082 -0.0336189 0.759976 0.523665 -0.0494431 0.850488 0.367812 -0.164018 
0.915321 0.199332 -0.323653 0.924941 0.0832935 -0.475749 0.875629 0.0293078 -0.522273 0.852274 0.521961 -0.045322 0.851765 0.382416 -0.0373017 0.923237 0.233361 -0.0268835 0.972019 0.0784427 -0.0184071 0.996749 0.039254 -0.0783955 0.996149 0.0895738 -0.164486 0.982304 0.0375649 -0.23328 0.971684 0.0871878 -0.324528 0.941849 0.199339 -0.323659 0.924937 0.207826 -0.164013 0.964317 0.367808 -0.164012 0.915324 0.372255 -0.0568222 0.92639 0.210217 -0.0662545 0.975407 0.207827 -0.164015 0.964317 0.0342354 -0.382458 0.923338 -0.0144645 -0.97227 0.233414 0.037678 -0.996209 0.0784066 0.0783547 -0.995555 0.052249 0.081693 -0.996559 0.0140082 0.078151 -0.996559 0.0276116 0.0723608 -0.996559 0.0404208 0.0644874 -0.996559 0.0520699 0.0547595 -0.996559 0.0622197 0.0434553 -0.996559 0.0705803 0.0309052 -0.996559 0.0769078 0.017462 -0.996559 0.0810246 0.382564 -0.923566 -0.0259042 0.397707 -0.915411 0.06207 0.5224 -0.852501 -0.0184413 0.543117 -0.836986 0.0669241 0.540443 -0.836264 0.0926563 0.517006 -0.836262 0.182675 0.478703 -0.83626 0.26742 0.426617 -0.836262 0.344476 0.362271 -0.83626 0.411619 0.362274 -0.836262 0.411611 0.266199 -0.915238 0.302452 0.211249 -0.915238 0.343093 0.211251 -0.915239 0.34309 0.649407 -0.76035 -0.0117274 0.673589 -0.735658 0.0713144 0.668843 -0.734505 0.114683 0.639839 -0.734501 0.226085 0.592435 -0.734507 0.33094 0.527972 -0.734507 0.426316 0.527972 -0.734507 0.426316 0.760389 -0.64944 -0.00602255 0.785572 -0.614184 0.0751997 0.778852 -0.612826 0.133544 0.745072 -0.612835 0.263251 0.68987 -0.61282 0.385396 0.689872 -0.612836 0.385369 0.852645 -0.522489 -0.00129023 0.875982 -0.475938 0.0783487 0.867532 -0.474619 0.148743 0.829912 -0.474615 0.293234 0.829912 -0.47462 0.293227 0.923873 -0.382692 0.00232204 0.942364 -0.324711 0.0806991 0.932567 -0.323647 0.159913 0.932564 -0.323664 0.159893 0.97236 -0.233437 0.0047451 0.982937 -0.164596 0.0821164 0.867534 -0.474614 0.148749 0.778853 -0.612824 0.133546 0.668839 -0.734511 0.114669 0.540448 -0.836258 0.0926757 0.397119 -0.915237 0.0680977 0.242764 -0.969192 0.0416269 0.242764 -0.969192 0.0416274 0.241334 -0.968759 0.0571343 0.233304 -0.971809 -0.0339982 0.397118 -0.915238 0.0680942 0.982169 -0.0835543 0.168415 0.996908 -0.0042679 0.0784616 0.930108 -0.164022 0.328626 0.826025 -0.323655 0.461444 0.684822 -0.474623 0.552948 0.522084 -0.612828 0.593186 0.355803 -0.734511 0.577839 0.287501 -0.836262 0.466914 0.892128 -0.323655 0.315207 0.745074 -0.612825 0.263269 0.639838 -0.734505 0.226076 0.517006 -0.836264 0.18267 0.379899 -0.915238 0.134227 0.232237 -0.969192 0.0820567 0.232238 -0.969192 0.0820523 0.379899 -0.915237 0.13423 0.972267 -0.164022 0.166717 0.930106 -0.16401 0.328638 0.861196 -0.164012 0.481084 0.736158 -0.323662 0.594402 0.581521 -0.474621 0.660734 0.414315 -0.612822 0.672898 0.355795 -0.734504 0.577853 0.861196 -0.164011 0.481085 0.826026 -0.323661 0.461437 0.768418 -0.474623 0.429263 0.592434 -0.734501 0.330954 0.478704 -0.836262 0.267411 0.35175 -0.915239 0.196493 0.215031 -0.969191 0.120126 0.215033 -0.969192 0.120117 0.351749 -0.915237 0.196502 0.767498 -0.164009 0.619716 0.736157 -0.323658 0.594407 0.684821 -0.474621 0.552951 0.61482 -0.612828 0.496425 0.426615 -0.836259 0.344486 0.313476 -0.915238 0.253127 0.191637 -0.969191 0.154738 0.191638 -0.969191 0.154737 0.313478 -0.915239 0.253118 0.651729 -0.164015 0.740505 0.625113 -0.323647 0.710272 0.581523 -0.474625 0.660729 0.52208 -0.612822 0.593196 0.448339 -0.734511 0.509398 0.16273 -0.969191 0.184899 0.266196 -0.915237 0.302458 0.16273 -0.969191 0.184898 
0.129136 -0.969191 0.209744 0.129141 -0.969191 0.209739 0.0918408 -0.969191 0.228546 0.0918395 -0.969191 0.228547 0.517206 -0.164017 0.839998 0.496085 -0.323656 0.805696 0.461484 -0.474614 0.749516 0.414319 -0.612827 0.672891 0.352793 -0.323654 0.877944 0.328191 -0.474625 0.816714 0.294632 -0.612824 0.733238 0.204444 -0.836264 0.508789 0.115523 -0.836263 0.536021 0.115524 -0.836263 0.53602 0.0848875 -0.915238 0.39387 0.185432 -0.474615 0.860439 0.166487 -0.612827 0.77248 0.142969 -0.734505 0.663372 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0.000411619 -0.996917 0.0784624 -0.00065828 -0.994373 0.10593 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.0105742 0.999944 0 -0.105936 0.994373 0 -0.272463 0.962166 0 -0.431153 0.902279 0 -0.577428 0.816441 0 -0.70711 0.707104 0 -0.816437 0.577434 0 -0.902281 0.431148 0 -0.962164 0.272469 0 -0.994374 0.10593 -0.000233408 -0.998794 0.0490951 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.994374 0.10593 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.816437 0.577434 0.000597272 -0.972372 0.233438 -0.000820962 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000735759 -0.923876 0.382691 -0.000936494 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000828575 -0.852644 0.522492 -0.00100642 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000874746 -0.760402 0.649452 -0.0010293 -0.707109 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.0105742 0.999944 0.000875029 -0.649448 0.760405 -0.0010062 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000828379 -0.522498 0.85264 -0.000936776 -0.431152 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000735889 -0.382683 0.92388 -0.000820797 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000596959 -0.233445 0.97237 -0.000658543 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000411846 -0.078456 0.996918 -0.000324139 -0.0487414 0.998811 0.00137425 -0.105936 0.994372 -9.15281e-05 -0.146635 0.989191 0.000102167 -0.243074 0.970008 0.00122956 -0.272463 0.962166 -0.000370004 -0.336571 0.941658 0.000761362 -0.431152 0.902279 0.000912291 -0.428302 0.903635 -0.000386243 -0.513812 0.857903 0.00106402 -0.577428 0.816441 0.000234588 -0.595858 0.80309 -0.000179349 -0.671456 0.741045 -0.000172221 -0.740869 0.67165 0.000226575 -0.803289 0.595589 0.000975876 -0.816437 0.577434 -0.0003454 -0.857638 0.514253 0.000764549 -0.902281 0.431148 0.000601769 -0.9041 0.42732 -0.00030737 -0.941511 0.336982 0.000991985 -0.962164 0.272469 7.84816e-05 -0.970035 0.242966 -7.11067e-05 -0.989177 0.146728 0.0010211 -0.994373 0.10593 0.00112331 -0.707109 
0.707104 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.0105742 0.999944 0.996917 -0.0784619 0.000392867 0.994373 -0.105931 -0.000628461 0.972371 -0.233439 0.000569899 0.962165 -0.272468 -0.000783802 0.902281 -0.431148 0 0.816439 -0.577432 0 0.707107 -0.707107 0 0.577432 -0.816439 0 0.522489 -0.852646 -0.000791427 0.577432 -0.816439 0.000960172 0.649452 -0.760402 -0.000835537 0.707106 -0.707106 0.00098207 0.816439 -0.577432 0 0.902281 -0.431148 0 0.962165 -0.272468 0 0.994374 -0.105931 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.923875 -0.382693 0.00070215 0.902281 -0.431148 -0.000894036 0.816439 -0.577432 0 0.707107 -0.707107 0 0.577432 -0.816439 0 0.431144 
-0.902283 0 0.382693 -0.923875 -0.000702619 0.431144 -0.902283 0.000893435 0.852646 -0.522489 0.00079073 0.816439 -0.577432 -0.000960879 0.707107 -0.707107 0 0.577432 -0.816439 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.233439 -0.972371 -0.000570512 0.272471 -0.962164 0.000783276 0.760402 -0.649452 0.00083482 0.707106 -0.707106 -0.000982787 0.577432 -0.816439 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0784619 -0.996917 -0.000393413 0.105931 -0.994373 0.000627902 0.649452 -0.760402 0.000834764 0.577432 -0.816439 -0.000961002 0.431144 -0.902283 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0105304 -0.999945 0 0.105931 -0.994374 0 0.522489 -0.852646 0.000790561 0.431144 -0.902283 -0.000894352 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0105304 -0.999945 0 0.105931 -0.994374 0 0.382693 -0.923875 0.00070168 0.272471 -0.962164 -0.000784254 0.105931 -0.994374 0 0.0107058 -0.999943 0 0.0106181 -0.999944 -1.69322e-07 0.233439 -0.972371 0.000569524 0.105931 -0.994373 -0.000628912 0.0107058 -0.999943 0 0.105931 -0.994374 0 0.0784619 -0.996917 0.0003924 0.760402 -0.649452 -0.00083548 0.816439 -0.577432 0.000960293 0.902281 -0.431148 0 0.962165 -0.272468 0 0.994374 -0.105931 0 0.999944 -0.010619 0 0.994374 -0.105931 0 0.852646 -0.522489 -0.00079126 0.902281 -0.431148 0.000893598 0.962165 -0.272468 0 0.994374 -0.105931 0 0.999944 -0.010619 0 0.994374 -0.105931 0 0.923875 -0.382693 -0.000702539 0.962165 -0.272468 0.000783526 0.994374 -0.105931 0 0.999944 -0.0106172 0 0.999944 -0.0106181 -1.79798e-09 0.972371 -0.233439 -0.000570136 0.994373 -0.105931 0.000628353 0.999944 -0.0106172 0 0.994374 -0.105931 0 0.996917 -0.0784619 -0.000392946 0.994374 -0.105931 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.996917 0.000430028 0.0784624 0.994373 -0.000687785 0.10593 0.972371 0.000623951 0.233438 0.962164 -0.000857798 0.272469 0.902281 -1.19862e-07 0.431148 0.816437 0 0.577434 0.70711 0 0.707104 0.577428 -2.26976e-07 0.816441 0.431148 0 0.902281 0.272466 -2.67489e-07 0.962166 0.105936 -2.76442e-07 0.994373 0.0105742 0 0.999944 0.078456 -0.000613387 0.996917 0.105936 0.000980808 0.994372 0.233445 -0.000889086 0.97237 0.272465 0.00122262 0.962165 0.431148 0 0.902281 0.577428 -2.26976e-07 0.816441 0.70711 -1.9658e-07 0.707104 0.816437 0 0.577434 0.902281 -1.19862e-07 0.431148 0.962164 0 0.272469 0.994374 0 0.10593 0.999944 -2.9519e-09 0.0106181 0.994374 -2.94493e-08 0.10593 0.994374 -2.93089e-08 0.10593 0.962164 -7.57483e-08 0.272469 0.902281 0 0.431148 0.816437 0 0.577434 0.70711 -1.9658e-07 0.707104 0.577428 0 0.816441 0.431148 -2.5084e-07 0.902281 0.272466 -2.67489e-07 0.962165 0.105936 0 0.994373 0.272466 0 0.962166 0.272466 0 0.962166 0.923876 0.000768597 0.382691 0.902281 -0.000978547 0.431148 0.816437 -1.60531e-07 0.577434 0.70711 0 0.707104 0.577428 0 0.816441 0.431148 -2.5084e-07 0.902281 0.272466 0 0.962166 0.105936 -2.76442e-07 0.994373 0.0106489 -2.77991e-07 0.999943 0.105936 
[added 3D model mesh asset: long float array of per-vertex unit normals (x y z triples); the full numeric data is omitted here for readability]
-0.0209798 0.0792426 -0.996635 -0.0114036 0.0384756 -0.999195 -0.0136448 0.0481805 -0.998745 -0.0185613 0.064233 -0.997762 -0.0594246 0.230358 -0.97129 -0.0516516 0.1502 -0.987305 -0.0327907 0.0731774 -0.99678 -0.0382841 0.171907 -0.984369 -0.0518453 0.196082 -0.979216 -0.042882 0.128326 -0.990804 -0.0268086 0.0625788 -0.99768 -0.0339755 0.143072 -0.989129 -0.0438054 0.161796 -0.985852 -0.0396043 0.144576 -0.988701 -0.0295633 0.119941 -0.992341 -0.0352952 0.127357 -0.991229 -0.0271884 0.108241 -0.993753 -0.030896 0.110215 -0.993427 -0.0225728 0.0726469 -0.997102 -0.0247297 0.0966679 -0.995009 -0.0135328 0.0356697 -0.999272 -0.0221881 0.085283 -0.99611 -0.0194774 0.0672868 -0.997544 -0.0132934 0.0445871 -0.998917 -0.00787861 0.0219566 -0.999728 -0.0160591 0.0596141 -0.998092 -0.00937988 0.0319581 -0.999445 -0.00904074 0.0320587 -0.999445 -0.00317964 0.00929584 -0.999952 -0.0054702 0.0156341 -0.999863 -0.00267176 0.00891738 -0.999957 -0.00124532 0.0042031 -0.99999 -0.0661424 0.254781 -0.964734 -0.0137629 0.0725468 -0.99727 -0.0125386 0.0604407 -0.998093 -0.00760913 0.0299198 -0.999523 -0.00440732 0.0159528 -0.999863 -0.0784107 0.996405 -0.032075 -0.278932 0.958936 -0.0513621 -0.406343 0.908706 -0.0955932 -0.506781 0.858321 -0.0803669 -0.571979 0.816628 -0.077193 -0.633756 0.770041 -0.0734265 -0.691683 0.718885 -0.0691271 -0.745522 0.663372 -0.064299 -0.794837 0.60395 -0.058987 -0.839419 0.540873 -0.0532161 -0.878955 0.474578 -0.0470479 -0.919905 0.389984 -0.0410723 -0.954133 0.29782 -0.030563 -0.976514 0.214303 -0.0222551 -0.991524 0.129205 -0.0136865 -0.999099 0.0421594 -0.00483989 -0.99916 0.0407652 -0.00414697 -0.999236 0.0386931 -0.00558622 -0.999274 0.0377542 -0.00504914 -0.999361 0.0352106 -0.00614131 -0.994838 0.0996315 -0.0192768 -0.988528 0.147635 -0.0318835 -0.98246 0.181515 -0.0427183 -0.976713 0.207938 -0.052854 -0.974278 0.217995 -0.0571013 -0.976475 0.208157 -0.0562733 -0.980924 0.18749 -0.0513437 -0.986866 0.156087 -0.0416167 -0.994004 0.10592 -0.0271481 -0.998206 0.0587159 -0.0116857 -0.252876 0.966628 -0.041026 -0.331084 0.94357 -0.00765444 -0.382493 0.923433 -0.0311475 -0.442542 0.896612 -0.0155846 -0.515311 0.856646 -0.0247238 -0.522297 0.852289 -0.0284468 -0.584427 0.811333 -0.0135403 -0.633742 0.772312 -0.0436483 -0.584455 0.809857 -0.0504377 -0.571966 0.818934 -0.0469242 -0.51526 0.85542 -0.0525755 -0.506768 0.860629 -0.0500322 -0.406325 0.912727 -0.0427631 -0.331092 0.941083 -0.0688529 -0.649233 0.760172 -0.0252063 -0.649717 0.759769 -0.0248877 -0.69167 0.721093 -0.0402268 -0.649639 0.758737 -0.0478336 -0.710322 0.703785 -0.011344 -0.745511 0.665485 -0.0366663 -0.71034 0.702432 -0.0448023 -0.852432 0.522373 -0.0220625 -0.861806 0.507031 -0.0144766 -0.908428 0.418012 -0.00503755 -0.92371 0.382619 -0.019037 -0.94751 0.319714 -0.00282233 -0.954133 0.298988 -0.0153616 -0.947521 0.318927 -0.0221026 -0.919907 0.391667 -0.0191837 -0.908417 0.416979 -0.0301236 -0.878949 0.476237 -0.025417 -0.839411 0.542709 -0.029251 -0.794826 0.605939 -0.0330078 -0.766215 0.641253 -0.04135 -0.766285 0.6422 -0.0196724 -0.760194 0.649253 -0.0239959 -0.972287 0.233428 -0.0130442 -0.973127 0.22999 -0.0113391 -0.976515 0.215164 -0.0110598 -0.973106 0.229786 -0.0162098 -0.990274 0.139128 0.000528182 -0.991525 0.129736 -0.00676975 -0.990284 0.138695 -0.0100633 -0.996838 0.0784448 -0.0126364 -0.998907 0.046737 -0.00108391 -0.999036 0.0438145 -0.00285799 -0.998946 0.0457506 -0.00368909 -0.999361 0.0352029 -0.00613756 -0.999547 0.0294703 -0.0060914 -0.995866 0.0887398 -0.0193842 -0.991042 0.129941 
-0.0308254 -0.986724 0.157441 -0.039856 -0.983074 0.176952 -0.0474753 -0.982358 0.180437 -0.0491492 -0.98515 0.165498 -0.0457139 -0.989515 0.139422 -0.0376932 -0.995083 0.0958412 -0.024984 -0.998492 0.0537498 -0.0111426 -0.999558 0.0290059 -0.00658095 -0.996773 0.0780525 -0.0187194 -0.993222 0.112625 -0.0287405 -0.99037 0.133738 -0.035793 -0.988405 0.146258 -0.0407921 -0.988916 0.143017 -0.0398886 -0.991846 0.122915 -0.0336704 -0.996083 0.0854651 -0.0226848 -0.998775 0.0483689 -0.0104639 -0.999782 0.02025 -0.00510334 -0.999844 0.0169905 -0.00485706 -0.999855 0.0164074 -0.00453592 -0.999932 0.0111021 -0.00343817 -0.999236 0.0375117 -0.0110137 -0.998835 0.0463098 -0.0135365 -0.999143 0.039785 -0.0114198 -0.999641 0.0259611 -0.00663339 -0.999991 0.00397257 -0.00117702 -0.965121 0.253288 -0.0662331 -0.949769 0.306589 -0.0627829 -0.958695 0.270871 -0.0867965 -0.899502 0.424558 -0.10318 -0.906577 0.408164 -0.107338 -0.906467 0.40738 -0.111175 -0.889195 0.442797 -0.115166 -0.869687 0.480293 -0.113858 -0.854809 0.507676 -0.107548 -0.845501 0.525172 -0.0965502 -0.842301 0.532854 -0.0812089 -0.845454 0.530303 -0.0631332 -0.860184 0.508019 -0.0447344 -0.860189 0.505861 -0.0646405 -0.879264 0.473032 -0.0559955 -0.89474 0.442941 -0.0569499 -0.894736 0.444896 -0.0389184 -0.863214 0.485267 -0.139202 -0.816335 0.56509 -0.119458 -0.864396 0.487462 -0.123291 -0.848404 0.514896 -0.12285 -0.829081 0.546835 -0.116603 -0.815427 0.569196 -0.105337 -0.808233 0.582044 -0.0893538 -0.807823 0.585244 -0.0700787 -0.821212 0.568394 -0.0504002 -0.821219 0.566065 -0.0719044 -0.823579 0.550053 -0.13842 -0.814798 0.56486 -0.130528 -0.801395 0.584817 -0.125523 -0.782949 0.611558 -0.113966 -0.771129 0.629201 -0.0972993 -0.766611 0.637502 -0.0768085 -0.777938 0.625849 -0.0559169 -0.777946 0.623384 -0.0786897 -0.683098 0.708968 -0.175336 -0.591939 0.796118 -0.12572 -0.597714 0.78999 -0.136578 -0.515145 0.842485 -0.157623 -0.44938 0.887176 -0.104768 -0.54389 0.833829 -0.0944063 -0.566923 0.820274 -0.0758262 -0.566933 0.817617 -0.100449 -0.623193 0.776219 -0.095471 -0.624893 0.7748 -0.0958802 -0.624882 0.777464 -0.0712122 -0.448242 0.887433 -0.107434 -0.477269 0.875817 -0.0718234 -0.477285 0.871016 -0.116322 -0.999963 0.00821421 -0.00236689 -0.999841 0.0171029 -0.00511602 -0.999584 0.0276539 -0.00822501 -0.999584 0.0277664 -0.00783844 -0.99993 0.0112892 -0.00345356 -0.999936 0.0108231 -0.0032736 -0.999841 0.0171366 -0.00500257 -0.999782 0.0202003 -0.00536641 -0.999782 0.0199212 -0.00629468 -0.999568 0.0281913 -0.0082621 -0.999568 0.0281639 -0.00835422 -0.999418 0.0331597 -0.00803519 -0.99772 0.0650774 -0.0178665 -0.993895 0.106313 -0.0295151 -0.993895 0.10579 -0.0313158 -0.992693 0.116024 -0.0331502 -0.992693 0.116362 -0.0319514 -0.993397 0.110516 -0.0308145 -0.996625 0.0790179 -0.0222363 -0.998789 0.047319 -0.013484 -0.999236 0.037726 -0.0102543 -0.99901 0.0434047 -0.0097539 -0.999012 0.0418142 -0.0150461 -0.996945 0.0754082 -0.0203496 -0.996945 0.0743 -0.0240749 -0.99784 0.0645504 -0.0122269 -0.991427 0.126856 -0.0312948 -0.977194 0.205729 -0.0526061 -0.958608 0.275309 -0.0726301 -0.93451 0.344467 -0.0896264 -0.904843 0.412885 -0.103853 -0.869687 0.480104 -0.114651 -0.904844 0.412211 -0.106483 -0.889475 0.444326 -0.106809 -0.889474 0.444813 -0.104772 -0.854812 0.506416 -0.113307 -0.996937 0.077104 -0.0130674 -0.987742 0.151769 -0.0365072 -0.967327 0.245813 -0.0620812 -0.94077 0.327754 -0.0867672 -0.906462 0.408419 -0.107331 -0.94078 0.325417 -0.095061 -0.923598 0.371205 -0.0957817 -0.923603 0.369188 -0.103238 -0.889192 
0.443902 -0.11085 -0.999963 0.00822976 -0.00246096 -0.999963 0.00818933 -0.0024497 -0.864397 0.487177 -0.124404 -0.995867 0.0894511 -0.0157455 -0.344121 0.930868 -0.122742 -0.278444 0.958962 -0.053481 -0.233325 0.971857 -0.0324557 -0.861755 0.506227 -0.0333551 -0.816784 0.575722 -0.0375329 -0.816776 0.576884 -0.00905553 -0.442573 0.895092 -0.054228 -0.730623 0.68003 -0.061232 -0.730633 0.677462 -0.084974 -0.721952 0.686911 -0.0833017 -0.721958 0.684176 -0.103345 -0.731319 0.6739 -0.105028 -0.688773 0.716191 -0.112525 -0.622216 0.774387 -0.114766 -0.543911 0.828884 -0.130813 -0.679478 0.73069 -0.0663414 -0.679488 0.728056 -0.0907208 -0.674034 0.733254 -0.0895355 -0.674042 0.730535 -0.109486 -0.992647 0.120595 -0.0103988 -0.992646 0.119959 -0.0162542 -0.982445 0.184146 -0.0298595 -0.971995 0.2309 -0.043713 -0.960715 0.271292 -0.0585446 -0.953355 0.294135 -0.0678122 -0.952849 0.294631 -0.0726048 -0.955911 0.284308 -0.0735003 -0.962139 0.263383 -0.070133 -0.970796 0.231682 -0.0622729 -0.980718 0.189167 -0.0490708 -0.991425 0.123195 -0.0435936 -0.979619 0.200136 -0.0170857 -0.979618 0.199108 -0.0265559 -0.965687 0.256421 -0.0411829 -0.950978 0.303755 -0.0580762 -0.940393 0.332853 -0.069787 -0.937824 0.338338 -0.0775498 -0.939389 0.333011 -0.0815619 -0.944902 0.317067 -0.0814114 -0.953744 0.290646 -0.0768006 -0.964999 0.253417 -0.067509 -0.977207 0.202453 -0.0638774 -0.960185 0.278352 -0.0237707 -0.960185 0.276962 -0.0365715 -0.94 0.336748 -0.0547806 -0.925705 0.371932 -0.0688243 -0.920658 0.382157 -0.0796515 -0.920219 0.381621 -0.0869612 -0.92433 0.370767 -0.0902566 -0.932678 0.349524 -0.0891286 -0.94452 0.317714 -0.0833105 -0.958613 0.272999 -0.080819 -0.982448 0.185304 -0.0213983 -0.930392 0.365304 -0.0303824 -0.93039 0.363292 -0.0489186 -0.909173 0.411412 -0.0643788 -0.901151 0.426353 -0.0784217 -0.898183 0.430477 -0.089201 -0.900401 0.424315 -0.0960964 -0.907688 0.407882 -0.098668 -0.919383 0.381318 -0.0965954 -0.934514 0.342951 -0.0952312 -0.965691 0.257977 -0.0298102 -0.940006 0.338984 -0.0384429 -0.909175 0.413555 -0.0487081 -0.993676 0.111533 -0.0129527 -0.993674 0.110812 -0.0182248 -0.985681 0.165628 -0.031631 -0.977568 0.20595 -0.0441071 -0.969274 0.23937 -0.0566652 -0.964624 0.25588 -0.0634417 -0.965689 0.251328 -0.0654062 -0.969709 0.235922 -0.0632911 -0.976139 0.209567 -0.0568629 -0.983952 0.172565 -0.0453974 -0.992797 0.116182 -0.0292499 -0.997847 0.0609267 -0.0242753 -0.994838 0.100401 -0.0147049 -0.985681 0.166851 -0.0243583 -0.971994 0.232524 -0.0340557 -0.950977 0.306072 -0.0442975 -0.925701 0.374102 -0.0559146 -0.845455 0.527796 -0.0814713 -0.879263 0.470686 -0.0731515 -0.901145 0.428698 -0.0644659 -0.873318 0.481664 -0.072909 -0.898178 0.432563 -0.0785185 -0.873325 0.479198 -0.0875987 -0.873145 0.479526 -0.0875951 -0.87315 0.47743 -0.0983345 -0.900398 0.425848 -0.0890925 -0.807826 0.582617 -0.0893052 -0.766615 0.634797 -0.0966175 -0.623201 0.773561 -0.114993 -0.988528 0.148761 -0.0261278 -0.977567 0.207429 -0.0365375 -0.960714 0.273407 -0.0477234 -0.94039 0.334755 -0.0600507 -0.920654 0.384175 -0.0693175 -0.842309 0.530314 -0.0963459 -0.808242 0.579486 -0.104604 -0.771138 0.626675 -0.112363 -0.731328 0.671461 -0.119579 -0.748159 0.652137 -0.122375 -0.693544 0.709108 -0.127135 -0.622237 0.769741 -0.142547 -0.688781 0.713899 -0.126211 -0.996774 0.0786816 -0.0158485 -0.991042 0.13092 -0.0263593 -0.982459 0.182782 -0.0369365 -0.993222 0.113434 -0.0253579 -0.969273 0.241183 -0.0483888 -0.986723 0.158466 -0.0355729 -0.953353 0.295672 -0.0607935 -0.976712 0.209397 -0.0467554 
-0.937821 0.339929 -0.0702846 -0.964623 0.25701 -0.0587219 -0.920216 0.38321 -0.0797016 -0.952847 0.295741 -0.0679662 -0.939387 0.334048 -0.0772298 -0.845507 0.523116 -0.10709 -0.878663 0.467202 -0.0983567 -0.907686 0.408606 -0.0956377 -0.878666 0.465777 -0.104872 -0.924328 0.371679 -0.0864403 -0.815433 0.567234 -0.115395 -0.782954 0.609749 -0.123245 -0.748164 0.650534 -0.130601 -0.757815 0.636156 -0.144989 -0.746819 0.646538 -0.155724 -0.723751 0.675911 -0.139025 -0.829083 0.545797 -0.121351 -0.69356 0.705235 -0.147031 -0.757804 0.63897 -0.1321 -0.801397 0.584058 -0.128995 -0.997559 0.0681544 -0.0152304 -0.998229 0.0578153 -0.0140186 -0.998789 0.0476372 -0.0123089 -0.998229 0.057391 -0.0156725 -0.996625 0.0794653 -0.0205839 -0.995074 0.0957072 -0.0258377 -0.995074 0.0963365 -0.023385 -0.997558 0.0676224 -0.0174527 -0.99037 0.134505 -0.0328036 -0.983073 0.178038 -0.0432448 -0.974277 0.218706 -0.0543314 -0.965689 0.25195 -0.0629802 -0.955911 0.284784 -0.0716464 -0.944902 0.317337 -0.0803605 -0.932678 0.349536 -0.0890829 -0.919384 0.381016 -0.0977765 -0.848404 0.515057 -0.122178 -0.814804 0.563265 -0.137211 -0.9996 0.0276148 -0.00603081 -0.99972 0.0229536 -0.00580092 -0.999713 0.023165 -0.00613946 -0.997873 0.0628928 -0.0171506 -0.998835 0.0464081 -0.0131988 -0.997873 0.0626143 -0.0181372 -0.99771 0.0650061 -0.018676 -0.99771 0.0649986 -0.0187016 -0.999143 0.0396193 -0.0119768 -0.993396 0.111027 -0.0289294 -0.995853 0.0877257 -0.024108 -0.995853 0.0874603 -0.0250487 -0.996 0.0858631 -0.0247213 -0.996 0.0859197 -0.0245265 -0.998519 0.0520214 -0.0159372 -0.998519 0.0523475 -0.0148412 -0.999419 0.0322763 -0.0109749 -0.988405 0.146963 -0.0381871 -0.982358 0.180732 -0.0480588 -0.988916 0.14292 -0.0402303 -0.976475 0.208289 -0.0557874 -0.985151 0.165174 -0.0468612 -0.969709 0.235836 -0.0636066 -0.980925 0.186888 -0.0534777 -0.962139 0.263028 -0.0714437 -0.976141 0.208622 -0.0602115 -0.953745 0.289961 -0.0793311 -0.970799 0.230341 -0.0670269 -0.944522 0.316641 -0.0872742 -0.965002 0.251621 -0.0738741 -0.951741 0.294074 -0.0878042 -0.973441 0.218103 -0.0695941 -0.990056 0.132311 -0.0477726 -0.997482 0.0656574 -0.0268041 -0.99772 0.0643022 -0.0204713 -0.991847 0.122088 -0.0365136 -0.989518 0.138226 -0.0418054 -0.996083 0.0839625 -0.0277338 -0.98687 0.154462 -0.047203 -0.995083 0.0938874 -0.0315484 -0.983958 0.170452 -0.0526622 -0.994003 0.103457 -0.0354285 -0.980727 0.186502 -0.0582357 -0.992795 0.11315 -0.0394394 -0.951734 0.29695 -0.0776071 -0.967354 0.241856 -0.0757071 -0.987742 0.147051 -0.052366 -0.973425 0.222051 -0.0560075 -0.997473 0.0699127 -0.0126361 -0.990063 0.136658 -0.0331717 -0.996956 0.0721654 -0.0295047 -0.998211 0.0556833 -0.0217833 -0.998496 0.0512475 -0.0194684 -0.998778 0.0463502 -0.0171808 -0.99925 0.0377135 -0.00884276 -0.999251 0.0365063 -0.0128615 -0.999641 0.0254607 -0.00829857 -0.999889 0.0143541 -0.00396386 -0.999889 0.0142306 -0.00437461 -0.994209 0.101518 -0.035238 -0.927671 0.372797 -0.0211957 0.00101737 0.0438676 0.999037 0.00438038 0.0415158 0.999128 0.00581711 0.0379893 0.999261 0.00622586 0.0357089 0.999343 0.00456248 0.0341182 0.999407 0.0109681 0.233428 0.972312 0.0142979 0.2232 0.974668 0.0065345 0.310665 0.950497 0.0182081 0.382615 0.923728 0.0113344 0.406398 0.913626 0.0392854 0.36727 0.929284 0.0505355 0.317328 0.946968 0.0553437 0.268324 0.961738 0.0540204 0.220077 0.973985 0.0481567 0.173028 0.983739 0.0425194 0.145609 0.988428 0.0362239 0.118427 0.992302 0.032309 0.109763 0.993433 0.0264589 0.0839368 0.99612 0.0159599 0.0408677 0.999037 0.0165217 0.522437 
0.852518 0.0296796 0.492845 0.869611 0.0458123 0.447713 0.893003 0.0602129 0.388133 0.919634 0.0665914 0.329092 0.941947 0.0654179 0.270551 0.96048 0.0585957 0.213072 0.975278 0.0517143 0.179502 0.982397 0.0441497 0.14604 0.988293 0.0376728 0.126682 0.991228 0.0304154 0.0950683 0.995006 0.0185642 0.0466001 0.998741 0.015837 0.562002 0.826984 0.0530601 0.511005 0.857939 0.0697847 0.444341 0.893135 0.0771738 0.377706 0.922704 0.0759211 0.311129 0.94733 0.0681321 0.245412 0.967022 0.0599876 0.207002 0.9765 0.0513002 0.168556 0.984356 0.0431318 0.143548 0.988703 0.0344721 0.106174 0.99375 0.0211934 0.0517776 0.998434 0.0251765 0.760167 0.649239 0.0352787 0.742689 0.668706 0.0221366 0.79468 0.606624 0.028691 0.852288 0.522285 0.0372222 0.840283 0.540869 0.08829 0.780431 0.618977 0.116873 0.694294 0.710138 0.129648 0.601252 0.788472 0.128807 0.502977 0.854648 0.11702 0.40133 0.908427 0.101945 0.340781 0.934599 0.0879053 0.278629 0.956367 0.0716851 0.226698 0.971323 0.0564491 0.166532 0.984419 0.0388035 0.0802058 0.996023 0.00874194 0.923845 0.382666 0.0694625 0.879151 0.471454 0.0949561 0.822871 0.560238 0.125943 0.73665 0.664444 0.139899 0.641147 0.754559 0.139346 0.538539 0.830999 0.126968 0.431094 0.893329 0.110412 0.366704 0.923763 0.0953906 0.300166 0.949105 0.0779232 0.251226 0.964787 0.0664971 0.289039 0.955005 0.0652239 0.304206 0.950371 0.038737 0.0776768 0.996226 0.0215192 0.334997 0.941973 0.0242024 0.930736 0.364889 0.117437 0.875209 0.469273 0.149851 0.791104 0.593042 0.16352 0.694013 0.701147 0.161797 0.586666 0.793501 0.147222 0.471826 0.869313 0.127171 0.402567 0.906514 0.110016 0.330193 0.93748 0.0892703 0.282274 0.955171 0.0891289 0.33611 0.937596 0.0325279 0.971854 0.233326 -0.0306197 0.996451 0.0784138 0.0873577 0.973017 0.213555 0.0520279 0.959358 0.277354 0.119549 0.934542 0.335171 0.107266 0.886246 0.450625 0.147751 0.863131 0.482881 0.157727 0.772988 0.614502 0.151899 0.668017 0.728478 0.132695 0.552549 0.82285 0.108306 0.410836 0.905253 0.125091 0.450893 0.883769 0.13845 0.787624 0.6004 0.148588 0.671211 0.726222 0.141546 0.541551 0.828666 0.102948 0.428816 0.897507 0.00371433 0.0134358 0.999903 0.00498604 0.0171881 0.99984 0.00485491 0.0169481 0.999845 0.00496863 0.0197154 0.999793 0.00599393 0.022693 0.999725 0.0169597 0.0718757 0.997269 0.0289736 0.146176 0.988834 0.03689 0.241522 0.969694 0.041882 0.366995 0.92928 0.00540156 0.0247334 0.99968 0.00679816 0.0291567 0.999552 0.0173784 0.087911 0.995977 0.0262561 0.173428 0.984497 0.0282847 0.280182 0.95953 0.0263972 0.405561 0.913687 0.00596166 0.0293203 0.999552 0.01577 0.104395 0.994411 0.0201694 0.201477 0.979286 0.021453 0.309826 0.950551 0.0041894 0.041182 0.999143 0.00297645 0.0456305 0.998954 0.0147531 0.0874303 0.996061 0.0142239 0.0785738 0.996807 0.0360316 0.154745 0.987297 0.0556927 0.213178 0.975425 0.0692934 0.260154 0.963078 0.0792647 0.318464 0.944615 0.0900956 0.375473 0.922444 0.101055 0.470306 0.876698 0.105367 0.562358 0.820153 0.0983164 0.65062 0.753013 0.0769122 0.734292 0.674462 0.0458336 0.793539 0.606791 0.081467 0.733807 0.674456 0.0467381 0.839787 0.540901 0.0132206 0.0666641 0.997688 0.0318226 0.13152 0.990803 0.0478866 0.178976 0.982687 0.0583398 0.21511 0.974846 0.066693 0.264012 0.962211 0.0759097 0.312092 0.947014 0.0862191 0.392894 0.915533 0.0906234 0.472945 0.876419 0.0856547 0.551721 0.829619 0.0685878 0.629019 0.774358 0.042211 0.686044 0.726334 0.0327196 0.625902 0.779215 0.0210998 0.649292 0.760246 0.0118033 0.054602 0.998438 0.0109847 0.0488736 0.998745 0.00999092 0.0426619 0.99904 
0.00899389 0.037036 0.999273 0.0194863 0.073505 0.997104 0.0256562 0.0891873 0.995684 0.025705 0.0899429 0.995615 0.029087 0.110886 0.993407 0.0331414 0.131722 0.990732 0.0386324 0.16723 0.985161 0.0411277 0.203768 0.978155 0.0397189 0.241079 0.969692 0.040703 0.203852 0.978155 0.0509149 0.317268 0.946968 0.00597958 0.022508 0.999729 0.0124044 0.0448639 0.998916 0.014083 0.0479871 0.998749 0.011285 0.038549 0.999193 0.0127432 0.0475514 0.998788 0.0145126 0.0565262 0.998296 0.0170488 0.071855 0.997269 0.015065 0.0563836 0.998296 0.0279751 0.119655 0.992421 0.0283142 0.119577 0.992421 0.029833 0.146008 0.988833 0.00274814 0.00957195 0.99995 0.00552441 0.0191616 0.999801 0.00331746 0.0109475 0.999935 0.00345847 0.0112908 0.99993 0.00813534 0.0289272 0.999548 0.00858426 0.0287929 0.999549 0.00555616 0.0191522 0.999801 0.01821 0.0877482 0.995976 0.0367272 0.930285 0.364994 0.0470152 0.880593 0.471536 0.0192249 0.687226 0.726189 0.0443088 0.74219 0.668723 0.0674228 0.629143 0.77436 0.0395384 0.62551 0.779214 0.0602795 0.571587 0.818324 0.0634281 0.571253 0.81832 0.0793401 0.498784 0.863087 0.0784306 0.498927 0.863088 0.0877626 0.42516 0.900853 0.0864688 0.351124 0.932328 0.0777652 0.277453 0.957587 0.068305 0.234179 0.969791 0.0584966 0.190837 0.979877 0.0486729 0.160336 0.985862 0.0386197 0.1172 0.992357 0.0239474 0.0571389 0.998079 0.0363562 0.560895 0.827088 0.0326947 0.492668 0.869603 0.0100336 0.078453 0.996867 0.00198851 0.13502 0.990841 0.0121656 0.121424 0.992526 0.0149728 0.121129 0.992524 0.0158734 0.223108 0.974664 0.0099287 0.134566 0.990855 0.0766115 0.879674 0.469366 0.0823107 0.824222 0.560253 0.107646 0.649151 0.753005 0.0799899 0.781317 0.618988 0.0745038 0.683281 0.726345 0.0730996 0.68343 0.726347 0.0888538 0.551218 0.829617 0.0983001 0.601384 0.79289 0.108845 0.516257 0.849489 0.107638 0.428799 0.896965 0.0972654 0.340331 0.935262 0.0850714 0.28801 0.953841 0.073092 0.235089 0.969222 0.0600129 0.193695 0.979225 0.0472162 0.139226 0.989134 0.0297398 0.0677436 0.997259 0.0576515 0.510518 0.857932 0.0513069 0.447132 0.892995 0.0331209 0.279672 0.959524 0.0242618 0.201046 0.979281 0.107775 0.797846 0.59315 0.108356 0.739426 0.664461 0.119286 0.559617 0.820122 0.103678 0.696374 0.71015 0.098326 0.471432 0.876404 0.0922962 0.602328 0.792895 0.07067 0.444202 0.893135 0.0624045 0.387789 0.919633 0.0289015 0.173013 0.984495 0.017699 0.104093 0.99441 0.121038 0.702482 0.701334 0.118039 0.645461 0.754617 0.118238 0.466312 0.876683 0.111983 0.604734 0.788516 0.097047 0.390379 0.915525 0.0982428 0.518342 0.849512 0.0825387 0.426186 0.900862 0.0740144 0.378326 0.922709 0.0650853 0.329387 0.941949 0.039823 0.166953 0.98516 0.0530721 0.268769 0.961741 0.119317 0.596608 0.793613 0.114341 0.544356 0.831025 0.107113 0.371061 0.92241 0.107898 0.50784 0.854668 0.0874796 0.309098 0.946998 0.0938234 0.432009 0.896976 0.0782638 0.353029 0.932333 0.0699764 0.312509 0.947334 0.0613834 0.271487 0.960483 0.0354968 0.131114 0.990732 0.0500431 0.221001 0.973989 0.0251939 0.0938295 0.995269 0.0224216 0.0788295 0.996636 0.0190485 0.0640601 0.997764 0.0192585 0.0673522 0.997543 0.0177537 0.0591554 0.998091 0.0109287 0.0289216 0.999522 0.109519 0.481664 0.869486 0.103417 0.437211 0.893393 0.0934974 0.314607 0.944606 0.0968665 0.406558 0.908475 0.0766737 0.261307 0.962206 0.0831008 0.343999 0.935286 0.0685166 0.279844 0.957597 0.0609492 0.24727 0.967028 0.053209 0.214464 0.975281 0.0315014 0.110232 0.993407 0.0431585 0.17432 0.983743 0.0134466 0.0473609 0.998787 0.0238998 0.0941638 0.99527 0.0969724 0.410771 
0.906567 0.0911699 0.37192 0.923777 0.0853003 0.345295 0.93461 0.0804747 0.257003 0.963054 0.0796704 0.304601 0.949142 0.0658029 0.21016 0.97545 0.074553 0.282392 0.956398 0.0730568 0.291266 0.953848 0.0657679 0.213018 0.974832 0.0543011 0.177061 0.982701 0.0638847 0.237682 0.96924 0.0601661 0.236385 0.969795 0.0535092 0.208757 0.976503 0.0526652 0.192478 0.979887 0.0466933 0.180862 0.982399 0.0468657 0.169805 0.984363 0.0378992 0.14686 0.988431 0.0268047 0.0896336 0.995614 0.0409196 0.146948 0.988297 0.0255569 0.089217 0.995684 0.0339666 0.119063 0.992306 0.0209768 0.0792207 0.996636 0.0114149 0.0385124 0.999193 0.0136275 0.0481233 0.998748 0.0185514 0.0641998 0.997765 0.0594289 0.230357 0.97129 0.0516481 0.150121 0.987318 0.0327801 0.0729961 0.996793 0.0382837 0.171908 0.984369 0.0518529 0.196132 0.979206 0.0428726 0.128249 0.990815 0.0268062 0.0625883 0.997679 0.033979 0.143151 0.989117 0.0438029 0.161792 0.985852 0.0396091 0.1446 0.988697 0.029559 0.119884 0.992348 0.0353031 0.12739 0.991224 0.0272015 0.108331 0.993743 0.03089 0.110187 0.993431 0.0225611 0.0725928 0.997106 0.0247394 0.0967501 0.995001 0.0135336 0.0356739 0.999272 0.0221749 0.0852058 0.996117 0.0194774 0.0672868 0.997544 0.0132966 0.0445992 0.998917 0.0078747 0.0219385 0.999728 0.0160681 0.0596547 0.99809 0.00938072 0.0319486 0.999445 0.0090386 0.03205 0.999445 0.00321465 0.00943175 0.99995 0.00547287 0.015647 0.999863 0.00272717 0.00909676 0.999955 0.00119292 0.00402624 0.999991 0.0661425 0.254739 0.964745 0.0137648 0.0725417 0.99727 0.012554 0.06056 0.998086 0.00760861 0.0299182 0.999523 0.00441062 0.0159658 0.999863 -0.0784127 0.996431 0.0312383 -0.0801999 0.99656 0.0208787 -0.0755132 0.99656 0.0341432 -0.0686615 0.99656 0.0464084 -0.0598306 0.99656 0.0573451 -0.0492861 0.99656 0.0666253 -0.0373074 0.99656 0.0740011 -0.0242667 0.99656 0.0792413 -0.0105255 0.99656 0.0822026 -0.0990696 0.992014 0.0780646 -0.382654 0.923821 0.0114392 -0.397825 0.915716 0.0565715 -0.522357 0.852387 0.0240977 -0.543075 0.836908 0.0682261 -0.530649 0.836255 0.13816 -0.499656 0.836259 0.225863 -0.4543 0.836257 0.30706 -0.395882 0.836259 0.379406 -0.326055 0.836258 0.440864 -0.246863 0.836256 0.489627 -0.160567 0.836256 0.524303 -0.0696485 0.836259 0.543894 0.0646005 0.75882 0.648089 -0.09892 0.834753 0.541665 0.0698215 0.850558 0.521225 -0.0985741 0.912717 0.396522 -0.0511747 0.915242 0.399641 -0.117984 0.915243 0.38524 -0.181388 0.915242 0.359765 -0.239584 0.915243 0.323928 -0.290874 0.915241 0.278794 -0.333815 0.915242 0.225609 -0.367133 0.915243 0.165961 -0.389904 0.915241 0.101533 -0.389902 0.915243 0.101517 -0.53065 0.836254 0.138163 -0.649029 0.759933 0.0355281 -0.673218 0.735239 0.078748 -0.656711 0.734506 0.170972 -0.61836 0.7345 0.279537 -0.562232 0.734504 0.379998 -0.489926 0.734497 0.469561 -0.403529 0.734505 0.545588 -0.305508 0.734506 0.605942 -0.198706 0.734503 0.648861 -0.0861937 0.734502 0.67311 0.0602526 0.648257 0.759034 -0.0994875 0.733871 0.671964 -0.759626 0.648769 0.0454724 -0.784756 0.613559 0.0877688 -0.764726 0.612821 0.19911 -0.720063 0.612836 0.325487 -0.654698 0.612834 0.442499 -0.570497 0.612837 0.546775 -0.469892 0.612834 0.635324 -0.355749 0.612828 0.70561 -0.231396 0.612833 0.755574 -0.100373 0.612837 0.783809 0.0567926 0.521665 0.851258 -0.0999945 0.612853 0.783845 -0.85141 0.521746 0.0536814 -0.874696 0.475257 0.0950692 -0.851789 0.474623 0.221783 -0.802054 0.474634 0.362535 -0.72924 0.474628 0.492887 -0.635459 0.474632 0.609029 -0.523391 0.474626 0.707667 -0.396259 0.474631 0.785942 -0.257741 0.474629 
0.841604 -0.111799 0.474623 0.87306 0.0541224 0.382118 0.922527 -0.100595 0.474994 0.874221 -0.922213 0.381999 0.0599978 -0.940669 0.324122 0.100436 -0.915645 0.323653 0.238417 -0.862186 0.323638 0.389735 -0.783918 0.323645 0.529836 -0.683104 0.323645 0.654693 -0.562626 0.32364 0.760729 -0.425975 0.32365 0.844865 -0.277063 0.323649 0.904703 -0.120179 0.32365 0.938514 0.0523479 0.233122 0.971037 -0.101041 0.324092 0.940614 -0.970358 0.232965 0.0642907 -0.980947 0.164277 0.103712 -0.954624 0.16402 0.248579 -0.898892 0.164037 0.406307 -0.817285 0.164032 0.552393 -0.712171 0.164011 0.682578 -0.58659 0.164029 0.793099 -0.444103 0.164019 0.880835 -0.288862 0.164022 0.943216 -0.125295 0.16402 0.978467 0.0514565 0.078353 0.995597 -0.101342 0.164304 0.98119 -0.994715 0.0782776 0.0664466 -0.995453 0.0541892 0.0783353 -0.971927 0.0301342 0.233343 -0.965786 0.0633415 0.251486 -0.923672 0.0210316 0.382606 -0.908907 0.0714296 0.410835 -0.852563 0.0138208 0.522442 -0.760374 0.00862257 0.649428 -0.649433 0.00537446 0.760399 -0.522504 0.00404781 0.852627 -0.382674 0.00471523 0.923871 -0.233436 0.00730793 0.972345 -0.0784514 0.0118806 0.996847 -0.126616 0.0792305 0.988783 -0.826008 0.0776128 0.558289 -0.719526 0.0818394 0.689627 -0.592538 0.0840888 0.801141 -0.448592 0.0844303 0.88974 -0.291822 0.0828007 0.952882 0.0757513 0.921226 0.381581 -0.0985992 0.965611 0.240568 -0.0312856 0.969186 0.244335 -0.0721293 0.969186 0.235534 -0.110892 0.969186 0.21996 -0.146494 0.969187 0.198032 -0.177835 0.969186 0.170448 -0.204085 0.969186 0.137941 -0.224457 0.969185 0.101488 -0.238382 0.969187 0.0620588 -0.233447 0.972367 -0.00220691 -0.241512 0.969394 0.0441207 0.0826188 0.969045 0.232651 -0.954625 0.164032 0.248567 -0.915645 0.323661 0.238409 -0.851789 0.474627 0.221779 -0.764722 0.612832 0.199092 -0.656713 0.734502 0.170982 -0.238382 0.969185 0.0620762 -0.898887 0.164019 0.406325 -0.862188 0.323654 0.389717 -0.802054 0.474623 0.362549 -0.720064 0.612821 0.325513 -0.618359 0.734507 0.27952 -0.367133 0.915242 0.165968 -0.499657 0.836255 0.225878 -0.224461 0.969186 0.101466 -0.817287 0.164036 0.55239 -0.783916 0.323639 0.529842 -0.729241 0.474633 0.49288 -0.654699 0.612836 0.442493 -0.562231 0.734499 0.38001 -0.333813 0.915241 0.225616 -0.454302 0.836259 0.307051 -0.204088 0.969187 0.137933 -0.712182 0.164029 0.682562 -0.683104 0.323646 0.654692 -0.635457 0.474628 0.609034 -0.570495 0.612834 0.546782 -0.489932 0.734505 0.469542 -0.29088 0.915243 0.278781 -0.39588 0.836257 0.379413 -0.177834 0.969186 0.170449 -0.586577 0.16401 0.793113 -0.562631 0.323647 0.760723 -0.523394 0.47463 0.707661 -0.469895 0.612838 0.635318 -0.40352 0.734497 0.545606 -0.239579 0.915242 0.323935 -0.326056 0.836259 0.440861 -0.14648 0.969185 0.198048 -0.444109 0.164026 0.880831 -0.425969 0.323643 0.84487 -0.396253 0.474625 0.785948 -0.355755 0.612834 0.705601 -0.305507 0.734506 0.605943 -0.181391 0.915243 0.359761 -0.246867 0.836258 0.489622 -0.110898 0.969186 0.219955 -0.288858 0.164018 0.943218 -0.277065 0.323651 0.904702 -0.257742 0.47463 0.841603 -0.231388 0.612827 0.755581 -0.198711 0.734506 0.648857 -0.117979 0.915242 0.385244 -0.160566 0.836256 0.524304 -0.0721339 0.969186 0.235531 -0.125295 0.16402 0.978467 -0.12018 0.323652 0.938513 -0.111802 0.474626 0.873058 -0.100369 0.612834 0.783811 -0.0861952 0.734503 0.673109 -0.0511709 0.915241 0.399643 -0.0696415 0.836256 0.543898 -0.0312874 0.969186 0.244334 0 0.0105742 0.999944 0.000680308 0.105935 0.994373 0.000848014 0.272466 0.962165 0.000968123 0.431152 0.902279 0.0010393 0.577428 0.816441 
0.00106388 0.707109 0.707104 0.00104003 0.816442 0.577427 0.000968414 0.902277 0.431157 0.000848226 0.962164 0.272469 0.000680688 0.994373 0.10593 0 0.999944 0.0106181 -0.000425215 0.996918 0.0784506 0.000742057 0.233442 0.97237 -0.00101987 0.272466 0.962165 0.000914343 0.382678 0.923881 -0.00116432 0.431152 0.902278 0.00102926 0.522508 0.852634 0.00108735 0.649437 0.760415 0.00108673 0.760408 0.649445 0.00102867 0.852639 0.5225 0.000914075 0.92388 0.38268 0.000741667 0.972369 0.233449 0.000511389 0.996918 0.0784506 -0.000818638 0.994373 0.10593 -0.00124993 0.577428 0.816441 -0.00127948 0.707109 0.707103 -0.00125081 0.816442 0.577427 -0.00116467 0.902276 0.431157 -0.00102013 0.962164 0.272469 -0.000616688 0.972369 0.233449 -0.000760043 0.92388 0.382681 -0.000855331 0.852639 0.5225 -0.000903604 0.760408 0.649445 -0.000904117 0.649437 0.760415 -0.00085582 0.522508 0.852634 -0.000760266 0.382678 0.923881 -0.000617012 0.233442 0.97237 -0.000425529 0.0784569 0.996917 0.000511767 0.0784569 0.996917 -0.00081818 0.105935 0.994373 -0.0105742 0.999944 3.00208e-07 -0.10593 0.994373 -0.000742663 -0.272471 0.962164 -0.000925669 -0.431157 0.902276 -0.0010571 -0.577424 0.816444 -0.00113479 -0.707106 0.707106 -0.00116123 -0.816444 0.577424 -0.00113493 -0.902281 0.431148 -0.00105703 -0.962161 0.272481 -0.000926503 -0.994373 0.10593 -0.000743196 -0.999944 0.0106176 3.18768e-09 -0.996918 0.078451 0.000464388 -0.233448 0.972369 -0.000673554 -0.272471 0.962164 0.000926247 -0.382678 0.923881 -0.000829939 -0.431157 0.902276 0.00105765 -0.522508 0.852634 -0.00093405 -0.649439 0.760413 -0.000987149 -0.760412 0.64944 -0.00098718 -0.852639 0.5225 -0.000934368 -0.923877 0.382689 -0.000829549 -0.972369 0.233448 -0.000673553 -0.996918 0.078451 -0.000464341 -0.994373 0.10593 0.00074326 -0.577424 0.816444 0.00113528 -0.707106 0.707106 0.00116166 -0.816444 0.577424 0.00113528 -0.902281 0.431148 0.00105729 -0.962161 0.272481 0.000926666 -0.972369 0.233448 0.000673694 -0.923877 0.382689 0.000829778 -0.852639 0.5225 0.000934682 -0.760412 0.64944 0.00098757 -0.649439 0.760413 0.000987606 -0.522508 0.852634 0.000934562 -0.382678 0.923881 0.000830494 -0.233448 0.972369 0.000674138 -0.078451 0.996918 0.000464939 -0.10593 0.994373 0.00074326 -0.078451 0.996918 -0.000464341 -0.999037 0.0438575 0.00101713 -0.999129 0.0415044 0.00438006 -0.999261 0.0379855 0.00581096 -0.999342 0.0357376 0.00623079 -0.999406 0.034146 0.0045666 -0.972311 0.233434 0.0109662 -0.974666 0.22321 0.0142946 -0.950497 0.310665 0.00653213 -0.923724 0.382625 0.0182073 -0.91362 0.406412 0.0113326 -0.929292 0.36725 0.0392954 -0.946968 0.317331 0.0505317 -0.961732 0.268346 0.0553445 -0.973989 0.220062 0.0540219 -0.983732 0.173065 0.0481577 -0.988429 0.145602 0.0425298 -0.992298 0.118458 0.0362255 -0.993442 0.109685 0.0322935 -0.996148 0.0836266 0.0263912 -0.999038 0.0408432 0.0159537 -0.852523 0.522428 0.016519 -0.869611 0.492845 0.0296729 -0.892987 0.447744 0.0458117 -0.919653 0.388088 0.0602209 -0.941955 0.329071 0.0665867 -0.960481 0.270548 0.0654189 -0.975284 0.213047 0.0585909 -0.982385 0.179567 0.0517253 -0.988282 0.146114 0.0441578 -0.991225 0.126701 0.0376833 -0.994991 0.0952205 0.0304356 -0.998755 0.0463135 0.0185318 -0.827 0.561979 0.0158355 -0.857938 0.511006 0.053055 -0.89313 0.44435 0.0697906 -0.922716 0.377676 0.0771736 -0.94733 0.31113 0.0759189 -0.96702 0.245419 0.0681305 -0.97649 0.207049 0.0599913 -0.984357 0.168549 0.0513015 -0.988676 0.143723 0.0431624 -0.993777 0.105934 0.0344406 -0.998418 0.0520659 0.0212239 -0.649234 0.760172 0.0251802 
-0.668706 0.742689 0.0352856 -0.606602 0.794697 0.0221384 -0.522293 0.852283 0.0286904 -0.54089 0.840269 0.0372274 -0.618984 0.780425 0.0882916 -0.710146 0.694286 0.116876 -0.788465 0.601262 0.129646 -0.854646 0.502981 0.128802 -0.908418 0.401353 0.117018 -0.934608 0.340755 0.101955 -0.956356 0.278668 0.0879011 -0.971312 0.226746 0.0716885 -0.984412 0.16657 0.0564491 -0.996023 0.0801995 0.038804 -0.382664 0.923846 0.00874257 -0.471444 0.879156 0.069458 -0.560231 0.822876 0.0949545 -0.66442 0.736672 0.125936 -0.75455 0.641157 0.139899 -0.83099 0.538553 0.139341 -0.893328 0.431097 0.126967 -0.923786 0.366639 0.110429 -0.949089 0.300221 0.0953767 -0.964789 0.251217 0.077925 -0.955004 0.289044 0.066492 -0.950379 0.30418 0.0652215 -0.996224 0.0777053 0.0387384 -0.941971 0.335004 0.0215224 -0.364893 0.930735 0.024204 -0.46927 0.875212 0.117433 -0.593049 0.791098 0.149855 -0.701143 0.694018 0.163516 -0.793522 0.586636 0.161804 -0.869317 0.471822 0.147216 -0.906492 0.402616 0.127176 -0.937487 0.330175 0.110012 -0.95517 0.282279 0.0892715 -0.937603 0.336091 0.0891303 -0.233324 0.971855 0.0325291 -0.0784142 0.996451 -0.0306187 -0.213554 0.973018 0.0873569 -0.277353 0.959358 0.0520258 -0.335174 0.934541 0.11955 -0.450622 0.886247 0.107267 -0.482878 0.863133 0.14775 -0.614502 0.772987 0.157729 -0.728474 0.668022 0.151899 -0.822868 0.552522 0.132695 -0.905242 0.410863 0.1083 -0.883747 0.450935 0.125096 -0.600395 0.787628 0.138447 -0.726218 0.671214 0.14859 -0.828682 0.541527 0.141543 -0.897505 0.428819 0.10295 -0.999902 0.0135071 0.0037341 -0.99984 0.0171646 0.00497369 -0.999844 0.0169774 0.00487141 -0.999792 0.019788 0.0049869 -0.999725 0.0226898 0.00598607 -0.997268 0.071894 0.0169658 -0.98884 0.146139 0.0289744 -0.969697 0.24151 0.036891 -0.929287 0.366976 0.0418827 -0.99968 0.0247163 0.00539779 -0.999552 0.0291564 0.00679965 -0.995974 0.0879419 0.0173812 -0.984494 0.17344 0.0262567 -0.959531 0.280181 0.0282825 -0.913681 0.405574 0.0263963 -0.999552 0.0293196 0.00596533 -0.994412 0.104383 0.0157688 -0.979283 0.201488 0.0201671 -0.950551 0.309827 0.0214505 -0.999143 0.0411687 0.00418801 -0.998954 0.0456201 0.00297576 -0.996062 0.0874271 0.014756 -0.996819 0.0784203 0.0142178 -0.987298 0.154741 0.0360272 -0.97546 0.213021 0.0556782 -0.963098 0.260083 0.0692789 -0.944639 0.318394 0.0792597 -0.922433 0.3755 0.0900932 -0.876661 0.470376 0.101051 -0.820175 0.562325 0.105375 -0.753014 0.650618 0.0983144 -0.674455 0.734299 0.0769116 -0.606769 0.793556 0.0458281 -0.674449 0.733813 0.0814694 -0.540922 0.839773 0.0467469 -0.997688 0.0666575 0.0132229 -0.990793 0.131593 0.0318284 -0.982699 0.178918 0.0478743 -0.974871 0.215001 0.0583279 -0.962223 0.263969 0.0666825 -0.947003 0.312126 0.0759149 -0.915546 0.392863 0.0862162 -0.876405 0.472973 0.0906204 -0.829626 0.551709 0.0856603 -0.774377 0.628996 0.0685895 -0.726334 0.686044 0.0422069 -0.779221 0.625894 0.0327251 -0.760243 0.649296 0.0211 -0.998423 0.0548843 0.0118413 -0.998759 0.0485914 0.010942 -0.999041 0.0426347 0.00998904 -0.999274 0.0370341 0.00899645 -0.997088 0.073718 0.0195275 -0.995685 0.0891866 0.0256523 -0.995621 0.0898873 0.0256887 -0.993415 0.110821 0.0290762 -0.990732 0.131723 0.0331392 -0.985156 0.167254 0.0386363 -0.978155 0.203769 0.0411276 -0.969696 0.241066 0.0397224 -0.978155 0.203852 0.0407065 -0.946967 0.31727 0.0509174 -0.999741 0.0219801 0.00585823 -0.998919 0.0447988 0.0123841 -0.998743 0.0480897 0.0141096 -0.999199 0.0384104 0.0112487 -0.998786 0.0475862 0.0127454 -0.9983 0.0564539 0.0145046 -0.997268 0.0718753 0.0170465 -0.9983 
0.0563127 0.015051 -0.992424 0.119636 0.027972 -0.992424 0.119557 0.028311 -0.988839 0.145971 0.0298288 -0.999952 0.00943683 0.00271308 -0.999797 0.0193645 0.00557522 -0.999932 0.011148 0.00338317 -0.999929 0.0113863 0.0034811 -0.999554 0.0287403 0.00808311 -0.999554 0.0286071 0.00852876 -0.999797 0.0193508 0.00562138 -0.995973 0.0877784 0.0182169 -0.364998 0.930284 0.0367289 -0.471526 0.880598 0.0470123 -0.726189 0.687226 0.0192287 -0.668723 0.74219 0.04431 -0.774379 0.629119 0.0674284 -0.77922 0.625503 0.039535 -0.818331 0.571578 0.0602761 -0.818326 0.571243 0.0634344 -0.863067 0.498819 0.0793371 -0.863068 0.498961 0.078432 -0.900855 0.425156 0.0877684 -0.932325 0.351133 0.0864652 -0.957589 0.277447 0.077763 -0.969802 0.234133 0.0683025 -0.97987 0.190874 0.058495 -0.985887 0.160185 0.0486538 -0.992341 0.117332 0.0386362 -0.998077 0.0571805 0.0239497 -0.827104 0.560871 0.03636 -0.869603 0.492667 0.0327031 -0.996868 0.0784471 0.0100344 -0.990839 0.13503 0.00198704 -0.992523 0.121452 0.0121642 -0.99252 0.121157 0.0149696 -0.974662 0.223118 0.0158713 -0.990853 0.134576 0.00993049 -0.469363 0.879676 0.0766101 -0.560246 0.824226 0.0823107 -0.753006 0.649148 0.107649 -0.618995 0.78131 0.0799933 -0.726339 0.683287 0.0745016 -0.726341 0.683437 0.0730954 -0.829624 0.551207 0.0888512 -0.792904 0.601365 0.0983033 -0.849489 0.516258 0.108842 -0.896982 0.428762 0.107644 -0.935289 0.340259 0.0972583 -0.953825 0.288065 0.0850708 -0.969213 0.235123 0.0730946 -0.979184 0.193894 0.0600358 -0.989144 0.139158 0.0472121 -0.997248 0.0679059 0.0297474 -0.857931 0.510519 0.0576594 -0.89298 0.447163 0.0513033 -0.959525 0.279671 0.0331206 -0.979279 0.201057 0.0242626 -0.593157 0.79784 0.107777 -0.664437 0.739448 0.108351 -0.820143 0.559586 0.11929 -0.710158 0.696366 0.103682 -0.87639 0.471459 0.0983259 -0.792909 0.60231 0.0922981 -0.89313 0.444213 0.0706666 -0.919652 0.387746 0.0624013 -0.984493 0.173025 0.0289024 -0.994411 0.10408 0.0177012 -0.70133 0.702486 0.121034 -0.754608 0.645473 0.118034 -0.876646 0.466382 0.118232 -0.788508 0.604744 0.111978 -0.915538 0.390348 0.0970484 -0.849511 0.518342 0.0982457 -0.900864 0.426183 0.0825364 -0.922721 0.378296 0.0740133 -0.941957 0.329365 0.065086 -0.985156 0.166979 0.0398238 -0.961735 0.268791 0.0530731 -0.793635 0.596579 0.119318 -0.831016 0.54437 0.114338 -0.922399 0.371089 0.107112 -0.854666 0.507843 0.107898 -0.946987 0.309134 0.0874756 -0.896993 0.431976 0.0938151 -0.93233 0.353038 0.0782623 -0.947334 0.31251 0.0699746 -0.960483 0.271486 0.0613798 -0.990732 0.131113 0.0354984 -0.973992 0.220985 0.0500423 -0.995269 0.09383 0.0251928 -0.996636 0.0788238 0.0224178 -0.99776 0.0641265 0.0190659 -0.997547 0.067306 0.019245 -0.998084 0.0592711 0.0177808 -0.999512 0.0292458 0.0109912 -0.869488 0.481655 0.109538 -0.893392 0.43721 0.103429 -0.94463 0.314535 0.0935003 -0.908466 0.406578 0.096874 -0.962218 0.261259 0.0766795 -0.935313 0.343924 0.0831059 -0.957599 0.279839 0.0685146 -0.967026 0.247275 0.0609519 -0.975287 0.214436 0.053216 -0.993414 0.110166 0.0314939 -0.983736 0.174354 0.0431703 -0.998786 0.047395 0.013451 -0.99527 0.0941648 0.0238966 -0.906544 0.410826 0.0969563 -0.9238 0.371864 0.0911614 -0.934619 0.345271 0.0852985 -0.963074 0.25693 0.08047 -0.949127 0.304648 0.0796741 -0.975485 0.210004 0.0657824 -0.956386 0.28243 0.074556 -0.953832 0.291321 0.0730598 -0.974857 0.21291 0.0657561 -0.982712 0.177 0.0542944 -0.969231 0.237716 0.0638928 -0.969806 0.23634 0.060161 -0.976492 0.208804 0.0535131 -0.97988 0.192514 0.0526693 -0.982387 0.180928 0.0467006 -0.984364 
0.169797 0.0468706 -0.988432 0.146856 0.0378987 -0.99562 0.0895762 0.0267949 -0.988286 0.14702 0.0409383 -0.995684 0.0892167 0.0255518 -0.992302 0.119092 0.0339702 -0.996637 0.079212 0.0209848 -0.999199 0.0383704 0.011391 -0.998743 0.0482269 0.0136506 -0.99776 0.0642678 0.0185635 -0.971278 0.230406 0.0594291 -0.987319 0.150115 0.0516432 -0.996804 0.0728537 0.0327698 -0.984362 0.171947 0.0382839 -0.979164 0.196333 0.0518704 -0.990806 0.128321 0.0428735 -0.99768 0.0625764 0.0268057 -0.989128 0.143081 0.0339735 -0.985878 0.16164 0.043787 -0.98867 0.144778 0.0396338 -0.992331 0.120019 0.0295703 -0.991221 0.127412 0.0353035 -0.99377 0.108087 0.0271748 -0.99344 0.110113 0.0308625 -0.99709 0.0728083 0.0225978 -0.994986 0.0969024 0.0247612 -0.999272 0.0356699 0.013541 -0.996145 0.0848903 0.0221277 -0.997547 0.0672388 0.0194698 -0.99892 0.044532 0.0132838 -0.999741 0.021408 0.00776213 -0.998082 0.0597707 0.0160958 -0.99945 0.0318165 0.00933875 -0.99945 0.0319142 0.00900894 -0.999952 0.00929516 0.00318493 -0.999858 0.0159074 0.00552942 -0.999957 0.00891779 0.00267188 -0.99999 0.00420326 0.00124536 -0.964747 0.254733 0.0661373 -0.997259 0.0727002 0.0137824 -0.998083 0.0606 0.0125614 -0.999513 0.0302416 0.00767446 -0.999858 0.0162231 0.00447763 -0.032075 0.996405 0.0784102 -0.0513619 0.958937 0.278931 -0.0955931 0.908708 0.40634 -0.0803649 0.858319 0.506784 -0.0771925 0.816618 0.571994 -0.0734304 0.770055 0.633738 -0.0691241 0.718874 0.691695 -0.0642965 0.663384 0.745512 -0.0589821 0.603927 0.794854 -0.0532207 0.540891 0.839407 -0.0470441 0.474587 0.878951 -0.0410755 0.389991 0.919902 -0.03055 0.297793 0.954141 -0.0222521 0.214323 0.976509 -0.0136802 0.129216 0.991522 -0.00482791 0.0421352 0.9991 -0.00414697 0.0407652 0.99916 -0.00559816 0.0386759 0.999236 -0.00504246 0.0377044 0.999276 -0.00613058 0.0351702 0.999363 -0.0192734 0.0996459 0.994836 -0.0318617 0.147577 0.988537 -0.0427066 0.181469 0.982469 -0.0528596 0.207937 0.976713 -0.057095 0.217972 0.974284 -0.0562934 0.208236 0.976457 -0.0513466 0.187533 0.980915 -0.0416098 0.156028 0.986876 -0.0271607 0.10601 0.993994 -0.0117221 0.0590761 0.998185 -0.0410385 0.96662 0.252908 -0.00765279 0.94357 0.331086 -0.0311462 0.923432 0.382495 -0.0155855 0.896616 0.442534 -0.0247249 0.856652 0.515302 -0.0284488 0.852294 0.522289 -0.0135352 0.811318 0.584448 -0.0436511 0.772327 0.633724 -0.0504353 0.809843 0.584475 -0.0469204 0.818924 0.571981 -0.0525744 0.855425 0.515251 -0.0500344 0.860628 0.50677 -0.042767 0.912728 0.406323 -0.068855 0.941083 0.331092 -0.0251977 0.760167 0.649239 -0.0248939 0.759783 0.649701 -0.0402249 0.721081 0.691682 -0.0478369 0.758751 0.649622 -0.0113416 0.703763 0.710344 -0.0366747 0.665496 0.7455 -0.0448029 0.702409 0.710362 -0.0220609 0.522381 0.852427 -0.0144745 0.507039 0.861802 -0.0050343 0.418012 0.908428 -0.0190381 0.382609 0.923714 -0.00283029 0.319732 0.947504 -0.015352 0.298961 0.954142 -0.0221082 0.318945 0.947515 -0.0191896 0.391674 0.919904 -0.0301267 0.416979 0.908417 -0.0254193 0.476246 0.878945 -0.0292519 0.542727 0.8394 -0.0330085 0.605915 0.794844 -0.0413587 0.641265 0.766204 -0.0196746 0.642212 0.766274 -0.0239884 0.64925 0.760197 -0.0130554 0.233422 0.972288 -0.0113527 0.229989 0.973127 -0.0110566 0.215184 0.976511 -0.0161997 0.229786 0.973106 0.000519536 0.139088 0.99028 -0.00678379 0.129746 0.991524 -0.0100594 0.138655 0.99029 -0.0126351 0.0784507 0.996838 -0.00108416 0.0467471 0.998906 -0.00285586 0.0437821 0.999037 -0.00370303 0.0457556 0.998946 -0.00613264 0.0351745 0.999362 -0.00608702 0.0295104 0.999546 
-0.0193818 0.0887251 0.995868 -0.030833 0.129956 0.99104 -0.0398561 0.15746 0.986721 -0.0474642 0.176897 0.983084 -0.0491385 0.180417 0.982362 -0.0456887 0.165343 0.985177 -0.0377002 0.139472 0.989508 -0.0249704 0.095754 0.995092 -0.0111203 0.0535736 0.998502 -0.0065913 0.0290321 0.999557 -0.018728 0.0780692 0.996772 -0.028747 0.112645 0.993219 -0.0358109 0.133807 0.99036 -0.0407963 0.146255 0.988405 -0.0399184 0.143168 0.988893 -0.0336613 0.122868 0.991852 -0.0226876 0.0854646 0.996083 -0.0104859 0.0485432 0.998766 -0.0050852 0.020178 0.999783 -0.00483864 0.0169148 0.999845 -0.00451249 0.0163227 0.999857 -0.00344192 0.0111487 0.999932 -0.0110112 0.0375128 0.999236 -0.0135542 0.0463545 0.998833 -0.0113893 0.039631 0.99915 -0.00675484 0.026569 0.999624 -0.00105927 0.00357518 0.999993 -0.0662368 0.253256 0.965129 -0.0627811 0.30664 0.949753 -0.0867949 0.270922 0.95868 -0.103176 0.424583 0.899491 -0.107342 0.408158 0.906579 -0.111179 0.407403 0.906456 -0.115167 0.442767 0.88921 -0.113853 0.480305 0.869681 -0.107548 0.507675 0.85481 -0.096548 0.525158 0.84551 -0.0812117 0.532857 0.8423 -0.0631306 0.530293 0.845461 -0.0447319 0.508009 0.860189 -0.0646377 0.505853 0.860195 -0.055993 0.473024 0.879268 -0.0569461 0.442972 0.894725 -0.0389205 0.444927 0.894721 -0.139205 0.485262 0.863217 -0.119464 0.565076 0.816344 -0.123288 0.487486 0.864383 -0.122848 0.514853 0.84843 -0.116607 0.546847 0.829073 -0.105342 0.569216 0.815412 -0.0893507 0.582075 0.808211 -0.0700852 0.585233 0.807831 -0.0504047 0.56841 0.8212 -0.0719083 0.566081 0.821207 -0.138418 0.550045 0.823584 -0.130532 0.564874 0.814787 -0.125524 0.584834 0.801382 -0.113959 0.61156 0.782948 -0.0972995 0.629162 0.77116 -0.0768131 0.637511 0.766603 -0.0559099 0.625842 0.777944 -0.0786965 0.623376 0.777952 -0.175334 0.708964 0.683102 -0.125725 0.796106 0.591953 -0.136582 0.789979 0.597728 -0.157626 0.842477 0.515156 -0.104765 0.887175 0.449383 -0.0944049 0.833824 0.543899 -0.0758237 0.820269 0.56693 -0.100447 0.817612 0.56694 -0.0954719 0.776241 0.623165 -0.095883 0.774816 0.624874 -0.0712157 0.77748 0.624863 -0.107434 0.887432 0.448244 -0.0718216 0.875815 0.477272 -0.116323 0.871014 0.477289 -0.00244659 0.00852065 0.999961 -0.00509371 0.0170282 0.999842 -0.0082534 0.0277468 0.999581 -0.0078659 0.0278595 0.999581 -0.00345222 0.0112739 0.999931 -0.00325404 0.0107607 0.999937 -0.00497971 0.0170621 0.999842 -0.0052876 0.0198976 0.999788 -0.00622529 0.0196165 0.999788 -0.00826466 0.0281889 0.999568 -0.00835836 0.028161 0.999568 -0.00796973 0.0328077 0.99943 -0.0178752 0.0650752 0.99772 -0.0295011 0.106259 0.993901 -0.0312983 0.105736 0.993902 -0.0331284 0.115946 0.992703 -0.0319332 0.116283 0.992703 -0.0308171 0.110543 0.993393 -0.0222353 0.0789879 0.996628 -0.0135038 0.0474026 0.998785 -0.0102468 0.0377285 0.999236 -0.00972616 0.0432316 0.999018 -0.0150191 0.0416429 0.99902 -0.0203218 0.0752316 0.996959 -0.0240404 0.0741251 0.996959 -0.0121971 0.0641968 0.997863 -0.0312624 0.126496 0.991474 -0.0526035 0.20568 0.977205 -0.0726365 0.2753 0.95861 -0.0896301 0.344561 0.934475 -0.103853 0.41285 0.904859 -0.114651 0.480114 0.869682 -0.106478 0.412177 0.90486 -0.106806 0.444369 0.889454 -0.104773 0.444855 0.889453 -0.113303 0.506416 0.854813 -0.0130841 0.0774791 0.996908 -0.0364964 0.151677 0.987756 -0.0620781 0.245758 0.967341 -0.0867657 0.327698 0.94079 -0.107334 0.408443 0.906451 -0.0950525 0.325363 0.9408 -0.0957733 0.371123 0.923631 -0.103236 0.369104 0.923636 -0.110855 0.443872 0.889207 -0.00251714 0.00840903 0.999961 -0.00254038 0.00849246 0.999961 
[mesh geometry data omitted: several thousand per-vertex unit-normal triples followed by triangle vertex/normal index lists from the model mesh file added in this patch (COLLADA-style <float_array> and <p> blocks); the raw numeric payload is reproduced verbatim in the patch]
172 181 172 181 173 73 173 180 173 180 174 74 174 179 174 179 175 280 175 96 175 96 176 75 176 76 176 76 177 275 177 177 177 177 178 274 178 77 178 77 179 279 179 97 179 97 180 78 180 175 180 175 181 98 181 174 181 174 182 273 182 172 182 172 183 79 183 99 183 99 184 100 184 101 184 101 185 269 185 102 185 102 186 80 186 170 186 261 187 260 187 32 187 260 188 259 188 167 188 259 189 103 189 31 189 103 190 29 190 104 190 29 191 254 191 30 191 254 192 105 192 162 192 105 193 106 193 28 193 106 194 107 194 161 194 107 195 108 195 160 195 108 196 252 196 109 196 252 197 247 197 27 197 247 198 246 198 110 198 246 199 111 199 26 199 111 200 112 200 25 200 112 201 113 201 24 201 113 202 243 202 23 202 243 203 21 203 22 203 21 204 237 204 154 204 237 205 241 205 114 205 241 206 240 206 20 206 240 207 115 207 153 207 115 208 116 208 152 208 116 209 117 209 151 209 117 210 18 210 19 210 18 211 16 211 17 211 16 212 15 212 150 212 15 213 118 213 149 213 118 214 119 214 14 214 119 215 120 215 148 215 120 216 121 216 122 216 121 217 123 217 146 217 123 218 13 218 145 218 13 219 222 219 143 219 222 220 221 220 12 220 221 221 11 221 141 221 11 222 124 222 140 222 124 223 220 223 138 223 220 224 219 224 125 224 219 225 234 225 137 225 234 226 9 226 10 226 9 227 6 227 8 227 6 228 233 228 7 228 233 229 217 229 132 229 217 230 4 230 5 230 4 231 3 231 133 231 126 232 1 232 1204 232 1204 233 1 233 0 233 1166 234 39 234 1167 234 1166 235 127 235 39 235 1166 236 1163 236 127 236 127 237 1163 237 81 237 81 238 1163 238 126 238 126 239 1163 239 130 239 1 240 130 240 1161 240 128 241 1161 241 129 241 128 242 1 242 1161 242 126 243 130 243 1 243 1161 244 1160 244 129 244 129 245 1160 245 133 245 133 246 1160 246 131 246 5 247 131 247 132 247 5 248 133 248 131 248 131 249 134 249 132 249 132 250 134 250 7 250 7 251 134 251 135 251 8 252 135 252 10 252 8 253 7 253 135 253 135 254 1155 254 10 254 10 255 1155 255 137 255 137 256 1155 256 136 256 125 257 136 257 138 257 125 258 137 258 136 258 136 259 139 259 138 259 138 260 139 260 140 260 140 261 139 261 142 261 141 262 142 262 12 262 141 263 140 263 142 263 142 264 144 264 12 264 12 265 144 265 143 265 143 266 144 266 145 266 145 267 144 267 1152 267 146 268 1152 268 147 268 122 269 147 269 148 269 122 270 146 270 147 270 145 271 1152 271 146 271 147 272 1149 272 148 272 148 273 1149 273 14 273 14 274 1149 274 1147 274 149 275 1147 275 150 275 149 276 14 276 1147 276 1146 277 151 277 1147 277 1146 278 152 278 151 278 1146 279 153 279 152 279 1146 280 20 280 153 280 1146 281 114 281 20 281 1146 282 154 282 114 282 1146 283 22 283 154 283 1146 284 23 284 22 284 1146 285 24 285 23 285 1146 286 25 286 24 286 1146 287 26 287 25 287 1146 288 110 288 26 288 1146 289 27 289 110 289 1146 290 109 290 27 290 1146 291 160 291 109 291 1146 292 1145 292 160 292 160 293 1145 293 1143 293 1141 294 160 294 1143 294 1141 295 1139 295 160 295 160 296 1139 296 155 296 156 297 160 297 155 297 156 298 157 298 160 298 160 299 157 299 158 299 159 300 160 300 158 300 159 301 1129 301 160 301 160 302 1129 302 163 302 161 303 163 303 164 303 28 304 164 304 1124 304 162 305 1124 305 30 305 162 306 28 306 1124 306 160 307 163 307 161 307 161 308 164 308 28 308 1124 309 165 309 30 309 30 310 165 310 104 310 104 311 165 311 166 311 31 312 166 312 167 312 31 313 104 313 166 313 166 314 1119 314 167 314 167 315 1119 315 32 315 32 316 1119 316 169 316 168 317 169 317 170 317 168 318 32 318 169 318 169 319 171 319 170 319 170 320 171 320 102 320 102 321 171 321 101 321 101 322 171 322 173 322 99 323 173 
323 1116 323 172 324 1116 324 174 324 172 325 99 325 1116 325 101 326 173 326 99 326 1116 327 1115 327 174 327 174 328 1115 328 175 328 175 329 1115 329 1114 329 97 330 1114 330 77 330 97 331 175 331 1114 331 1114 332 176 332 77 332 77 333 176 333 177 333 177 334 176 334 178 334 76 335 178 335 96 335 76 336 177 336 178 336 178 337 1107 337 96 337 96 338 1107 338 179 338 179 339 1107 339 1105 339 180 340 1105 340 181 340 180 341 179 341 1105 341 1105 342 182 342 181 342 181 343 182 343 70 343 70 344 182 344 1103 344 69 345 1103 345 68 345 69 346 70 346 1103 346 1103 347 183 347 68 347 68 348 183 348 67 348 67 349 183 349 184 349 185 350 184 350 186 350 185 351 67 351 184 351 184 352 1098 352 186 352 186 353 1098 353 94 353 94 354 1098 354 188 354 187 355 188 355 189 355 187 356 94 356 188 356 188 357 1097 357 189 357 189 358 1097 358 190 358 190 359 1097 359 1096 359 93 360 1096 360 191 360 93 361 190 361 1096 361 1096 362 192 362 191 362 191 363 192 363 62 363 62 364 192 364 1092 364 91 365 1092 365 193 365 91 366 62 366 1092 366 1092 367 1091 367 193 367 193 368 1091 368 194 368 194 369 1091 369 1089 369 195 370 1089 370 90 370 195 371 194 371 1089 371 1089 372 1088 372 90 372 90 373 1088 373 88 373 88 374 1088 374 196 374 196 375 1088 375 1086 375 198 376 1086 376 199 376 197 377 199 377 85 377 197 378 198 378 199 378 196 379 1086 379 198 379 199 380 200 380 85 380 85 381 200 381 84 381 84 382 200 382 1082 382 201 383 1082 383 83 383 201 384 84 384 1082 384 1082 385 203 385 83 385 83 386 203 386 202 386 202 387 203 387 1080 387 204 388 1080 388 205 388 204 389 202 389 1080 389 1080 390 1079 390 205 390 205 391 1079 391 206 391 206 392 1079 392 1076 392 57 393 1076 393 54 393 57 394 206 394 1076 394 1076 395 207 395 54 395 54 396 207 396 53 396 53 397 207 397 208 397 44 398 208 398 45 398 44 399 53 399 208 399 208 400 1029 400 45 400 45 401 1029 401 209 401 209 402 1029 402 1167 402 82 403 1167 403 39 403 82 404 209 404 1167 404 151 405 19 405 1147 405 1147 406 19 406 17 406 150 407 1147 407 17 407 0 408 210 408 1203 408 1203 409 210 409 214 409 214 410 210 410 2 410 1205 411 2 411 3 411 319 412 3 412 4 412 1207 413 4 413 215 413 211 414 215 414 1375 414 1208 415 1375 415 212 415 1256 416 212 416 213 416 1256 417 1208 417 212 417 214 418 2 418 1205 418 1205 419 3 419 319 419 4 420 217 420 215 420 215 421 217 421 216 421 216 422 217 422 1390 422 1390 423 217 423 233 423 2025 424 233 424 6 424 218 425 6 425 9 425 1412 426 9 426 234 426 223 427 234 427 219 427 220 428 223 428 219 428 220 429 124 429 223 429 223 430 124 430 11 430 221 431 223 431 11 431 221 432 222 432 223 432 223 433 222 433 13 433 123 434 223 434 13 434 123 435 121 435 223 435 223 436 121 436 120 436 119 437 223 437 120 437 119 438 118 438 223 438 223 439 118 439 1427 439 1427 440 118 440 1432 440 1432 441 118 441 224 441 224 442 118 442 225 442 225 443 118 443 1452 443 1452 444 118 444 226 444 226 445 118 445 1459 445 1459 446 118 446 1468 446 1468 447 118 447 227 447 227 448 118 448 228 448 228 449 118 449 229 449 229 450 118 450 1494 450 1494 451 118 451 230 451 230 452 118 452 15 452 231 453 15 453 16 453 235 454 16 454 18 454 232 455 18 455 1518 455 232 456 235 456 18 456 1390 457 233 457 2025 457 2025 458 6 458 218 458 218 459 9 459 1412 459 1412 460 234 460 223 460 230 461 15 461 231 461 231 462 16 462 235 462 18 463 117 463 1518 463 1518 464 117 464 238 464 238 465 117 465 116 465 1534 466 116 466 115 466 239 467 115 467 240 467 236 468 240 468 241 468 2019 469 241 469 237 469 242 470 237 470 21 470 1551 471 21 471 
243 471 1573 472 243 472 113 472 244 473 113 473 112 473 1578 474 112 474 111 474 1588 475 111 475 245 475 1588 476 1578 476 111 476 238 477 116 477 1534 477 1534 478 115 478 239 478 239 479 240 479 236 479 236 480 241 480 2019 480 2019 481 237 481 242 481 242 482 21 482 1551 482 1551 483 243 483 1573 483 1573 484 113 484 244 484 244 485 112 485 1578 485 111 486 246 486 245 486 245 487 246 487 1601 487 1601 488 246 488 247 488 251 489 247 489 252 489 1608 490 252 490 108 490 248 491 108 491 107 491 250 492 107 492 106 492 249 493 106 493 253 493 249 494 250 494 106 494 1601 495 247 495 251 495 251 496 252 496 1608 496 1608 497 108 497 248 497 248 498 107 498 250 498 106 499 105 499 253 499 253 500 105 500 1638 500 1638 501 105 501 254 501 256 502 254 502 29 502 257 503 29 503 103 503 255 504 103 504 258 504 255 505 257 505 103 505 1638 506 254 506 256 506 256 507 29 507 257 507 103 508 259 508 258 508 258 509 259 509 262 509 262 510 259 510 260 510 1674 511 260 511 261 511 1680 512 261 512 263 512 1689 513 263 513 264 513 1689 514 1680 514 263 514 262 515 260 515 1674 515 1674 516 261 516 1680 516 263 517 265 517 264 517 264 518 265 518 1692 518 1692 519 265 519 80 519 266 520 80 520 269 520 268 521 269 521 267 521 268 522 266 522 269 522 1692 523 80 523 266 523 269 524 100 524 267 524 267 525 100 525 270 525 270 526 100 526 79 526 1718 527 79 527 273 527 271 528 273 528 98 528 1730 529 98 529 272 529 1730 530 271 530 98 530 270 531 79 531 1718 531 1718 532 273 532 271 532 98 533 78 533 272 533 272 534 78 534 1743 534 1743 535 78 535 279 535 1754 536 279 536 274 536 1756 537 274 537 275 537 276 538 275 538 75 538 1767 539 75 539 280 539 277 540 280 540 278 540 277 541 1767 541 280 541 1743 542 279 542 1754 542 1754 543 274 543 1756 543 1756 544 275 544 276 544 276 545 75 545 1767 545 280 546 74 546 278 546 278 547 74 547 1782 547 1782 548 74 548 73 548 1789 549 73 549 72 549 1794 550 72 550 71 550 286 551 71 551 281 551 1807 552 281 552 95 552 2005 553 95 553 287 553 1823 554 287 554 66 554 288 555 66 555 65 555 1825 556 65 556 282 556 285 557 282 557 283 557 284 558 283 558 289 558 284 559 285 559 283 559 1782 560 73 560 1789 560 1789 561 72 561 1794 561 1794 562 71 562 286 562 286 563 281 563 1807 563 1807 564 95 564 2005 564 2005 565 287 565 1823 565 1823 566 66 566 288 566 288 567 65 567 1825 567 1825 568 282 568 285 568 283 569 290 569 289 569 289 570 290 570 1862 570 1862 571 290 571 64 571 291 572 64 572 92 572 1870 573 92 573 293 573 292 574 293 574 63 574 294 575 63 575 295 575 1893 576 295 576 61 576 300 577 61 577 297 577 296 578 297 578 301 578 298 579 301 579 89 579 1919 580 89 580 299 580 1932 581 299 581 87 581 1933 582 87 582 302 582 1938 583 302 583 1952 583 1938 584 1933 584 302 584 1862 585 64 585 291 585 291 586 92 586 1870 586 1870 587 293 587 292 587 292 588 63 588 294 588 294 589 295 589 1893 589 1893 590 61 590 300 590 300 591 297 591 296 591 296 592 301 592 298 592 298 593 89 593 1919 593 1919 594 299 594 1932 594 1932 595 87 595 1933 595 302 596 86 596 1952 596 1952 597 86 597 1953 597 1953 598 86 598 308 598 309 599 308 599 303 599 1965 600 303 600 305 600 304 601 305 601 60 601 306 602 60 602 59 602 307 603 59 603 310 603 307 604 306 604 59 604 1953 605 308 605 309 605 309 606 303 606 1965 606 1965 607 305 607 304 607 304 608 60 608 306 608 59 609 311 609 310 609 310 610 311 610 315 610 312 611 310 611 315 611 312 612 1990 612 310 612 312 613 1249 613 1990 613 1990 614 1249 614 1993 614 1993 615 1249 615 314 615 313 616 1993 616 314 616 315 617 311 617 1289 617 
1289 618 311 618 58 618 1251 619 58 619 317 619 318 620 317 620 56 620 316 621 56 621 55 621 316 622 318 622 56 622 1289 623 58 623 1251 623 1251 624 317 624 318 624 1207 625 215 625 211 625 211 626 1375 626 1208 626 1207 627 319 627 4 627 1352 628 682 628 320 628 1352 629 321 629 682 629 1352 630 331 630 321 630 1352 631 1290 631 331 631 331 632 1290 632 322 632 752 633 322 633 343 633 753 634 343 634 323 634 755 635 323 635 716 635 329 636 716 636 718 636 757 637 718 637 324 637 756 638 324 638 325 638 758 639 325 639 720 639 327 640 720 640 326 640 1273 641 326 641 1230 641 1273 642 327 642 326 642 1273 643 328 643 327 643 327 644 328 644 656 644 758 645 656 645 665 645 756 646 665 646 664 646 757 647 664 647 662 647 329 648 662 648 754 648 755 649 754 649 677 649 753 650 677 650 330 650 752 651 330 651 680 651 331 652 680 652 321 652 331 653 752 653 680 653 331 654 322 654 752 654 1290 655 332 655 322 655 322 656 332 656 761 656 344 657 761 657 760 657 767 658 760 658 775 658 766 659 775 659 348 659 341 660 348 660 349 660 338 661 349 661 784 661 785 662 784 662 333 662 795 663 333 663 334 663 336 664 334 664 351 664 1265 665 351 665 335 665 1265 666 336 666 351 666 1265 667 709 667 336 667 336 668 709 668 337 668 795 669 337 669 794 669 785 670 794 670 339 670 338 671 339 671 340 671 341 672 340 672 776 672 766 673 776 673 342 673 767 674 342 674 759 674 344 675 759 675 343 675 322 676 344 676 343 676 322 677 761 677 344 677 332 678 345 678 761 678 761 679 345 679 346 679 760 680 346 680 347 680 775 681 347 681 774 681 348 682 774 682 783 682 349 683 783 683 790 683 784 684 790 684 792 684 333 685 792 685 350 685 334 686 350 686 803 686 351 687 803 687 802 687 335 688 802 688 356 688 335 689 351 689 802 689 345 690 1291 690 346 690 346 691 1291 691 352 691 347 692 352 692 353 692 774 693 353 693 781 693 783 694 781 694 354 694 790 695 354 695 791 695 792 696 791 696 793 696 350 697 793 697 360 697 803 698 360 698 355 698 802 699 355 699 809 699 356 700 809 700 361 700 356 701 802 701 809 701 1291 702 357 702 352 702 352 703 357 703 780 703 353 704 780 704 779 704 781 705 779 705 358 705 354 706 358 706 359 706 791 707 359 707 789 707 793 708 789 708 365 708 360 709 365 709 807 709 355 710 807 710 810 710 809 711 810 711 367 711 361 712 367 712 366 712 361 713 809 713 367 713 357 714 368 714 780 714 780 715 368 715 778 715 779 716 778 716 362 716 358 717 362 717 363 717 359 718 363 718 364 718 789 719 364 719 798 719 365 720 798 720 369 720 807 721 369 721 808 721 810 722 808 722 813 722 367 723 813 723 818 723 366 724 818 724 1263 724 366 725 367 725 818 725 368 726 1294 726 778 726 778 727 1294 727 370 727 362 728 370 728 788 728 363 729 788 729 797 729 364 730 797 730 372 730 798 731 372 731 800 731 369 732 800 732 373 732 808 733 373 733 816 733 813 734 816 734 819 734 818 735 819 735 375 735 1263 736 375 736 1223 736 1263 737 818 737 375 737 370 738 1294 738 371 738 788 739 371 739 796 739 797 740 796 740 799 740 372 741 799 741 801 741 800 742 801 742 806 742 373 743 806 743 374 743 816 744 374 744 817 744 819 745 817 745 706 745 375 746 706 746 839 746 1223 747 839 747 1261 747 1223 748 375 748 839 748 1295 749 388 749 708 749 1295 750 387 750 388 750 1295 751 376 751 387 751 387 752 376 752 389 752 804 753 389 753 391 753 385 754 391 754 815 754 384 755 815 755 377 755 823 756 377 756 824 756 822 757 824 757 393 757 378 758 393 758 835 758 837 759 835 759 838 759 840 760 838 760 379 760 380 761 379 761 1259 761 380 762 840 762 379 762 380 763 381 763 840 763 840 764 381 764 382 
764 837 765 382 765 827 765 378 766 827 766 825 766 822 767 825 767 383 767 823 768 383 768 707 768 384 769 707 769 811 769 385 770 811 770 805 770 804 771 805 771 386 771 387 772 386 772 388 772 387 773 804 773 386 773 387 774 389 774 804 774 376 775 396 775 389 775 389 776 396 776 390 776 391 777 390 777 812 777 815 778 812 778 814 778 377 779 814 779 821 779 824 780 821 780 392 780 393 781 392 781 834 781 835 782 834 782 836 782 838 783 836 783 394 783 379 784 394 784 395 784 1259 785 395 785 1218 785 1259 786 379 786 395 786 396 787 1299 787 390 787 390 788 1299 788 397 788 812 789 397 789 400 789 814 790 400 790 398 790 821 791 398 791 826 791 392 792 826 792 399 792 834 793 399 793 842 793 836 794 842 794 841 794 394 795 841 795 846 795 395 796 846 796 404 796 1218 797 404 797 1212 797 1218 798 395 798 404 798 1299 799 405 799 397 799 397 800 405 800 401 800 400 801 401 801 402 801 398 802 402 802 406 802 826 803 406 803 832 803 399 804 832 804 403 804 842 805 403 805 843 805 841 806 843 806 851 806 846 807 851 807 855 807 404 808 855 808 409 808 1212 809 409 809 1257 809 1212 810 404 810 409 810 405 811 1303 811 401 811 401 812 1303 812 820 812 402 813 820 813 829 813 406 814 829 814 830 814 832 815 830 815 833 815 403 816 833 816 411 816 843 817 411 817 412 817 851 818 412 818 407 818 855 819 407 819 408 819 409 820 408 820 416 820 1257 821 416 821 415 821 1257 822 409 822 416 822 820 823 1303 823 704 823 829 824 704 824 703 824 830 825 703 825 831 825 833 826 831 826 410 826 411 827 410 827 413 827 412 828 413 828 702 828 407 829 702 829 701 829 408 830 701 830 414 830 416 831 414 831 870 831 415 832 870 832 1255 832 415 833 416 833 870 833 1302 834 705 834 417 834 1302 835 428 835 705 835 1302 836 429 836 428 836 428 837 429 837 418 837 426 838 418 838 845 838 848 839 845 839 854 839 425 840 854 840 852 840 857 841 852 841 861 841 419 842 861 842 860 842 868 843 860 843 872 843 867 844 872 844 871 844 422 845 871 845 421 845 420 846 421 846 1206 846 420 847 422 847 421 847 420 848 1254 848 422 848 422 849 1254 849 873 849 867 850 873 850 423 850 868 851 423 851 424 851 419 852 424 852 700 852 857 853 700 853 849 853 425 854 849 854 850 854 848 855 850 855 427 855 426 856 427 856 828 856 428 857 828 857 705 857 428 858 426 858 828 858 428 859 418 859 426 859 429 860 1308 860 418 860 418 861 1308 861 844 861 845 862 844 862 433 862 854 863 433 863 853 863 852 864 853 864 430 864 861 865 430 865 431 865 860 866 431 866 869 866 872 867 869 867 880 867 871 868 880 868 879 868 421 869 879 869 432 869 1206 870 432 870 437 870 1206 871 421 871 432 871 1308 872 1306 872 844 872 844 873 1306 873 847 873 433 874 847 874 440 874 853 875 440 875 856 875 430 876 856 876 859 876 431 877 859 877 866 877 869 878 866 878 442 878 880 879 442 879 434 879 879 880 434 880 435 880 432 881 435 881 436 881 437 882 436 882 1253 882 437 883 432 883 436 883 1306 884 438 884 847 884 847 885 438 885 439 885 440 886 439 886 441 886 856 887 441 887 446 887 859 888 446 888 865 888 866 889 865 889 878 889 442 890 878 890 877 890 434 891 877 891 443 891 435 892 443 892 448 892 436 893 448 893 444 893 1253 894 444 894 450 894 1253 895 436 895 444 895 438 896 445 896 439 896 439 897 445 897 451 897 441 898 451 898 858 898 446 899 858 899 863 899 865 900 863 900 447 900 878 901 447 901 453 901 877 902 453 902 454 902 443 903 454 903 455 903 448 904 455 904 449 904 444 905 449 905 456 905 450 906 456 906 1204 906 450 907 444 907 456 907 445 908 457 908 451 908 451 909 457 909 862 909 858 910 862 910 864 910 863 911 
864 911 452 911 447 912 452 912 876 912 453 913 876 913 887 913 454 914 887 914 890 914 455 915 890 915 893 915 449 916 893 916 462 916 456 917 462 917 33 917 1204 918 456 918 33 918 862 919 457 919 458 919 864 920 458 920 459 920 452 921 459 921 460 921 876 922 460 922 461 922 887 923 461 923 469 923 890 924 469 924 467 924 893 925 467 925 894 925 462 926 894 926 463 926 33 927 462 927 463 927 464 928 881 928 874 928 464 929 883 929 881 929 464 930 465 930 883 930 883 931 465 931 886 931 885 932 886 932 471 932 470 933 471 933 892 933 889 934 892 934 473 934 888 935 473 935 475 935 35 936 475 936 36 936 35 937 888 937 475 937 35 938 466 938 888 938 35 939 34 939 466 939 466 940 34 940 894 940 467 941 466 941 894 941 467 942 468 942 466 942 467 943 469 943 468 943 468 944 469 944 875 944 889 945 875 945 470 945 892 946 889 946 470 946 465 947 1316 947 886 947 886 948 1316 948 476 948 471 949 476 949 472 949 892 950 472 950 891 950 473 951 891 951 479 951 475 952 479 952 474 952 36 953 475 953 474 953 1316 954 1313 954 476 954 476 955 1313 955 477 955 472 956 477 956 896 956 891 957 896 957 482 957 479 958 482 958 481 958 478 959 479 959 481 959 478 960 474 960 479 960 1313 961 1314 961 477 961 477 962 1314 962 483 962 896 963 483 963 480 963 482 964 480 964 485 964 481 965 485 965 484 965 481 966 482 966 485 966 1314 967 486 967 483 967 483 968 486 968 895 968 480 969 895 969 746 969 485 970 746 970 484 970 485 971 480 971 746 971 486 972 487 972 895 972 895 973 487 973 747 973 746 974 747 974 488 974 37 975 488 975 38 975 37 976 746 976 488 976 37 977 484 977 746 977 747 978 487 978 494 978 488 979 494 979 489 979 38 980 489 980 40 980 38 981 488 981 489 981 490 982 491 982 1317 982 490 983 493 983 491 983 490 984 495 984 493 984 493 985 495 985 492 985 41 986 492 986 698 986 41 987 493 987 492 987 41 988 40 988 493 988 493 989 40 989 491 989 491 990 40 990 489 990 494 991 491 991 489 991 494 992 1317 992 491 992 494 993 487 993 1317 993 495 994 496 994 492 994 492 995 496 995 697 995 698 996 697 996 497 996 498 997 497 997 42 997 498 998 698 998 497 998 496 999 499 999 697 999 697 1000 499 1000 500 1000 497 1001 500 1001 897 1001 42 1002 897 1002 502 1002 43 1003 502 1003 501 1003 43 1004 42 1004 502 1004 499 1005 1320 1005 500 1005 500 1006 1320 1006 511 1006 897 1007 511 1007 898 1007 502 1008 898 1008 510 1008 503 1009 510 1009 696 1009 900 1010 696 1010 504 1010 904 1011 504 1011 903 1011 906 1012 903 1012 505 1012 508 1013 505 1013 912 1013 507 1014 912 1014 506 1014 1287 1015 506 1015 515 1015 1287 1016 507 1016 506 1016 1287 1017 1288 1017 507 1017 507 1018 1288 1018 742 1018 508 1019 742 1019 687 1019 906 1020 687 1020 685 1020 904 1021 685 1021 692 1021 900 1022 692 1022 693 1022 509 1023 693 1023 47 1023 48 1024 509 1024 47 1024 48 1025 503 1025 509 1025 48 1026 46 1026 503 1026 503 1027 46 1027 501 1027 502 1028 503 1028 501 1028 502 1029 510 1029 503 1029 1320 1030 1321 1030 511 1030 511 1031 1321 1031 516 1031 898 1032 516 1032 512 1032 510 1033 512 1033 513 1033 696 1034 513 1034 899 1034 504 1035 899 1035 902 1035 903 1036 902 1036 905 1036 505 1037 905 1037 911 1037 912 1038 911 1038 918 1038 506 1039 918 1039 514 1039 515 1040 514 1040 1286 1040 515 1041 506 1041 514 1041 516 1042 1321 1042 741 1042 512 1043 741 1043 517 1043 513 1044 517 1044 518 1044 899 1045 518 1045 738 1045 902 1046 738 1046 519 1046 905 1047 519 1047 520 1047 911 1048 520 1048 922 1048 918 1049 922 1049 521 1049 514 1050 521 1050 522 1050 1286 1051 522 1051 1285 1051 1286 1052 514 1052 522 1052 
1323 1053 740 1053 1322 1053 1323 1054 534 1054 740 1054 1323 1055 523 1055 534 1055 534 1056 523 1056 524 1056 535 1057 524 1057 910 1057 533 1058 910 1058 908 1058 909 1059 908 1059 538 1059 916 1060 538 1060 525 1060 925 1061 525 1061 526 1061 924 1062 526 1062 929 1062 527 1063 929 1063 935 1063 528 1064 935 1064 529 1064 530 1065 529 1065 1247 1065 530 1066 528 1066 529 1066 530 1067 531 1067 528 1067 528 1068 531 1068 932 1068 527 1069 932 1069 931 1069 924 1070 931 1070 921 1070 925 1071 921 1071 532 1071 916 1072 532 1072 915 1072 909 1073 915 1073 907 1073 533 1074 907 1074 739 1074 535 1075 739 1075 901 1075 534 1076 901 1076 740 1076 534 1077 535 1077 901 1077 534 1078 524 1078 535 1078 523 1079 536 1079 524 1079 524 1080 536 1080 537 1080 910 1081 537 1081 914 1081 908 1082 914 1082 917 1082 538 1083 917 1083 920 1083 525 1084 920 1084 539 1084 526 1085 539 1085 542 1085 929 1086 542 1086 930 1086 935 1087 930 1087 540 1087 529 1088 540 1088 543 1088 1247 1089 543 1089 1282 1089 1247 1090 529 1090 543 1090 536 1091 1327 1091 537 1091 537 1092 1327 1092 545 1092 914 1093 545 1093 913 1093 917 1094 913 1094 541 1094 920 1095 541 1095 923 1095 539 1096 923 1096 549 1096 542 1097 549 1097 928 1097 930 1098 928 1098 933 1098 540 1099 933 1099 940 1099 543 1100 940 1100 550 1100 1282 1101 550 1101 544 1101 1282 1102 543 1102 550 1102 1327 1103 1330 1103 545 1103 545 1104 1330 1104 546 1104 913 1105 546 1105 547 1105 541 1106 547 1106 548 1106 923 1107 548 1107 927 1107 549 1108 927 1108 553 1108 928 1109 553 1109 934 1109 933 1110 934 1110 938 1110 940 1111 938 1111 948 1111 550 1112 948 1112 551 1112 544 1113 551 1113 552 1113 544 1114 550 1114 551 1114 1330 1115 557 1115 546 1115 546 1116 557 1116 919 1116 547 1117 919 1117 748 1117 548 1118 748 1118 559 1118 927 1119 559 1119 554 1119 553 1120 554 1120 560 1120 934 1121 560 1121 939 1121 938 1122 939 1122 947 1122 948 1123 947 1123 555 1123 551 1124 555 1124 556 1124 552 1125 556 1125 1241 1125 552 1126 551 1126 556 1126 557 1127 1329 1127 919 1127 919 1128 1329 1128 558 1128 748 1129 558 1129 563 1129 559 1130 563 1130 926 1130 554 1131 926 1131 566 1131 560 1132 566 1132 942 1132 939 1133 942 1133 561 1133 947 1134 561 1134 953 1134 555 1135 953 1135 562 1135 556 1136 562 1136 956 1136 1241 1137 956 1137 567 1137 1241 1138 556 1138 956 1138 558 1139 1329 1139 564 1139 563 1140 564 1140 565 1140 926 1141 565 1141 936 1141 566 1142 936 1142 937 1142 942 1143 937 1143 735 1143 561 1144 735 1144 734 1144 953 1145 734 1145 954 1145 562 1146 954 1146 957 1146 956 1147 957 1147 971 1147 567 1148 971 1148 733 1148 567 1149 956 1149 971 1149 1332 1150 737 1150 736 1150 1332 1151 568 1151 737 1151 1332 1152 569 1152 568 1152 568 1153 569 1153 570 1153 579 1154 570 1154 571 1154 946 1155 571 1155 952 1155 572 1156 952 1156 573 1156 951 1157 573 1157 955 1157 961 1158 955 1158 962 1158 968 1159 962 1159 970 1159 974 1160 970 1160 969 1160 973 1161 969 1161 574 1161 575 1162 574 1162 1240 1162 575 1163 973 1163 574 1163 575 1164 732 1164 973 1164 973 1165 732 1165 576 1165 974 1166 576 1166 972 1166 968 1167 972 1167 960 1167 961 1168 960 1168 577 1168 951 1169 577 1169 578 1169 572 1170 578 1170 945 1170 946 1171 945 1171 941 1171 579 1172 941 1172 580 1172 568 1173 580 1173 737 1173 568 1174 579 1174 580 1174 568 1175 570 1175 579 1175 569 1176 581 1176 570 1176 570 1177 581 1177 944 1177 571 1178 944 1178 943 1178 952 1179 943 1179 950 1179 573 1180 950 1180 582 1180 955 1181 582 1181 583 1181 962 1182 583 1182 588 1182 970 1183 588 1183 
589 1183 969 1184 589 1184 584 1184 574 1185 584 1185 585 1185 1240 1186 585 1186 1278 1186 1240 1187 574 1187 585 1187 581 1188 1333 1188 944 1188 944 1189 1333 1189 586 1189 943 1190 586 1190 949 1190 950 1191 949 1191 587 1191 582 1192 587 1192 959 1192 583 1193 959 1193 593 1193 588 1194 593 1194 594 1194 589 1195 594 1195 977 1195 584 1196 977 1196 590 1196 585 1197 590 1197 596 1197 1278 1198 596 1198 597 1198 1278 1199 585 1199 596 1199 1333 1200 591 1200 586 1200 586 1201 591 1201 592 1201 949 1202 592 1202 958 1202 587 1203 958 1203 599 1203 959 1204 599 1204 964 1204 593 1205 964 1205 967 1205 594 1206 967 1206 976 1206 977 1207 976 1207 595 1207 590 1208 595 1208 984 1208 596 1209 984 1209 598 1209 597 1210 598 1210 1237 1210 597 1211 596 1211 598 1211 591 1212 1334 1212 592 1212 592 1213 1334 1213 605 1213 958 1214 605 1214 600 1214 599 1215 600 1215 966 1215 964 1216 966 1216 965 1216 967 1217 965 1217 601 1217 976 1218 601 1218 602 1218 595 1219 602 1219 603 1219 984 1220 603 1220 983 1220 598 1221 983 1221 604 1221 1237 1222 604 1222 1276 1222 1237 1223 598 1223 604 1223 1334 1224 731 1224 605 1224 605 1225 731 1225 606 1225 600 1226 606 1226 610 1226 966 1227 610 1227 607 1227 965 1228 607 1228 975 1228 601 1229 975 1229 611 1229 602 1230 611 1230 608 1230 603 1231 608 1231 986 1231 983 1232 986 1232 992 1232 604 1233 992 1233 991 1233 1276 1234 991 1234 1275 1234 1276 1235 604 1235 991 1235 606 1236 731 1236 609 1236 610 1237 609 1237 963 1237 607 1238 963 1238 730 1238 975 1239 730 1239 729 1239 611 1240 729 1240 727 1240 608 1241 727 1241 612 1241 986 1242 612 1242 726 1242 992 1243 726 1243 997 1243 991 1244 997 1244 725 1244 1275 1245 725 1245 724 1245 1275 1246 991 1246 725 1246 1337 1247 627 1247 613 1247 1337 1248 614 1248 627 1248 1337 1249 1336 1249 614 1249 614 1250 1336 1250 979 1250 615 1251 979 1251 978 1251 616 1252 978 1252 982 1252 625 1253 982 1253 617 1253 990 1254 617 1254 996 1254 995 1255 996 1255 631 1255 1002 1256 631 1256 1003 1256 618 1257 1003 1257 620 1257 619 1258 620 1258 621 1258 622 1259 621 1259 1274 1259 622 1260 619 1260 621 1260 622 1261 723 1261 619 1261 619 1262 723 1262 623 1262 618 1263 623 1263 1001 1263 1002 1264 1001 1264 994 1264 995 1265 994 1265 624 1265 990 1266 624 1266 985 1266 625 1267 985 1267 981 1267 616 1268 981 1268 728 1268 615 1269 728 1269 626 1269 614 1270 626 1270 627 1270 614 1271 615 1271 626 1271 614 1272 979 1272 615 1272 1336 1273 1338 1273 979 1273 979 1274 1338 1274 628 1274 978 1275 628 1275 980 1275 982 1276 980 1276 989 1276 617 1277 989 1277 629 1277 996 1278 629 1278 630 1278 631 1279 630 1279 635 1279 1003 1280 635 1280 1010 1280 620 1281 1010 1281 637 1281 621 1282 637 1282 639 1282 1274 1283 639 1283 638 1283 1274 1284 621 1284 639 1284 1338 1285 632 1285 628 1285 628 1286 632 1286 640 1286 980 1287 640 1287 633 1287 989 1288 633 1288 988 1288 629 1289 988 1289 1000 1289 630 1290 1000 1290 634 1290 635 1291 634 1291 636 1291 1010 1292 636 1292 1009 1292 637 1293 1009 1293 1022 1293 639 1294 1022 1294 1021 1294 638 1295 1021 1295 643 1295 638 1296 639 1296 1021 1296 632 1297 1339 1297 640 1297 640 1298 1339 1298 641 1298 633 1299 641 1299 993 1299 988 1300 993 1300 644 1300 1000 1301 644 1301 998 1301 634 1302 998 1302 642 1302 636 1303 642 1303 645 1303 1009 1304 645 1304 646 1304 1022 1305 646 1305 1025 1305 1021 1306 1025 1306 650 1306 643 1307 650 1307 649 1307 643 1308 1021 1308 650 1308 1339 1309 1341 1309 641 1309 641 1310 1341 1310 987 1310 993 1311 987 1311 651 1311 644 1312 651 1312 652 1312 
998 1313 652 1313 653 1313 642 1314 653 1314 1008 1314 645 1315 1008 1315 1015 1315 646 1316 1015 1316 647 1316 1025 1317 647 1317 648 1317 650 1318 648 1318 1028 1318 649 1319 1028 1319 1231 1319 649 1320 650 1320 1028 1320 987 1321 1341 1321 749 1321 651 1322 749 1322 999 1322 652 1323 999 1323 654 1323 653 1324 654 1324 1013 1324 1008 1325 1013 1325 1014 1325 1015 1326 1014 1326 1020 1326 647 1327 1020 1327 668 1327 648 1328 668 1328 655 1328 1028 1329 655 1329 722 1329 1231 1330 722 1330 656 1330 328 1331 1231 1331 656 1331 658 1332 750 1332 657 1332 658 1333 659 1333 750 1333 658 1334 1344 1334 659 1334 659 1335 1344 1335 660 1335 1005 1336 660 1336 1011 1336 1007 1337 1011 1337 1017 1337 1018 1338 1017 1338 1027 1338 1024 1339 1027 1339 669 1339 661 1340 669 1340 672 1340 663 1341 672 1341 662 1341 664 1342 663 1342 662 1342 664 1343 666 1343 663 1343 664 1344 665 1344 666 1344 666 1345 665 1345 722 1345 655 1346 666 1346 722 1346 655 1347 667 1347 666 1347 655 1348 668 1348 667 1348 667 1349 668 1349 751 1349 661 1350 751 1350 1024 1350 669 1351 661 1351 1024 1351 1344 1352 1346 1352 660 1352 660 1353 1346 1353 673 1353 1011 1354 673 1354 1016 1354 1017 1355 1016 1355 670 1355 1027 1356 670 1356 745 1356 669 1357 745 1357 671 1357 672 1358 671 1358 754 1358 662 1359 672 1359 754 1359 1346 1360 1347 1360 673 1360 673 1361 1347 1361 674 1361 1016 1362 674 1362 675 1362 670 1363 675 1363 676 1363 745 1364 676 1364 678 1364 671 1365 678 1365 677 1365 754 1366 671 1366 677 1366 1347 1367 1348 1367 674 1367 674 1368 1348 1368 1023 1368 675 1369 1023 1369 1026 1369 676 1370 1026 1370 679 1370 678 1371 679 1371 330 1371 677 1372 678 1372 330 1372 1348 1373 1350 1373 1023 1373 1023 1374 1350 1374 681 1374 1026 1375 681 1375 683 1375 679 1376 683 1376 680 1376 330 1377 679 1377 680 1377 1350 1378 320 1378 681 1378 681 1379 320 1379 682 1379 683 1380 682 1380 321 1380 680 1381 683 1381 321 1381 1252 1382 684 1382 694 1382 688 1383 694 1383 695 1383 686 1384 695 1384 691 1384 685 1385 691 1385 692 1385 685 1386 686 1386 691 1386 685 1387 687 1387 686 1387 686 1388 687 1388 744 1388 688 1389 744 1389 1250 1389 1252 1390 688 1390 1250 1390 1252 1391 694 1391 688 1391 684 1392 52 1392 694 1392 694 1393 52 1393 51 1393 695 1394 51 1394 689 1394 691 1395 689 1395 50 1395 690 1396 691 1396 50 1396 690 1397 692 1397 691 1397 690 1398 49 1398 692 1398 692 1399 49 1399 693 1399 694 1400 51 1400 695 1400 695 1401 689 1401 691 1401 900 1402 693 1402 509 1402 503 1403 900 1403 509 1403 503 1404 696 1404 900 1404 897 1405 42 1405 497 1405 697 1406 698 1406 492 1406 34 1407 699 1407 894 1407 894 1408 699 1408 463 1408 1254 1409 1255 1409 873 1409 873 1410 1255 1410 870 1410 423 1411 870 1411 414 1411 424 1412 414 1412 701 1412 700 1413 701 1413 702 1413 849 1414 702 1414 413 1414 850 1415 413 1415 410 1415 427 1416 410 1416 831 1416 828 1417 831 1417 703 1417 705 1418 703 1418 704 1418 417 1419 704 1419 1303 1419 417 1420 705 1420 704 1420 381 1421 1261 1421 382 1421 382 1422 1261 1422 839 1422 827 1423 839 1423 706 1423 825 1424 706 1424 817 1424 383 1425 817 1425 374 1425 707 1426 374 1426 806 1426 811 1427 806 1427 801 1427 805 1428 801 1428 799 1428 386 1429 799 1429 796 1429 388 1430 796 1430 371 1430 708 1431 371 1431 1294 1431 708 1432 388 1432 371 1432 709 1433 1228 1433 337 1433 337 1434 1228 1434 710 1434 794 1435 710 1435 786 1435 339 1436 786 1436 782 1436 340 1437 782 1437 711 1437 776 1438 711 1438 768 1438 342 1439 768 1439 762 1439 759 1440 762 1440 323 1440 343 1441 759 1441 323 1441 1228 
1442 714 1442 710 1442 710 1443 714 1443 787 1443 786 1444 787 1444 712 1444 782 1445 712 1445 769 1445 711 1446 769 1446 763 1446 768 1447 763 1447 713 1447 762 1448 713 1448 716 1448 323 1449 762 1449 716 1449 714 1450 1229 1450 787 1450 787 1451 1229 1451 717 1451 712 1452 717 1452 771 1452 769 1453 771 1453 770 1453 763 1454 770 1454 715 1454 713 1455 715 1455 718 1455 716 1456 713 1456 718 1456 1229 1457 1268 1457 717 1457 717 1458 1268 1458 777 1458 771 1459 777 1459 772 1459 770 1460 772 1460 764 1460 715 1461 764 1461 324 1461 718 1462 715 1462 324 1462 1268 1463 719 1463 777 1463 777 1464 719 1464 773 1464 772 1465 773 1465 765 1465 764 1466 765 1466 325 1466 324 1467 764 1467 325 1467 719 1468 1271 1468 773 1468 773 1469 1271 1469 721 1469 765 1470 721 1470 720 1470 325 1471 765 1471 720 1471 1271 1472 1230 1472 721 1472 721 1473 1230 1473 326 1473 720 1474 721 1474 326 1474 722 1475 1231 1475 1028 1475 723 1476 724 1476 623 1476 623 1477 724 1477 725 1477 1001 1478 725 1478 997 1478 994 1479 997 1479 726 1479 624 1480 726 1480 612 1480 985 1481 612 1481 727 1481 981 1482 727 1482 729 1482 728 1483 729 1483 730 1483 626 1484 730 1484 963 1484 627 1485 963 1485 609 1485 613 1486 609 1486 731 1486 613 1487 627 1487 609 1487 732 1488 733 1488 576 1488 576 1489 733 1489 971 1489 972 1490 971 1490 957 1490 960 1491 957 1491 954 1491 577 1492 954 1492 734 1492 578 1493 734 1493 735 1493 945 1494 735 1494 937 1494 941 1495 937 1495 936 1495 580 1496 936 1496 565 1496 737 1497 565 1497 564 1497 736 1498 564 1498 1329 1498 736 1499 737 1499 564 1499 531 1500 1285 1500 932 1500 932 1501 1285 1501 522 1501 931 1502 522 1502 521 1502 921 1503 521 1503 922 1503 532 1504 922 1504 520 1504 915 1505 520 1505 519 1505 907 1506 519 1506 738 1506 739 1507 738 1507 518 1507 901 1508 518 1508 517 1508 740 1509 517 1509 741 1509 1322 1510 741 1510 1321 1510 1322 1511 740 1511 741 1511 1288 1512 743 1512 742 1512 742 1513 743 1513 744 1513 687 1514 742 1514 744 1514 743 1515 1250 1515 744 1515 683 1516 681 1516 682 1516 665 1517 656 1517 722 1517 683 1518 679 1518 1026 1518 679 1519 678 1519 676 1519 678 1520 671 1520 745 1520 671 1521 672 1521 669 1521 672 1522 663 1522 661 1522 661 1523 663 1523 667 1523 751 1524 661 1524 667 1524 663 1525 666 1525 667 1525 362 1526 778 1526 370 1526 858 1527 451 1527 862 1527 746 1528 895 1528 747 1528 748 1529 919 1529 558 1529 600 1530 605 1530 606 1530 1341 1531 657 1531 749 1531 749 1532 657 1532 750 1532 999 1533 750 1533 1004 1533 654 1534 1004 1534 1006 1534 1013 1535 1006 1535 1012 1535 1014 1536 1012 1536 1019 1536 1020 1537 1019 1537 751 1537 668 1538 1020 1538 751 1538 753 1539 330 1539 752 1539 343 1540 753 1540 752 1540 755 1541 677 1541 753 1541 323 1542 755 1542 753 1542 329 1543 754 1543 755 1543 716 1544 329 1544 755 1544 757 1545 662 1545 329 1545 718 1546 757 1546 329 1546 756 1547 664 1547 757 1547 324 1548 756 1548 757 1548 758 1549 665 1549 756 1549 325 1550 758 1550 756 1550 327 1551 656 1551 758 1551 720 1552 327 1552 758 1552 767 1553 759 1553 344 1553 760 1554 767 1554 344 1554 346 1555 760 1555 761 1555 342 1556 762 1556 759 1556 768 1557 713 1557 762 1557 763 1558 715 1558 713 1558 770 1559 764 1559 715 1559 772 1560 765 1560 764 1560 773 1561 721 1561 765 1561 352 1562 347 1562 346 1562 766 1563 342 1563 767 1563 775 1564 766 1564 767 1564 347 1565 775 1565 760 1565 776 1566 768 1566 342 1566 711 1567 763 1567 768 1567 769 1568 770 1568 763 1568 771 1569 772 1569 770 1569 777 1570 773 1570 772 1570 780 1571 353 1571 352 1571 353 1572 774 
1572 347 1572 341 1573 776 1573 766 1573 348 1574 341 1574 766 1574 774 1575 348 1575 775 1575 340 1576 711 1576 776 1576 782 1577 769 1577 711 1577 712 1578 771 1578 769 1578 717 1579 777 1579 771 1579 778 1580 779 1580 780 1580 779 1581 781 1581 353 1581 781 1582 783 1582 774 1582 338 1583 340 1583 341 1583 349 1584 338 1584 341 1584 783 1585 349 1585 348 1585 339 1586 782 1586 340 1586 786 1587 712 1587 782 1587 787 1588 717 1588 712 1588 358 1589 779 1589 362 1589 354 1590 781 1590 358 1590 790 1591 783 1591 354 1591 784 1592 349 1592 790 1592 785 1593 339 1593 338 1593 784 1594 785 1594 338 1594 794 1595 786 1595 339 1595 710 1596 787 1596 786 1596 371 1597 788 1597 370 1597 788 1598 363 1598 362 1598 797 1599 788 1599 796 1599 363 1600 359 1600 358 1600 364 1601 363 1601 797 1601 359 1602 791 1602 354 1602 789 1603 359 1603 364 1603 791 1604 792 1604 790 1604 793 1605 791 1605 789 1605 792 1606 333 1606 784 1606 350 1607 792 1607 793 1607 794 1608 785 1608 795 1608 795 1609 785 1609 333 1609 334 1610 333 1610 350 1610 710 1611 794 1611 337 1611 334 1612 336 1612 795 1612 795 1613 336 1613 337 1613 386 1614 796 1614 388 1614 372 1615 797 1615 799 1615 798 1616 364 1616 372 1616 365 1617 789 1617 798 1617 360 1618 793 1618 365 1618 803 1619 350 1619 360 1619 351 1620 334 1620 803 1620 805 1621 799 1621 386 1621 800 1622 372 1622 801 1622 369 1623 798 1623 800 1623 807 1624 365 1624 369 1624 355 1625 360 1625 807 1625 802 1626 803 1626 355 1626 385 1627 805 1627 804 1627 391 1628 385 1628 804 1628 390 1629 391 1629 389 1629 811 1630 801 1630 805 1630 373 1631 800 1631 806 1631 808 1632 369 1632 373 1632 810 1633 807 1633 808 1633 809 1634 355 1634 810 1634 397 1635 812 1635 390 1635 384 1636 811 1636 385 1636 815 1637 384 1637 385 1637 812 1638 815 1638 391 1638 707 1639 806 1639 811 1639 816 1640 373 1640 374 1640 813 1641 808 1641 816 1641 367 1642 810 1642 813 1642 401 1643 400 1643 397 1643 400 1644 814 1644 812 1644 823 1645 707 1645 384 1645 377 1646 823 1646 384 1646 814 1647 377 1647 815 1647 383 1648 374 1648 707 1648 819 1649 816 1649 817 1649 818 1650 813 1650 819 1650 820 1651 402 1651 401 1651 402 1652 398 1652 400 1652 398 1653 821 1653 814 1653 822 1654 383 1654 823 1654 824 1655 822 1655 823 1655 821 1656 824 1656 377 1656 825 1657 817 1657 383 1657 375 1658 819 1658 706 1658 704 1659 829 1659 820 1659 829 1660 406 1660 402 1660 406 1661 826 1661 398 1661 826 1662 392 1662 821 1662 378 1663 825 1663 822 1663 393 1664 378 1664 822 1664 392 1665 393 1665 824 1665 827 1666 706 1666 825 1666 828 1667 703 1667 705 1667 703 1668 830 1668 829 1668 830 1669 832 1669 406 1669 833 1670 830 1670 831 1670 832 1671 399 1671 826 1671 403 1672 832 1672 833 1672 399 1673 834 1673 392 1673 842 1674 399 1674 403 1674 834 1675 835 1675 393 1675 836 1676 834 1676 842 1676 827 1677 378 1677 837 1677 837 1678 378 1678 835 1678 838 1679 835 1679 836 1679 839 1680 827 1680 382 1680 838 1681 840 1681 837 1681 837 1682 840 1682 382 1682 427 1683 831 1683 828 1683 411 1684 833 1684 410 1684 843 1685 403 1685 411 1685 841 1686 842 1686 843 1686 394 1687 836 1687 841 1687 379 1688 838 1688 394 1688 848 1689 427 1689 426 1689 845 1690 848 1690 426 1690 844 1691 845 1691 418 1691 850 1692 410 1692 427 1692 412 1693 411 1693 413 1693 851 1694 843 1694 412 1694 846 1695 841 1695 851 1695 395 1696 394 1696 846 1696 847 1697 433 1697 844 1697 425 1698 850 1698 848 1698 854 1699 425 1699 848 1699 433 1700 854 1700 845 1700 849 1701 413 1701 850 1701 407 1702 412 1702 702 1702 855 1703 851 1703 407 1703 404 
1704 846 1704 855 1704 439 1705 440 1705 847 1705 440 1706 853 1706 433 1706 857 1707 849 1707 425 1707 852 1708 857 1708 425 1708 853 1709 852 1709 854 1709 700 1710 702 1710 849 1710 408 1711 407 1711 701 1711 409 1712 855 1712 408 1712 451 1713 441 1713 439 1713 441 1714 856 1714 440 1714 856 1715 430 1715 853 1715 419 1716 700 1716 857 1716 861 1717 419 1717 857 1717 430 1718 861 1718 852 1718 424 1719 701 1719 700 1719 416 1720 408 1720 414 1720 446 1721 441 1721 858 1721 859 1722 856 1722 446 1722 431 1723 430 1723 859 1723 860 1724 861 1724 431 1724 868 1725 424 1725 419 1725 860 1726 868 1726 419 1726 423 1727 414 1727 424 1727 458 1728 864 1728 862 1728 864 1729 863 1729 858 1729 863 1730 865 1730 446 1730 459 1731 452 1731 864 1731 865 1732 866 1732 859 1732 452 1733 447 1733 863 1733 866 1734 869 1734 431 1734 447 1735 878 1735 865 1735 869 1736 872 1736 860 1736 878 1737 442 1737 866 1737 423 1738 868 1738 867 1738 867 1739 868 1739 872 1739 442 1740 880 1740 869 1740 870 1741 423 1741 873 1741 880 1742 871 1742 872 1742 871 1743 422 1743 867 1743 867 1744 422 1744 873 1744 874 1745 881 1745 458 1745 457 1746 874 1746 458 1746 459 1747 881 1747 882 1747 460 1748 882 1748 884 1748 461 1749 884 1749 875 1749 469 1750 461 1750 875 1750 882 1751 460 1751 459 1751 460 1752 876 1752 452 1752 876 1753 453 1753 447 1753 453 1754 877 1754 878 1754 877 1755 434 1755 442 1755 434 1756 879 1756 880 1756 879 1757 421 1757 871 1757 881 1758 459 1758 458 1758 881 1759 883 1759 882 1759 882 1760 883 1760 885 1760 884 1761 885 1761 470 1761 875 1762 884 1762 470 1762 886 1763 885 1763 883 1763 885 1764 884 1764 882 1764 884 1765 461 1765 460 1765 461 1766 887 1766 876 1766 887 1767 454 1767 453 1767 454 1768 443 1768 877 1768 443 1769 435 1769 434 1769 435 1770 432 1770 879 1770 476 1771 471 1771 886 1771 471 1772 470 1772 885 1772 890 1773 887 1773 469 1773 455 1774 454 1774 890 1774 448 1775 443 1775 455 1775 436 1776 435 1776 448 1776 477 1777 472 1777 476 1777 472 1778 892 1778 471 1778 468 1779 875 1779 889 1779 888 1780 889 1780 473 1780 888 1781 468 1781 889 1781 888 1782 466 1782 468 1782 893 1783 890 1783 467 1783 449 1784 455 1784 893 1784 444 1785 448 1785 449 1785 483 1786 896 1786 477 1786 896 1787 891 1787 472 1787 891 1788 473 1788 892 1788 462 1789 893 1789 894 1789 456 1790 449 1790 462 1790 895 1791 480 1791 483 1791 480 1792 482 1792 896 1792 482 1793 479 1793 891 1793 479 1794 475 1794 473 1794 494 1795 488 1795 747 1795 500 1796 497 1796 697 1796 511 1797 897 1797 500 1797 516 1798 898 1798 511 1798 898 1799 502 1799 897 1799 741 1800 512 1800 516 1800 512 1801 510 1801 898 1801 901 1802 517 1802 740 1802 517 1803 513 1803 512 1803 513 1804 696 1804 510 1804 899 1805 513 1805 518 1805 504 1806 696 1806 899 1806 904 1807 692 1807 900 1807 504 1808 904 1808 900 1808 688 1809 695 1809 686 1809 744 1810 688 1810 686 1810 739 1811 518 1811 901 1811 902 1812 899 1812 738 1812 903 1813 504 1813 902 1813 906 1814 685 1814 904 1814 903 1815 906 1815 904 1815 533 1816 739 1816 535 1816 910 1817 533 1817 535 1817 537 1818 910 1818 524 1818 907 1819 738 1819 739 1819 905 1820 902 1820 519 1820 505 1821 903 1821 905 1821 508 1822 687 1822 906 1822 505 1823 508 1823 906 1823 545 1824 914 1824 537 1824 909 1825 907 1825 533 1825 908 1826 909 1826 533 1826 914 1827 908 1827 910 1827 915 1828 519 1828 907 1828 911 1829 905 1829 520 1829 912 1830 505 1830 911 1830 507 1831 742 1831 508 1831 912 1832 507 1832 508 1832 546 1833 913 1833 545 1833 913 1834 917 1834 914 1834 916 1835 915 1835 909 
1835 538 1836 916 1836 909 1836 917 1837 538 1837 908 1837 532 1838 520 1838 915 1838 918 1839 911 1839 922 1839 506 1840 912 1840 918 1840 919 1841 547 1841 546 1841 547 1842 541 1842 913 1842 541 1843 920 1843 917 1843 925 1844 532 1844 916 1844 525 1845 925 1845 916 1845 920 1846 525 1846 538 1846 921 1847 922 1847 532 1847 514 1848 918 1848 521 1848 548 1849 547 1849 748 1849 923 1850 541 1850 548 1850 539 1851 920 1851 923 1851 526 1852 525 1852 539 1852 924 1853 921 1853 925 1853 526 1854 924 1854 925 1854 931 1855 521 1855 921 1855 564 1856 563 1856 558 1856 563 1857 559 1857 748 1857 926 1858 563 1858 565 1858 559 1859 927 1859 548 1859 554 1860 559 1860 926 1860 927 1861 549 1861 923 1861 553 1862 927 1862 554 1862 549 1863 542 1863 539 1863 928 1864 549 1864 553 1864 542 1865 929 1865 526 1865 930 1866 542 1866 928 1866 931 1867 924 1867 527 1867 527 1868 924 1868 929 1868 935 1869 929 1869 930 1869 522 1870 931 1870 932 1870 935 1871 528 1871 527 1871 527 1872 528 1872 932 1872 580 1873 565 1873 737 1873 566 1874 926 1874 936 1874 560 1875 554 1875 566 1875 934 1876 553 1876 560 1876 933 1877 928 1877 934 1877 540 1878 930 1878 933 1878 529 1879 935 1879 540 1879 941 1880 936 1880 580 1880 942 1881 566 1881 937 1881 939 1882 560 1882 942 1882 938 1883 934 1883 939 1883 940 1884 933 1884 938 1884 543 1885 540 1885 940 1885 946 1886 941 1886 579 1886 571 1887 946 1887 579 1887 944 1888 571 1888 570 1888 945 1889 937 1889 941 1889 561 1890 942 1890 735 1890 947 1891 939 1891 561 1891 948 1892 938 1892 947 1892 550 1893 940 1893 948 1893 586 1894 943 1894 944 1894 572 1895 945 1895 946 1895 952 1896 572 1896 946 1896 943 1897 952 1897 571 1897 578 1898 735 1898 945 1898 953 1899 561 1899 734 1899 555 1900 947 1900 953 1900 551 1901 948 1901 555 1901 592 1902 949 1902 586 1902 949 1903 950 1903 943 1903 951 1904 578 1904 572 1904 573 1905 951 1905 572 1905 950 1906 573 1906 952 1906 577 1907 734 1907 578 1907 562 1908 953 1908 954 1908 556 1909 555 1909 562 1909 605 1910 958 1910 592 1910 958 1911 587 1911 949 1911 587 1912 582 1912 950 1912 961 1913 577 1913 951 1913 955 1914 961 1914 951 1914 582 1915 955 1915 573 1915 960 1916 954 1916 577 1916 956 1917 562 1917 957 1917 599 1918 958 1918 600 1918 959 1919 587 1919 599 1919 583 1920 582 1920 959 1920 962 1921 955 1921 583 1921 968 1922 960 1922 961 1922 962 1923 968 1923 961 1923 972 1924 957 1924 960 1924 609 1925 610 1925 606 1925 610 1926 966 1926 600 1926 607 1927 610 1927 963 1927 966 1928 964 1928 599 1928 965 1929 966 1929 607 1929 964 1930 593 1930 959 1930 967 1931 964 1931 965 1931 593 1932 588 1932 583 1932 594 1933 593 1933 967 1933 588 1934 970 1934 962 1934 589 1935 588 1935 594 1935 972 1936 968 1936 974 1936 974 1937 968 1937 970 1937 969 1938 970 1938 589 1938 971 1939 972 1939 576 1939 969 1940 973 1940 974 1940 974 1941 973 1941 576 1941 626 1942 963 1942 627 1942 975 1943 607 1943 730 1943 601 1944 965 1944 975 1944 976 1945 967 1945 601 1945 977 1946 594 1946 976 1946 584 1947 589 1947 977 1947 574 1948 969 1948 584 1948 728 1949 730 1949 626 1949 611 1950 975 1950 729 1950 602 1951 601 1951 611 1951 595 1952 976 1952 602 1952 590 1953 977 1953 595 1953 585 1954 584 1954 590 1954 616 1955 728 1955 615 1955 978 1956 616 1956 615 1956 628 1957 978 1957 979 1957 981 1958 729 1958 728 1958 608 1959 611 1959 727 1959 603 1960 602 1960 608 1960 984 1961 595 1961 603 1961 596 1962 590 1962 984 1962 640 1963 980 1963 628 1963 625 1964 981 1964 616 1964 982 1965 625 1965 616 1965 980 1966 982 1966 978 1966 985 1967 727 
[... several thousand integers of numeric mesh data (apparently triangle/vertex indices from a model file added by this diff) elided; the diff markup for this span was not recoverable ...]
4358 1575 4359 1568 4359 1567 4359 1556 4360 2187 4360 1555 4360 1557 4361 1556 4361 1555 4361 1568 4362 1557 4362 1554 4362 2190 4363 1547 4363 2187 4363 1542 4364 1541 4364 1548 4364 2191 4365 2189 4365 1542 4365 1591 4366 1581 4366 1587 4366 1581 4367 1580 4367 1575 4367 1580 4368 1569 4368 1568 4368 2192 4369 2190 4369 1556 4369 2194 4370 2192 4370 1556 4370 1569 4371 2194 4371 1557 4371 1562 4372 1548 4372 2190 4372 1543 4373 1542 4373 1549 4373 1594 4374 2197 4374 1591 4374 2197 4375 1582 4375 1581 4375 1582 4376 2200 4376 1580 4376 2200 4377 1570 4377 1569 4377 2202 4378 1562 4378 2192 4378 2193 4379 2202 4379 2192 4379 1570 4380 2193 4380 2194 4380 2203 4381 1549 4381 1562 4381 2195 4382 2196 4382 1594 4382 2196 4383 2205 4383 2197 4383 2205 4384 2198 4384 1582 4384 2198 4385 2199 4385 2200 4385 2199 4386 2201 4386 1570 4386 2209 4387 2203 4387 2202 4387 2210 4388 2209 4388 2202 4388 2201 4389 2210 4389 2193 4389 2208 4390 1550 4390 2203 4390 1609 4391 1595 4391 2195 4391 1595 4392 2204 4392 2196 4392 2204 4393 2206 4393 2205 4393 2206 4394 2207 4394 2198 4394 2207 4395 2212 4395 2199 4395 2212 4396 1571 4396 2201 4396 1561 4397 2208 4397 2209 4397 1558 4398 1561 4398 2209 4398 1571 4399 1558 4399 2210 4399 2213 4400 1602 4400 1609 4400 1602 4401 1596 4401 1595 4401 1596 4402 2211 4402 2204 4402 2211 4403 2214 4403 2206 4403 2214 4404 1583 4404 2207 4404 1583 4405 2215 4405 2212 4405 2215 4406 1576 4406 1571 4406 1576 4407 1560 4407 1558 4407 1619 4408 1610 4408 2213 4408 1610 4409 1603 4409 1602 4409 1603 4410 1597 4410 1596 4410 1597 4411 2218 4411 2211 4411 2218 4412 1592 4412 2214 4412 1592 4413 1584 4413 1583 4413 1584 4414 2220 4414 2215 4414 2220 4415 1572 4415 1576 4415 1624 4416 1614 4416 1619 4416 1614 4417 2221 4417 1610 4417 2221 4418 2216 4418 1603 4418 2216 4419 1598 4419 1597 4419 1598 4420 2217 4420 2218 4420 2217 4421 2223 4421 1592 4421 2223 4422 2224 4422 1584 4422 2224 4423 2219 4423 2220 4423 1628 4424 1625 4424 1624 4424 1625 4425 2226 4425 1614 4425 2226 4426 1611 4426 2221 4426 1611 4427 1604 4427 2216 4427 1604 4428 2227 4428 1598 4428 2227 4429 1593 4429 2217 4429 1593 4430 2222 4430 2223 4430 2222 4431 1586 4431 2224 4431 2229 4432 1629 4432 1628 4432 1629 4433 2225 4433 1625 4433 2225 4434 1620 4434 2226 4434 1620 4435 1615 4435 1611 4435 1615 4436 1605 4436 1604 4436 1605 4437 1606 4437 2227 4437 1606 4438 2233 4438 1593 4438 2233 4439 1589 4439 2222 4439 2234 4440 2228 4440 2229 4440 2228 4441 2230 4441 1629 4441 2230 4442 2231 4442 2225 4442 2231 4443 2232 4443 1620 4443 2232 4444 2235 4444 1615 4444 2235 4445 2236 4445 1605 4445 2236 4446 2238 4446 1606 4446 2238 4447 1600 4447 2233 4447 1639 4448 1634 4448 2234 4448 1634 4449 2240 4449 2228 4449 2240 4450 2241 4450 2230 4450 2241 4451 1621 4451 2231 4451 1621 4452 1616 4452 2232 4452 1616 4453 1617 4453 2235 4453 1617 4454 1612 4454 2236 4454 1612 4455 2237 4455 2238 4455 2239 4456 1640 4456 1639 4456 1640 4457 1641 4457 1634 4457 1641 4458 1635 4458 2240 4458 1635 4459 1630 4459 2241 4459 1630 4460 1626 4460 1621 4460 1626 4461 2242 4461 1616 4461 2242 4462 2243 4462 1617 4462 2243 4463 1613 4463 1612 4463 2244 4464 1653 4464 2239 4464 1653 4465 1642 4465 1640 4465 1642 4466 1636 4466 1641 4466 1636 4467 1631 4467 1635 4467 1631 4468 2246 4468 1630 4468 2246 4469 2248 4469 1626 4469 2248 4470 1622 4470 2242 4470 1622 4471 1618 4471 2243 4471 1660 4472 1655 4472 2244 4472 1655 4473 1654 4473 1653 4473 1654 4474 1648 4474 1642 4474 1648 4475 2251 4475 1636 4475 2251 4476 2245 4476 1631 4476 2245 4477 
2247 4477 2246 4477 2247 4478 1627 4478 2248 4478 1627 4479 1623 4479 1622 4479 1666 4480 2249 4480 1660 4480 2249 4481 2250 4481 1655 4481 2250 4482 2256 4482 1654 4482 2256 4483 1644 4483 1648 4483 1644 4484 1643 4484 2251 4484 1643 4485 2252 4485 2245 4485 2252 4486 2253 4486 2247 4486 2253 4487 2254 4487 1627 4487 2257 4488 1667 4488 1666 4488 1667 4489 1661 4489 2249 4489 1661 4490 2258 4490 2250 4490 2258 4491 2255 4491 2256 4491 2255 4492 1645 4492 1644 4492 1645 4493 1646 4493 1643 4493 1646 4494 1632 4494 2252 4494 1632 4495 1633 4495 2253 4495 2047 4496 1681 4496 2257 4496 1681 4497 1669 4497 1667 4497 1669 4498 1668 4498 1661 4498 1668 4499 1656 4499 2258 4499 1656 4500 2259 4500 2255 4500 2259 4501 1649 4501 1645 4501 1649 4502 1647 4502 1646 4502 1647 4503 2260 4503 1632 4503 2263 4504 1681 4504 2046 4504 2266 4505 1669 4505 2263 4505 2268 4506 1668 4506 2266 4506 2269 4507 1656 4507 2268 4507 1650 4508 2259 4508 2269 4508 2272 4509 1649 4509 1650 4509 2261 4510 1647 4510 2272 4510 2016 4511 1690 4511 1686 4511 1690 4512 2262 4512 2046 4512 2273 4513 1690 4513 2015 4513 2262 4514 2264 4514 2263 4514 2267 4515 2262 4515 2273 4515 2264 4516 2265 4516 2266 4516 1675 4517 2264 4517 2267 4517 2265 4518 2270 4518 2268 4518 1670 4519 2265 4519 1675 4519 2270 4520 1657 4520 2269 4520 1662 4521 2270 4521 1670 4521 1657 4522 2271 4522 1650 4522 1658 4523 1657 4523 1662 4523 2271 4524 1651 4524 2272 4524 1652 4525 2271 4525 1658 4525 2013 4526 2015 4526 2014 4526 1691 4527 2273 4527 2274 4527 1682 4528 2267 4528 1691 4528 2277 4529 1675 4529 1682 4529 2275 4530 1670 4530 2277 4530 1663 4531 1662 4531 2275 4531 1659 4532 1658 4532 1663 4532 2276 4533 2274 4533 2013 4533 2281 4534 1691 4534 2280 4534 2278 4535 1682 4535 2281 4535 1676 4536 2277 4536 2278 4536 1671 4537 2275 4537 1676 4537 1664 4538 1663 4538 1671 4538 2283 4539 2276 4539 1703 4539 1694 4540 2283 4540 1703 4540 1704 4541 1694 4541 1693 4541 2279 4542 2280 4542 2276 4542 2284 4543 2281 4543 2012 4543 1683 4544 2278 4544 2284 4544 1684 4545 1676 4545 1683 4545 1672 4546 1671 4546 1684 4546 1715 4547 1707 4547 1704 4547 1695 4548 2279 4548 2283 4548 2282 4549 1695 4549 2283 4549 1707 4550 2282 4550 1694 4550 2011 4551 2012 4551 2279 4551 2286 4552 2284 4552 2010 4552 2285 4553 1683 4553 2286 4553 1678 4554 1684 4554 2285 4554 1717 4555 1709 4555 1715 4555 1709 4556 1708 4556 1707 4556 2291 4557 2011 4557 1695 4557 1696 4558 2291 4558 1695 4558 1708 4559 1696 4559 2282 4559 1701 4560 2010 4560 2011 4560 1687 4561 2286 4561 2009 4561 1685 4562 2285 4562 1687 4562 1722 4563 2287 4563 1717 4563 2287 4564 2288 4564 1709 4564 2288 4565 2289 4565 1708 4565 2296 4566 1701 4566 2291 4566 2290 4567 2296 4567 2291 4567 2289 4568 2290 4568 1696 4568 1700 4569 2009 4569 1701 4569 1688 4570 1687 4570 2292 4570 1723 4571 2293 4571 1722 4571 2293 4572 2294 4572 2287 4572 2294 4573 2300 4573 2288 4573 2300 4574 1710 4574 2289 4574 2295 4575 1700 4575 2296 4575 1697 4576 2295 4576 2296 4576 1710 4577 1697 4577 2290 4577 2297 4578 2292 4578 1700 4578 2298 4579 1724 4579 1723 4579 1724 4580 1719 4580 2293 4580 1719 4581 2299 4581 2294 4581 2299 4582 1711 4582 2300 4582 1711 4583 2301 4583 1710 4583 2307 4584 2297 4584 2295 4584 2302 4585 2307 4585 2295 4585 2301 4586 2302 4586 1697 4586 2007 4587 2008 4587 2297 4587 2309 4588 2303 4588 2298 4588 2303 4589 1725 4589 1724 4589 1725 4590 2304 4590 1719 4590 2304 4591 2311 4591 2299 4591 2311 4592 1712 4592 1711 4592 1712 4593 1705 4593 2301 4593 2305 4594 2007 4594 2307 4594 2306 4595 2305 4595 2307 
4595 1705 4596 2306 4596 2302 4596 1744 4597 2308 4597 2309 4597 2308 4598 1732 4598 2303 4598 1732 4599 1731 4599 1725 4599 1731 4600 1726 4600 2304 4600 1726 4601 2310 4601 2311 4601 2310 4602 2314 4602 1712 4602 2314 4603 2316 4603 1705 4603 2316 4604 1698 4604 2306 4604 1755 4605 1745 4605 1744 4605 1745 4606 2319 4606 2308 4606 2319 4607 2320 4607 1732 4607 2320 4608 1727 4608 1731 4608 1727 4609 2312 4609 1726 4609 2312 4610 2313 4610 2310 4610 2313 4611 1713 4611 2314 4611 1713 4612 2315 4612 2316 4612 2317 4613 2318 4613 1755 4613 2318 4614 1750 4614 1745 4614 1750 4615 1737 4615 2319 4615 1737 4616 2321 4616 2320 4616 2321 4617 1728 4617 1727 4617 1728 4618 2323 4618 2312 4618 2323 4619 1716 4619 2313 4619 1716 4620 1714 4620 1713 4620 2322 4621 1759 4621 2317 4621 1759 4622 2325 4622 2318 4622 2325 4623 1746 4623 1750 4623 1746 4624 1738 4624 1737 4624 1738 4625 1733 4625 2321 4625 1733 4626 2328 4626 1728 4626 2328 4627 1720 4627 2323 4627 1720 4628 1721 4628 1716 4628 2324 4629 2330 4629 2322 4629 2330 4630 2332 4630 1759 4630 2332 4631 2326 4631 2325 4631 2326 4632 2335 4632 1746 4632 2335 4633 1747 4633 1738 4633 1747 4634 1739 4634 1733 4634 1739 4635 2327 4635 2328 4635 2327 4636 1729 4636 1720 4636 1769 4637 1768 4637 2324 4637 1768 4638 2329 4638 2330 4638 2329 4639 2331 4639 2332 4639 2331 4640 2333 4640 2326 4640 2333 4641 2334 4641 2335 4641 2334 4642 2336 4642 1747 4642 2336 4643 2337 4643 1739 4643 2337 4644 1734 4644 2327 4644 2341 4645 2338 4645 1769 4645 2338 4646 2339 4646 1768 4646 2339 4647 2342 4647 2329 4647 2342 4648 2343 4648 2331 4648 2343 4649 1751 4649 2333 4649 1751 4650 2340 4650 2334 4650 2340 4651 2344 4651 2336 4651 2344 4652 1736 4652 2337 4652 1783 4653 2346 4653 2341 4653 2346 4654 1770 4654 2338 4654 1770 4655 1763 4655 2339 4655 1763 4656 1760 4656 2342 4656 1760 4657 1757 4657 2343 4657 1757 4658 1752 4658 1751 4658 1752 4659 1748 4659 2340 4659 1748 4660 1740 4660 2344 4660 1793 4661 2345 4661 1783 4661 2345 4662 1784 4662 2346 4662 1784 4663 1772 4663 1770 4663 1772 4664 1771 4664 1763 4664 1771 4665 1761 4665 1760 4665 1761 4666 2351 4666 1757 4666 2351 4667 2347 4667 1752 4667 2347 4668 1749 4668 1748 4668 1800 4669 2348 4669 1793 4669 2348 4670 1785 4670 2345 4670 1785 4671 2349 4671 1784 4671 2349 4672 2355 4672 1772 4672 2355 4673 1773 4673 1771 4673 1773 4674 2350 4674 1761 4674 2350 4675 2352 4675 2351 4675 2352 4676 1753 4676 2347 4676 2353 4677 1795 4677 1800 4677 1795 4678 2359 4678 2348 4678 2359 4679 2354 4679 1785 4679 2354 4680 1777 4680 2349 4680 1777 4681 1778 4681 2355 4681 1778 4682 1764 4682 1773 4682 1764 4683 2356 4683 2350 4683 2356 4684 1758 4684 2352 4684 1813 4685 2357 4685 2353 4685 2357 4686 2358 4686 1795 4686 2358 4687 2362 4687 2359 4687 2362 4688 2360 4688 2354 4688 2360 4689 2364 4689 1777 4689 2364 4690 1774 4690 1778 4690 1774 4691 1775 4691 1764 4691 1775 4692 1762 4692 2356 4692 2367 4693 2361 4693 1813 4693 2361 4694 1801 4694 2357 4694 1801 4695 2368 4695 2358 4695 2368 4696 1790 4696 2362 4696 1790 4697 2363 4697 2360 4697 2363 4698 1779 4698 2364 4698 1779 4699 2365 4699 1774 4699 2365 4700 2366 4700 1775 4700 2004 4701 2371 4701 2367 4701 2371 4702 1808 4702 2361 4702 1808 4703 1802 4703 1801 4703 1802 4704 1796 4704 2368 4704 1796 4705 1797 4705 1790 4705 1797 4706 2369 4706 2363 4706 2369 4707 2372 4707 1779 4707 2372 4708 1776 4708 2365 4708 2370 4709 1814 4709 2003 4709 1814 4710 1815 4710 2371 4710 1815 4711 1809 4711 1808 4711 1817 4712 1815 4712 1816 4712 1809 4713 1804 4713 1802 4713 2376 4714 
1809 4714 1817 4714 1804 4715 1803 4715 1796 4715 2377 4716 1804 4716 2376 4716 1803 4717 1798 4717 1797 4717 1799 4718 1803 4718 2377 4718 1798 4719 2373 4719 2369 4719 2380 4720 1798 4720 1799 4720 2373 4721 1780 4721 2372 4721 1786 4722 2373 4722 2380 4722 2381 4723 1816 4723 2370 4723 2374 4724 1817 4724 2375 4724 2378 4725 2376 4725 2374 4725 1805 4726 2377 4726 2378 4726 2379 4727 1799 4727 1805 4727 1791 4728 2380 4728 2379 4728 1828 4729 2381 4729 1841 4729 1827 4730 1828 4730 1841 4730 1846 4731 1827 4731 1826 4731 1840 4732 2375 4732 2381 4732 1818 4733 2374 4733 2382 4733 1810 4734 2378 4734 1818 4734 1806 4735 1805 4735 1810 4735 2383 4736 2379 4736 1806 4736 1847 4737 1842 4737 1846 4737 2384 4738 1840 4738 1828 4738 2385 4739 2384 4739 1828 4739 1842 4740 2385 4740 1827 4740 1839 4741 2382 4741 1840 4741 1819 4742 1818 4742 2389 4742 2386 4743 1810 4743 1819 4743 2387 4744 1806 4744 2386 4744 1858 4745 1851 4745 1847 4745 1851 4746 2388 4746 1842 4746 1829 4747 1839 4747 2384 4747 1830 4748 1829 4748 2384 4748 2388 4749 1830 4749 2385 4749 2002 4750 2389 4750 1839 4750 1821 4751 1819 4751 1820 4751 1811 4752 2386 4752 1821 4752 1859 4753 2390 4753 1858 4753 2390 4754 2391 4754 1851 4754 2391 4755 1848 4755 2388 4755 2394 4756 2002 4756 1829 4756 1831 4757 2394 4757 1829 4757 1848 4758 1831 4758 1830 4758 1838 4759 1820 4759 2002 4759 2392 4760 1821 4760 2001 4760 2395 4761 2393 4761 1859 4761 2393 4762 1852 4762 2390 4762 1852 4763 2399 4763 2391 4763 2399 4764 1849 4764 1848 4764 1833 4765 1838 4765 2394 4765 1832 4766 1833 4766 2394 4766 1849 4767 1832 4767 1831 4767 2401 4768 2001 4768 1838 4768 1871 4769 2396 4769 2395 4769 2396 4770 2397 4770 2393 4770 2397 4771 2398 4771 1852 4771 2398 4772 2404 4772 2399 4772 2404 4773 2405 4773 1849 4773 2400 4774 2401 4774 1833 4774 1843 4775 2400 4775 1833 4775 2405 4776 1843 4776 1832 4776 1837 4777 1822 4777 2401 4777 1875 4778 1872 4778 1871 4778 1872 4779 1863 4779 2396 4779 1863 4780 1864 4780 2397 4780 1864 4781 2402 4781 2398 4781 2402 4782 2403 4782 2404 4782 2403 4783 1844 4783 2405 4783 2407 4784 1837 4784 2400 4784 2406 4785 2407 4785 2400 4785 1844 4786 2406 4786 1843 4786 1892 4787 1876 4787 1875 4787 1876 4788 1873 4788 1872 4788 1873 4789 2408 4789 1863 4789 2408 4790 1860 4790 1864 4790 1860 4791 2414 4791 2402 4791 2414 4792 2409 4792 2403 4792 2409 4793 2410 4793 1844 4793 2410 4794 1836 4794 2406 4794 2415 4795 2411 4795 1892 4795 2411 4796 1877 4796 1876 4796 1877 4797 2417 4797 1873 4797 2417 4798 2412 4798 2408 4798 2412 4799 2413 4799 1860 4799 2413 4800 1853 4800 2414 4800 1853 4801 1850 4801 2409 4801 1850 4802 1845 4802 2410 4802 1901 4803 1894 4803 2415 4803 1894 4804 2416 4804 2411 4804 2416 4805 1879 4805 1877 4805 1879 4806 1878 4806 2417 4806 1878 4807 1865 4807 2412 4807 1865 4808 1866 4808 2413 4808 1866 4809 1854 4809 1853 4809 1854 4810 1855 4810 1850 4810 1908 4811 2418 4811 1901 4811 2418 4812 1895 4812 1894 4812 1895 4813 2419 4813 2416 4813 2419 4814 1883 4814 1879 4814 1883 4815 2420 4815 1878 4815 2420 4816 2423 4816 1865 4816 2423 4817 2421 4817 1866 4817 2421 4818 1856 4818 1854 4818 1918 4819 2424 4819 1908 4819 2424 4820 1902 4820 2418 4820 1902 4821 1896 4821 1895 4821 1896 4822 1884 4822 2419 4822 1884 4823 1885 4823 1883 4823 1885 4824 2422 4824 2420 4824 2422 4825 1867 4825 2423 4825 1867 4826 1861 4826 2421 4826 1928 4827 1912 4827 1918 4827 1912 4828 1909 4828 2424 4828 1909 4829 1903 4829 1902 4829 1903 4830 2425 4830 1896 4830 2425 4831 1887 4831 1884 4831 1887 4832 1886 4832 1885 
4832 1886 4833 1874 4833 2422 4833 1874 4834 1869 4834 1867 4834 2426 4835 2428 4835 1928 4835 2428 4836 1913 4836 1912 4836 1913 4837 1910 4837 1909 4837 1910 4838 2429 4838 1903 4838 2429 4839 1904 4839 2425 4839 1904 4840 1889 4840 1887 4840 1889 4841 1888 4841 1886 4841 1888 4842 1880 4842 1874 4842 2431 4843 1929 4843 2426 4843 1929 4844 2427 4844 2428 4844 2427 4845 1920 4845 1913 4845 1920 4846 2433 4846 1910 4846 2433 4847 1905 4847 2429 4847 1905 4848 1897 4848 1904 4848 1897 4849 1898 4849 1889 4849 1898 4850 1882 4850 1888 4850 2430 4851 1939 4851 2431 4851 1939 4852 2432 4852 1929 4852 2432 4853 1921 4853 2427 4853 1921 4854 1922 4854 1920 4854 1922 4855 1914 4855 2433 4855 1914 4856 2437 4856 1905 4856 2437 4857 1899 4857 1897 4857 1899 4858 1891 4858 1898 4858 1944 4859 1940 4859 2430 4859 1940 4860 2434 4860 1939 4860 2434 4861 2435 4861 2432 4861 2435 4862 1923 4862 1921 4862 1923 4863 2439 4863 1922 4863 2439 4864 2436 4864 1914 4864 2436 4865 2438 4865 2437 4865 2438 4866 1900 4866 1899 4866 1954 4867 2440 4867 1944 4867 2440 4868 2442 4868 1940 4868 2442 4869 2443 4869 2434 4869 2443 4870 1934 4870 2435 4870 1934 4871 1924 4871 1923 4871 1924 4872 1916 4872 2439 4872 1916 4873 1915 4873 2436 4873 1915 4874 1907 4874 2438 4874 2448 4875 1955 4875 1954 4875 1955 4876 1945 4876 2440 4876 1945 4877 2441 4877 2442 4877 2441 4878 2444 4878 2443 4878 2444 4879 2445 4879 1934 4879 2445 4880 2446 4880 1924 4880 2446 4881 1925 4881 1916 4881 1925 4882 2447 4882 1915 4882 1973 4883 1966 4883 2448 4883 1966 4884 2451 4884 1955 4884 2451 4885 1956 4885 1945 4885 1956 4886 1941 4886 2441 4886 1941 4887 2449 4887 2444 4887 2449 4888 2450 4888 2445 4888 2450 4889 1926 4889 2446 4889 1926 4890 1917 4890 1925 4890 1976 4891 1967 4891 1973 4891 1967 4892 1961 4892 1966 4892 1961 4893 2452 4893 2451 4893 2452 4894 2455 4894 1956 4894 2455 4895 1942 4895 1941 4895 1942 4896 1935 4896 2449 4896 1935 4897 1930 4897 2450 4897 1930 4898 1927 4898 1926 4898 2048 4899 2457 4899 1976 4899 2457 4900 2453 4900 1967 4900 2453 4901 2454 4901 1961 4901 2454 4902 1958 4902 2452 4902 1958 4903 1957 4903 2455 4903 1957 4904 1946 4904 1942 4904 1946 4905 1943 4905 1935 4905 1943 4906 1937 4906 1930 4906 1982 4907 2459 4907 2048 4907 2459 4908 2456 4908 2457 4908 2456 4909 2458 4909 2453 4909 2465 4910 2461 4910 2459 4910 2458 4911 2460 4911 2454 4911 2461 4912 1977 4912 2456 4912 2460 4913 1959 4913 1958 4913 1977 4914 1969 4914 2458 4914 1959 4915 1947 4915 1957 4915 1969 4916 1968 4916 2460 4916 1947 4917 2462 4917 1946 4917 1968 4918 2463 4918 1959 4918 2462 4919 1936 4919 1943 4919 2463 4920 2468 4920 1947 4920 2468 4921 1948 4921 2462 4921 1982 4922 2466 4922 2465 4922 2465 4923 2466 4923 2464 4923 2470 4924 2465 4924 2464 4924 1989 4925 2464 4925 2466 4925 2467 4926 1977 4926 1998 4926 1974 4927 1969 4927 2467 4927 1962 4928 1968 4928 1974 4928 2474 4929 2463 4929 1962 4929 1949 4930 2468 4930 2474 4930 1991 4931 2469 4931 1989 4931 2472 4932 2470 4932 1999 4932 2471 4933 1999 4933 1983 4933 2471 4934 2472 4934 1999 4934 2471 4935 2473 4935 2472 4935 1978 4936 2467 4936 1979 4936 1970 4937 1974 4937 1978 4937 1971 4938 1962 4938 1970 4938 1960 4939 2474 4939 1971 4939 1994 4940 1995 4940 1991 4940 1980 4941 1978 4941 2475 4941 1972 4942 1970 4942 1980 4942 1963 4943 1971 4943 1972 4943 2526 4944 2746 4944 2542 4944 2525 4945 2542 4945 2476 4945 2520 4946 2476 4946 2479 4946 2511 4947 2479 4947 2477 4947 2506 4948 2477 4948 2486 4948 2568 4949 2486 4949 2480 4949 2496 4950 2480 4950 2478 4950 2570 4951 
2478 4951 2483 4951 2528 4952 2483 4952 2529 4952 2530 4953 2529 4953 3057 4953 3076 4954 2476 4954 2541 4954 3076 4955 2479 4955 2476 4955 3076 4956 3074 4956 2479 4956 2479 4957 3074 4957 2477 4957 2477 4958 3074 4958 3070 4958 2486 4959 3070 4959 3069 4959 2480 4960 3069 4960 2481 4960 2478 4961 2481 4961 2482 4961 2483 4962 2482 4962 2484 4962 2529 4963 2484 4963 2485 4963 3057 4964 2529 4964 2485 4964 2477 4965 3070 4965 2486 4965 2486 4966 3069 4966 2480 4966 2480 4967 2481 4967 2478 4967 2478 4968 2482 4968 2483 4968 2483 4969 2484 4969 2529 4969 3057 4970 2617 4970 2535 4970 3057 4971 2535 4971 2546 4971 3057 4972 2546 4972 2487 4972 3057 4973 2487 4973 2488 4973 3057 4974 2488 4974 2533 4974 3057 4975 2533 4975 2489 4975 3057 4976 2489 4976 2531 4976 3057 4977 2531 4977 2530 4977 2611 4978 2534 4978 2490 4978 2611 4979 2491 4979 2534 4979 2611 4980 2610 4980 2491 4980 2491 4981 2610 4981 2492 4981 2539 4982 2492 4982 2493 4982 2494 4983 2493 4983 2495 4983 2556 4984 2495 4984 2554 4984 2555 4985 2554 4985 2553 4985 2566 4986 2553 4986 2499 4986 2565 4987 2499 4987 2564 4987 2570 4988 2564 4988 2496 4988 2478 4989 2570 4989 2496 4989 2610 4990 2500 4990 2492 4990 2492 4991 2500 4991 2502 4991 2493 4992 2502 4992 2497 4992 2495 4993 2497 4993 2551 4993 2554 4994 2551 4994 2498 4994 2553 4995 2498 4995 2504 4995 2499 4996 2504 4996 2563 4996 2564 4997 2563 4997 2569 4997 2496 4998 2569 4998 2568 4998 2480 4999 2496 4999 2568 4999 2500 5000 2501 5000 2502 5000 2502 5001 2501 5001 2545 5001 2497 5002 2545 5002 2549 5002 2551 5003 2549 5003 2503 5003 2498 5004 2503 5004 2509 5004 2504 5005 2509 5005 2560 5005 2563 5006 2560 5006 2559 5006 2569 5007 2559 5007 2505 5007 2568 5008 2505 5008 2506 5008 2486 5009 2568 5009 2506 5009 2501 5010 2507 5010 2545 5010 2545 5011 2507 5011 2512 5011 2549 5012 2512 5012 2508 5012 2503 5013 2508 5013 2548 5013 2509 5014 2548 5014 2552 5014 2560 5015 2552 5015 2558 5015 2559 5016 2558 5016 2510 5016 2505 5017 2510 5017 2562 5017 2506 5018 2562 5018 2511 5018 2477 5019 2506 5019 2511 5019 2507 5020 2593 5020 2512 5020 2512 5021 2593 5021 2544 5021 2508 5022 2544 5022 2513 5022 2548 5023 2513 5023 2515 5023 2552 5024 2515 5024 2550 5024 2558 5025 2550 5025 2518 5025 2510 5026 2518 5026 2557 5026 2562 5027 2557 5027 2514 5027 2511 5028 2514 5028 2520 5028 2479 5029 2511 5029 2520 5029 2593 5030 2591 5030 2544 5030 2544 5031 2591 5031 2543 5031 2513 5032 2543 5032 2527 5032 2515 5033 2527 5033 2516 5033 2550 5034 2516 5034 2517 5034 2518 5035 2517 5035 2519 5035 2557 5036 2519 5036 2561 5036 2514 5037 2561 5037 2567 5037 2520 5038 2567 5038 2525 5038 2476 5039 2520 5039 2525 5039 2591 5040 2572 5040 2543 5040 2543 5041 2572 5041 2774 5041 2527 5042 2774 5042 2521 5042 2516 5043 2521 5043 2522 5043 2523 5044 2516 5044 2522 5044 2523 5045 2517 5045 2516 5045 2523 5046 2764 5046 2517 5046 2517 5047 2764 5047 2519 5047 2519 5048 2764 5048 2524 5048 2561 5049 2524 5049 2760 5049 2567 5050 2760 5050 2715 5050 2525 5051 2715 5051 2526 5051 2542 5052 2525 5052 2526 5052 2543 5053 2774 5053 2527 5053 2527 5054 2521 5054 2516 5054 2519 5055 2524 5055 2561 5055 2561 5056 2760 5056 2567 5056 2567 5057 2715 5057 2525 5057 2528 5058 2529 5058 2530 5058 2536 5059 2530 5059 2531 5059 2532 5060 2531 5060 2489 5060 2537 5061 2489 5061 2533 5061 2538 5062 2533 5062 2488 5062 2540 5063 2488 5063 2487 5063 2547 5064 2487 5064 2546 5064 2534 5065 2546 5065 2535 5065 2490 5066 2535 5066 2617 5066 2490 5067 2534 5067 2535 5067 2570 5068 2483 5068 2528 5068 2565 5069 2528 5069 2536 
5069 2566 5070 2536 5070 2532 5070 2555 5071 2532 5071 2537 5071 2556 5072 2537 5072 2538 5072 2494 5073 2538 5073 2540 5073 2539 5074 2540 5074 2547 5074 2491 5075 2547 5075 2534 5075 2491 5076 2539 5076 2547 5076 2491 5077 2492 5077 2539 5077 2746 5078 2541 5078 2542 5078 2542 5079 2541 5079 2476 5079 2513 5080 2544 5080 2543 5080 2508 5081 2512 5081 2544 5081 2549 5082 2545 5082 2512 5082 2497 5083 2502 5083 2545 5083 2493 5084 2492 5084 2502 5084 2546 5085 2534 5085 2547 5085 2515 5086 2513 5086 2527 5086 2548 5087 2508 5087 2513 5087 2503 5088 2549 5088 2508 5088 2551 5089 2497 5089 2549 5089 2495 5090 2493 5090 2497 5090 2540 5091 2539 5091 2494 5091 2494 5092 2539 5092 2493 5092 2487 5093 2547 5093 2540 5093 2550 5094 2515 5094 2516 5094 2552 5095 2548 5095 2515 5095 2509 5096 2503 5096 2548 5096 2498 5097 2551 5097 2503 5097 2554 5098 2495 5098 2551 5098 2538 5099 2494 5099 2556 5099 2556 5100 2494 5100 2495 5100 2488 5101 2540 5101 2538 5101 2518 5102 2550 5102 2517 5102 2558 5103 2552 5103 2550 5103 2560 5104 2509 5104 2552 5104 2504 5105 2498 5105 2509 5105 2553 5106 2554 5106 2498 5106 2537 5107 2556 5107 2555 5107 2555 5108 2556 5108 2554 5108 2533 5109 2538 5109 2537 5109 2557 5110 2518 5110 2519 5110 2510 5111 2558 5111 2518 5111 2559 5112 2560 5112 2558 5112 2563 5113 2504 5113 2560 5113 2499 5114 2553 5114 2504 5114 2532 5115 2555 5115 2566 5115 2566 5116 2555 5116 2553 5116 2489 5117 2537 5117 2532 5117 2514 5118 2557 5118 2561 5118 2562 5119 2510 5119 2557 5119 2505 5120 2559 5120 2510 5120 2569 5121 2563 5121 2559 5121 2564 5122 2499 5122 2563 5122 2536 5123 2566 5123 2565 5123 2565 5124 2566 5124 2499 5124 2531 5125 2532 5125 2536 5125 2520 5126 2514 5126 2567 5126 2511 5127 2562 5127 2514 5127 2506 5128 2505 5128 2562 5128 2568 5129 2569 5129 2505 5129 2496 5130 2564 5130 2569 5130 2528 5131 2565 5131 2570 5131 2570 5132 2565 5132 2564 5132 2530 5133 2536 5133 2528 5133 2572 5134 2571 5134 2584 5134 2572 5135 2634 5135 2571 5135 2572 5136 2637 5136 2634 5136 2572 5137 2573 5137 2637 5137 2572 5138 2636 5138 2573 5138 2572 5139 2574 5139 2636 5139 2572 5140 2591 5140 2574 5140 2574 5141 2591 5141 2592 5141 2632 5142 2592 5142 2645 5142 2646 5143 2645 5143 2575 5143 2655 5144 2575 5144 2666 5144 2668 5145 2666 5145 2576 5145 2667 5146 2576 5146 2577 5146 2688 5147 2577 5147 2595 5147 2689 5148 2595 5148 2596 5148 2702 5149 2596 5149 2701 5149 2705 5150 2701 5150 3332 5150 2704 5151 3332 5151 2618 5151 2703 5152 2618 5152 2621 5152 2690 5153 2621 5153 2631 5153 2691 5154 2631 5154 2578 5154 2677 5155 2578 5155 2579 5155 2590 5156 2579 5156 2580 5156 2660 5157 2580 5157 2624 5157 2652 5158 2624 5158 2627 5158 2642 5159 2627 5159 2581 5159 2641 5160 2581 5160 2630 5160 2584 5161 2630 5161 2629 5161 2584 5162 2641 5162 2630 5162 2584 5163 2582 5163 2641 5163 2584 5164 2643 5164 2582 5164 2584 5165 2583 5165 2643 5165 2584 5166 2638 5166 2583 5166 2584 5167 2571 5167 2638 5167 2638 5168 2571 5168 2586 5168 2585 5169 2586 5169 2650 5169 2587 5170 2650 5170 2589 5170 2588 5171 2589 5171 2665 5171 2590 5172 2665 5172 2677 5172 2579 5173 2590 5173 2677 5173 2591 5174 2593 5174 2592 5174 2592 5175 2593 5175 2644 5175 2645 5176 2644 5176 2653 5176 2575 5177 2653 5177 2661 5177 2666 5178 2661 5178 2594 5178 2576 5179 2594 5179 2687 5179 2577 5180 2687 5180 2686 5180 2595 5181 2686 5181 2695 5181 2596 5182 2695 5182 2599 5182 2701 5183 2599 5183 3332 5183 2701 5184 2596 5184 2599 5184 2593 5185 2507 5185 2644 5185 2644 5186 2507 5186 2600 5186 2653 5187 2600 5187 2602 5187 2661 5188 
2602 5188 2603 5188 2594 5189 2603 5189 2604 5189 2687 5190 2604 5190 2597 5190 2686 5191 2597 5191 2607 5191 2695 5192 2607 5192 2598 5192 2599 5193 2598 5193 3332 5193 2599 5194 2695 5194 2598 5194 2507 5195 2501 5195 2600 5195 2600 5196 2501 5196 2601 5196 2602 5197 2601 5197 2608 5197 2603 5198 2608 5198 2605 5198 2604 5199 2605 5199 2685 5199 2597 5200 2685 5200 2606 5200 2607 5201 2606 5201 2699 5201 2598 5202 2699 5202 3332 5202 2598 5203 2607 5203 2699 5203 2501 5204 2500 5204 2601 5204 2601 5205 2500 5205 2673 5205 2608 5206 2673 5206 2684 5206 2605 5207 2684 5207 2683 5207 2685 5208 2683 5208 2694 5208 2606 5209 2694 5209 2700 5209 2699 5210 2700 5210 3057 5210 3332 5211 2699 5211 3057 5211 2500 5212 2610 5212 2673 5212 2673 5213 2610 5213 2681 5213 2684 5214 2681 5214 2682 5214 2683 5215 2682 5215 2693 5215 2694 5216 2693 5216 2609 5216 2700 5217 2609 5217 3057 5217 2700 5218 2694 5218 2609 5218 2610 5219 2611 5219 2681 5219 2681 5220 2611 5220 2692 5220 2682 5221 2692 5221 2612 5221 2693 5222 2612 5222 2614 5222 2609 5223 2614 5223 3057 5223 2609 5224 2693 5224 2614 5224 2611 5225 2490 5225 2692 5225 2692 5226 2490 5226 2613 5226 2612 5227 2613 5227 2615 5227 2614 5228 2615 5228 3057 5228 2614 5229 2612 5229 2615 5229 2490 5230 2617 5230 2613 5230 2613 5231 2617 5231 2616 5231 2615 5232 2616 5232 3057 5232 2615 5233 2613 5233 2616 5233 2617 5234 3057 5234 2616 5234 3332 5235 2619 5235 2618 5235 2618 5236 2619 5236 2621 5236 2621 5237 2619 5237 4129 5237 2620 5238 2621 5238 4129 5238 2620 5239 2631 5239 2621 5239 2620 5240 4128 5240 2631 5240 2631 5241 4128 5241 2622 5241 2578 5242 2622 5242 2623 5242 2579 5243 2623 5243 4125 5243 4124 5244 2579 5244 4125 5244 4124 5245 2580 5245 2579 5245 4124 5246 4123 5246 2580 5246 2580 5247 4123 5247 2625 5247 2624 5248 2625 5248 4173 5248 2626 5249 2624 5249 4173 5249 2626 5250 2627 5250 2624 5250 2626 5251 2628 5251 2627 5251 2627 5252 2628 5252 2581 5252 2581 5253 2628 5253 4121 5253 2629 5254 2581 5254 4121 5254 2629 5255 2630 5255 2581 5255 2631 5256 2622 5256 2578 5256 2578 5257 2623 5257 2579 5257 2580 5258 2625 5258 2624 5258 2592 5259 2632 5259 2574 5259 2574 5260 2632 5260 2636 5260 2636 5261 2632 5261 2647 5261 2573 5262 2647 5262 2635 5262 2637 5263 2635 5263 2649 5263 2634 5264 2649 5264 2633 5264 2571 5265 2633 5265 2586 5265 2571 5266 2634 5266 2633 5266 2635 5267 2637 5267 2573 5267 2573 5268 2636 5268 2647 5268 2649 5269 2634 5269 2637 5269 2586 5270 2585 5270 2638 5270 2638 5271 2585 5271 2583 5271 2583 5272 2585 5272 2639 5272 2643 5273 2639 5273 2640 5273 2582 5274 2640 5274 2642 5274 2641 5275 2642 5275 2581 5275 2641 5276 2582 5276 2642 5276 2640 5277 2582 5277 2643 5277 2643 5278 2583 5278 2639 5278 2644 5279 2645 5279 2592 5279 2645 5280 2646 5280 2632 5280 2632 5281 2646 5281 2647 5281 2647 5282 2646 5282 2654 5282 2635 5283 2654 5283 2659 5283 2649 5284 2659 5284 2648 5284 2633 5285 2648 5285 2657 5285 2586 5286 2657 5286 2650 5286 2586 5287 2633 5287 2657 5287 2659 5288 2649 5288 2635 5288 2635 5289 2647 5289 2654 5289 2648 5290 2633 5290 2649 5290 2650 5291 2587 5291 2585 5291 2585 5292 2587 5292 2639 5292 2639 5293 2587 5293 2651 5293 2640 5294 2651 5294 2652 5294 2642 5295 2652 5295 2627 5295 2642 5296 2640 5296 2652 5296 2640 5297 2639 5297 2651 5297 2600 5298 2653 5298 2644 5298 2653 5299 2575 5299 2645 5299 2575 5300 2655 5300 2646 5300 2646 5301 2655 5301 2654 5301 2654 5302 2655 5302 2662 5302 2659 5303 2662 5303 2658 5303 2648 5304 2658 5304 2664 5304 2657 5305 2664 5305 2656 5305 2650 5306 2656 5306 2589 
5306 2650 5307 2657 5307 2656 5307 2658 5308 2648 5308 2659 5308 2659 5309 2654 5309 2662 5309 2664 5310 2657 5310 2648 5310 2589 5311 2588 5311 2587 5311 2587 5312 2588 5312 2651 5312 2651 5313 2588 5313 2660 5313 2652 5314 2660 5314 2624 5314 2652 5315 2651 5315 2660 5315 2601 5316 2602 5316 2600 5316 2602 5317 2661 5317 2653 5317 2661 5318 2666 5318 2575 5318 2666 5319 2668 5319 2655 5319 2655 5320 2668 5320 2662 5320 2662 5321 2668 5321 2663 5321 2658 5322 2663 5322 2671 5322 2664 5323 2671 5323 2672 5323 2656 5324 2672 5324 2670 5324 2589 5325 2670 5325 2665 5325 2589 5326 2656 5326 2670 5326 2671 5327 2664 5327 2658 5327 2658 5328 2662 5328 2663 5328 2672 5329 2656 5329 2664 5329 2665 5330 2590 5330 2588 5330 2588 5331 2590 5331 2660 5331 2660 5332 2590 5332 2580 5332 2673 5333 2608 5333 2601 5333 2608 5334 2603 5334 2602 5334 2603 5335 2594 5335 2661 5335 2594 5336 2576 5336 2666 5336 2576 5337 2667 5337 2668 5337 2668 5338 2667 5338 2663 5338 2663 5339 2667 5339 2669 5339 2671 5340 2669 5340 2674 5340 2672 5341 2674 5341 2676 5341 2670 5342 2676 5342 2678 5342 2665 5343 2678 5343 2677 5343 2665 5344 2670 5344 2678 5344 2674 5345 2672 5345 2671 5345 2671 5346 2663 5346 2669 5346 2676 5347 2670 5347 2672 5347 2681 5348 2684 5348 2673 5348 2684 5349 2605 5349 2608 5349 2605 5350 2604 5350 2603 5350 2604 5351 2687 5351 2594 5351 2687 5352 2577 5352 2576 5352 2577 5353 2688 5353 2667 5353 2667 5354 2688 5354 2669 5354 2669 5355 2688 5355 2675 5355 2674 5356 2675 5356 2679 5356 2676 5357 2679 5357 2680 5357 2678 5358 2680 5358 2691 5358 2677 5359 2691 5359 2578 5359 2677 5360 2678 5360 2691 5360 2679 5361 2676 5361 2674 5361 2674 5362 2669 5362 2675 5362 2680 5363 2678 5363 2676 5363 2692 5364 2682 5364 2681 5364 2682 5365 2683 5365 2684 5365 2683 5366 2685 5366 2605 5366 2685 5367 2597 5367 2604 5367 2597 5368 2686 5368 2687 5368 2686 5369 2595 5369 2577 5369 2595 5370 2689 5370 2688 5370 2688 5371 2689 5371 2675 5371 2675 5372 2689 5372 2698 5372 2679 5373 2698 5373 2697 5373 2680 5374 2697 5374 2690 5374 2691 5375 2690 5375 2631 5375 2691 5376 2680 5376 2690 5376 2697 5377 2680 5377 2679 5377 2679 5378 2675 5378 2698 5378 2613 5379 2612 5379 2692 5379 2612 5380 2693 5380 2682 5380 2693 5381 2694 5381 2683 5381 2694 5382 2606 5382 2685 5382 2606 5383 2607 5383 2597 5383 2607 5384 2695 5384 2686 5384 2695 5385 2596 5385 2595 5385 2596 5386 2702 5386 2689 5386 2689 5387 2702 5387 2698 5387 2698 5388 2702 5388 2696 5388 2697 5389 2696 5389 2703 5389 2690 5390 2703 5390 2621 5390 2690 5391 2697 5391 2703 5391 2697 5392 2698 5392 2696 5392 2699 5393 2606 5393 2700 5393 2701 5394 2705 5394 2702 5394 2702 5395 2705 5395 2696 5395 2696 5396 2705 5396 2704 5396 2703 5397 2704 5397 2618 5397 2703 5398 2696 5398 2704 5398 3332 5399 2704 5399 2705 5399 5595 5400 2721 5400 3326 5400 5595 5401 2707 5401 2721 5401 5595 5402 2706 5402 2707 5402 2707 5403 2706 5403 2708 5403 2722 5404 2708 5404 2783 5404 2723 5405 2783 5405 2710 5405 2709 5406 2710 5406 2712 5406 2711 5407 2712 5407 2796 5407 2724 5408 2796 5408 2799 5408 2713 5409 2799 5409 2726 5409 2804 5410 2726 5410 2805 5410 2725 5411 2805 5411 2746 5411 2526 5412 2725 5412 2746 5412 2526 5413 2714 5413 2725 5413 2526 5414 2715 5414 2714 5414 2714 5415 2715 5415 2801 5415 2800 5416 2801 5416 2802 5416 2716 5417 2802 5417 2755 5417 2792 5418 2755 5418 2757 5418 2793 5419 2757 5419 2717 5419 2781 5420 2717 5420 2785 5420 2782 5421 2785 5421 2718 5421 2719 5422 2718 5422 2720 5422 2721 5423 2720 5423 3326 5423 2721 5424 2719 5424 2720 5424 2721 5425 
2707 5425 2719 5425 2719 5426 2707 5426 2722 5426 2782 5427 2722 5427 2723 5427 2781 5428 2723 5428 2709 5428 2793 5429 2709 5429 2711 5429 2792 5430 2711 5430 2724 5430 2716 5431 2724 5431 2713 5431 2800 5432 2713 5432 2804 5432 2714 5433 2804 5433 2725 5433 2714 5434 2800 5434 2804 5434 2714 5435 2801 5435 2800 5435 2706 5436 5594 5436 2708 5436 2708 5437 5594 5437 2729 5437 2783 5438 2729 5438 2788 5438 2710 5439 2788 5439 2791 5439 2712 5440 2791 5440 2730 5440 2796 5441 2730 5441 2798 5441 2799 5442 2798 5442 2727 5442 2726 5443 2727 5443 2728 5443 2805 5444 2728 5444 2746 5444 2805 5445 2726 5445 2728 5445 5594 5446 2733 5446 2729 5446 2729 5447 2733 5447 2734 5447 2788 5448 2734 5448 2737 5448 2791 5449 2737 5449 2731 5449 2730 5450 2731 5450 2739 5450 2798 5451 2739 5451 2732 5451 2727 5452 2732 5452 2741 5452 2728 5453 2741 5453 2746 5453 2728 5454 2727 5454 2741 5454 2733 5455 2735 5455 2734 5455 2734 5456 2735 5456 2736 5456 2737 5457 2736 5457 2797 5457 2731 5458 2797 5458 2738 5458 2739 5459 2738 5459 2740 5459 2732 5460 2740 5460 2742 5460 2741 5461 2742 5461 2746 5461 2741 5462 2732 5462 2742 5462 2735 5463 2747 5463 2736 5463 2736 5464 2747 5464 2748 5464 2797 5465 2748 5465 2743 5465 2738 5466 2743 5466 2803 5466 2740 5467 2803 5467 2744 5467 2742 5468 2744 5468 2745 5468 2746 5469 2742 5469 2745 5469 2747 5470 5593 5470 2748 5470 2748 5471 5593 5471 2751 5471 2743 5472 2751 5472 2749 5472 2803 5473 2749 5473 2750 5473 2744 5474 2750 5474 2745 5474 2744 5475 2803 5475 2750 5475 5593 5476 5592 5476 2751 5476 2751 5477 5592 5477 2753 5477 2749 5478 2753 5478 2752 5478 2750 5479 2752 5479 2745 5479 2750 5480 2749 5480 2752 5480 5592 5481 5591 5481 2753 5481 2753 5482 5591 5482 2754 5482 2752 5483 2754 5483 2745 5483 2752 5484 2753 5484 2754 5484 5591 5485 2745 5485 2754 5485 2715 5486 2760 5486 2801 5486 2801 5487 2760 5487 2761 5487 2802 5488 2761 5488 2756 5488 2755 5489 2756 5489 2794 5489 2757 5490 2794 5490 2789 5490 2717 5491 2789 5491 2784 5491 2785 5492 2784 5492 2758 5492 2718 5493 2758 5493 2759 5493 2720 5494 2759 5494 3326 5494 2720 5495 2718 5495 2759 5495 2760 5496 2524 5496 2761 5496 2761 5497 2524 5497 2763 5497 2756 5498 2763 5498 2795 5498 2794 5499 2795 5499 2786 5499 2789 5500 2786 5500 2762 5500 2784 5501 2762 5501 2778 5501 2758 5502 2778 5502 2766 5502 2759 5503 2766 5503 3326 5503 2759 5504 2758 5504 2766 5504 2524 5505 2764 5505 2763 5505 2763 5506 2764 5506 2767 5506 2795 5507 2767 5507 2790 5507 2786 5508 2790 5508 2765 5508 2762 5509 2765 5509 2768 5509 2778 5510 2768 5510 2777 5510 2766 5511 2777 5511 3326 5511 2766 5512 2778 5512 2777 5512 2764 5513 2523 5513 2767 5513 2767 5514 2523 5514 2769 5514 2790 5515 2769 5515 2787 5515 2765 5516 2787 5516 2779 5516 2768 5517 2779 5517 2776 5517 2777 5518 2776 5518 2572 5518 3326 5519 2777 5519 2572 5519 2523 5520 2522 5520 2769 5520 2769 5521 2522 5521 2780 5521 2787 5522 2780 5522 2770 5522 2779 5523 2770 5523 2772 5523 2776 5524 2772 5524 2572 5524 2776 5525 2779 5525 2772 5525 2522 5526 2521 5526 2780 5526 2780 5527 2521 5527 2773 5527 2770 5528 2773 5528 2771 5528 2772 5529 2771 5529 2572 5529 2772 5530 2770 5530 2771 5530 2521 5531 2774 5531 2773 5531 2773 5532 2774 5532 2775 5532 2771 5533 2775 5533 2572 5533 2771 5534 2773 5534 2775 5534 2774 5535 2572 5535 2775 5535 2768 5536 2776 5536 2777 5536 2782 5537 2718 5537 2719 5537 2722 5538 2782 5538 2719 5538 2708 5539 2722 5539 2707 5539 2785 5540 2758 5540 2718 5540 2784 5541 2778 5541 2758 5541 2762 5542 2768 5542 2778 5542 2765 5543 2779 5543 2768 
5543 2787 5544 2770 5544 2779 5544 2780 5545 2773 5545 2770 5545 2729 5546 2783 5546 2708 5546 2781 5547 2785 5547 2782 5547 2723 5548 2781 5548 2782 5548 2783 5549 2723 5549 2722 5549 2717 5550 2784 5550 2785 5550 2789 5551 2762 5551 2784 5551 2786 5552 2765 5552 2762 5552 2790 5553 2787 5553 2765 5553 2769 5554 2780 5554 2787 5554 2734 5555 2788 5555 2729 5555 2788 5556 2710 5556 2783 5556 2793 5557 2717 5557 2781 5557 2709 5558 2793 5558 2781 5558 2710 5559 2709 5559 2723 5559 2757 5560 2789 5560 2717 5560 2794 5561 2786 5561 2789 5561 2795 5562 2790 5562 2786 5562 2767 5563 2769 5563 2790 5563 2736 5564 2737 5564 2734 5564 2737 5565 2791 5565 2788 5565 2791 5566 2712 5566 2710 5566 2792 5567 2757 5567 2793 5567 2711 5568 2792 5568 2793 5568 2712 5569 2711 5569 2709 5569 2755 5570 2794 5570 2757 5570 2756 5571 2795 5571 2794 5571 2763 5572 2767 5572 2795 5572 2748 5573 2797 5573 2736 5573 2797 5574 2731 5574 2737 5574 2731 5575 2730 5575 2791 5575 2730 5576 2796 5576 2712 5576 2716 5577 2755 5577 2792 5577 2724 5578 2716 5578 2792 5578 2796 5579 2724 5579 2711 5579 2802 5580 2756 5580 2755 5580 2761 5581 2763 5581 2756 5581 2751 5582 2743 5582 2748 5582 2743 5583 2738 5583 2797 5583 2738 5584 2739 5584 2731 5584 2739 5585 2798 5585 2730 5585 2798 5586 2799 5586 2796 5586 2800 5587 2802 5587 2716 5587 2713 5588 2800 5588 2716 5588 2799 5589 2713 5589 2724 5589 2801 5590 2761 5590 2802 5590 2753 5591 2749 5591 2751 5591 2749 5592 2803 5592 2743 5592 2803 5593 2740 5593 2738 5593 2740 5594 2732 5594 2739 5594 2732 5595 2727 5595 2798 5595 2727 5596 2726 5596 2799 5596 2726 5597 2804 5597 2713 5597 2742 5598 2740 5598 2744 5598 2725 5599 2804 5599 2805 5599 2806 5600 3157 5600 2872 5600 2866 5601 2872 5601 2873 5601 2862 5602 2873 5602 2807 5602 2808 5603 2807 5603 2809 5603 2857 5604 2809 5604 2877 5604 2852 5605 2877 5605 2879 5605 2854 5606 2879 5606 2810 5606 2811 5607 2810 5607 2820 5607 2847 5608 2820 5608 2818 5608 3164 5609 2874 5609 3165 5609 3164 5610 2881 5610 2874 5610 3164 5611 2812 5611 2881 5611 2881 5612 2812 5612 2888 5612 2889 5613 2888 5613 2813 5613 2890 5614 2813 5614 2824 5614 2882 5615 2824 5615 2826 5615 2883 5616 2826 5616 2814 5616 2821 5617 2814 5617 2815 5617 2816 5618 2815 5618 2817 5618 2928 5619 2816 5619 2817 5619 2928 5620 2898 5620 2816 5620 2928 5621 2922 5621 2898 5621 2898 5622 2922 5622 2818 5622 2820 5623 2898 5623 2818 5623 2820 5624 2819 5624 2898 5624 2820 5625 2810 5625 2819 5625 2819 5626 2810 5626 2878 5626 2821 5627 2878 5627 2883 5627 2814 5628 2821 5628 2883 5628 2812 5629 2822 5629 2888 5629 2888 5630 2822 5630 2823 5630 2813 5631 2823 5631 2825 5631 2824 5632 2825 5632 2830 5632 2826 5633 2830 5633 2827 5633 2814 5634 2827 5634 2828 5634 2815 5635 2828 5635 2943 5635 2817 5636 2815 5636 2943 5636 2822 5637 2829 5637 2823 5637 2823 5638 2829 5638 2831 5638 2825 5639 2831 5639 2897 5639 2830 5640 2897 5640 2899 5640 2827 5641 2899 5641 2835 5641 2828 5642 2835 5642 2944 5642 2943 5643 2828 5643 2944 5643 2829 5644 3147 5644 2831 5644 2831 5645 3147 5645 2832 5645 2897 5646 2832 5646 2833 5646 2899 5647 2833 5647 2839 5647 2835 5648 2839 5648 2834 5648 2944 5649 2835 5649 2834 5649 3147 5650 3146 5650 2832 5650 2832 5651 3146 5651 2836 5651 2840 5652 2836 5652 2837 5652 2841 5653 2837 5653 2957 5653 2954 5654 2841 5654 2957 5654 2954 5655 2838 5655 2841 5655 2954 5656 2953 5656 2838 5656 2838 5657 2953 5657 2839 5657 2833 5658 2838 5658 2839 5658 2833 5659 2840 5659 2838 5659 2833 5660 2832 5660 2840 5660 2840 5661 2832 5661 2836 5661 2840 5662 
2837 5662 2841 5662 2838 5663 2840 5663 2841 5663 2953 5664 2834 5664 2839 5664 2922 5665 2902 5665 2818 5665 2818 5666 2902 5666 2844 5666 2844 5667 3068 5667 2870 5667 2844 5668 2870 5668 2842 5668 2844 5669 2842 5669 2880 5669 2844 5670 2880 5670 2843 5670 2844 5671 2843 5671 2845 5671 2844 5672 2845 5672 2896 5672 2844 5673 2896 5673 2846 5673 2844 5674 2846 5674 2847 5674 2844 5675 2847 5675 2818 5675 3059 5676 2848 5676 3064 5676 3059 5677 2849 5677 2848 5677 3059 5678 2855 5678 2849 5678 2849 5679 2855 5679 2850 5679 2869 5680 2850 5680 2868 5680 2885 5681 2868 5681 2884 5681 2892 5682 2884 5682 2851 5682 2893 5683 2851 5683 2852 5683 2854 5684 2852 5684 2879 5684 2854 5685 2893 5685 2852 5685 2854 5686 2853 5686 2893 5686 2854 5687 2811 5687 2853 5687 2854 5688 2810 5688 2811 5688 2855 5689 2856 5689 2850 5689 2850 5690 2856 5690 2858 5690 2868 5691 2858 5691 2859 5691 2884 5692 2859 5692 2860 5692 2851 5693 2860 5693 2857 5693 2852 5694 2857 5694 2877 5694 2852 5695 2851 5695 2857 5695 2856 5696 3047 5696 2858 5696 2858 5697 3047 5697 2867 5697 2859 5698 2867 5698 2863 5698 2860 5699 2863 5699 2808 5699 2857 5700 2808 5700 2809 5700 2857 5701 2860 5701 2808 5701 3047 5702 2864 5702 2867 5702 2867 5703 2864 5703 2861 5703 2863 5704 2861 5704 2862 5704 2808 5705 2862 5705 2807 5705 2808 5706 2863 5706 2862 5706 2864 5707 2865 5707 2861 5707 2861 5708 2865 5708 2866 5708 2862 5709 2866 5709 2873 5709 2862 5710 2861 5710 2866 5710 2865 5711 2806 5711 2866 5711 2866 5712 2806 5712 2872 5712 2863 5713 2867 5713 2861 5713 2859 5714 2858 5714 2867 5714 2868 5715 2850 5715 2858 5715 2850 5716 2869 5716 2849 5716 2849 5717 2869 5717 2871 5717 2848 5718 2871 5718 2842 5718 2870 5719 2848 5719 2842 5719 2870 5720 3064 5720 2848 5720 2870 5721 3068 5721 3064 5721 2848 5722 2849 5722 2871 5722 3165 5723 2874 5723 2872 5723 3157 5724 3165 5724 2872 5724 2873 5725 2874 5725 2875 5725 2807 5726 2875 5726 2876 5726 2809 5727 2876 5727 2891 5727 2877 5728 2891 5728 2894 5728 2879 5729 2894 5729 2878 5729 2810 5730 2879 5730 2878 5730 2875 5731 2807 5731 2873 5731 2860 5732 2859 5732 2863 5732 2884 5733 2868 5733 2859 5733 2868 5734 2885 5734 2869 5734 2869 5735 2885 5735 2887 5735 2871 5736 2887 5736 2880 5736 2842 5737 2871 5737 2880 5737 2871 5738 2869 5738 2887 5738 2872 5739 2874 5739 2873 5739 2874 5740 2881 5740 2875 5740 2875 5741 2881 5741 2889 5741 2876 5742 2889 5742 2890 5742 2891 5743 2890 5743 2882 5743 2894 5744 2882 5744 2883 5744 2878 5745 2894 5745 2883 5745 2888 5746 2889 5746 2881 5746 2889 5747 2876 5747 2875 5747 2876 5748 2809 5748 2807 5748 2851 5749 2884 5749 2860 5749 2884 5750 2892 5750 2885 5750 2885 5751 2892 5751 2886 5751 2887 5752 2886 5752 2843 5752 2880 5753 2887 5753 2843 5753 2887 5754 2885 5754 2886 5754 2823 5755 2813 5755 2888 5755 2813 5756 2890 5756 2889 5756 2890 5757 2891 5757 2876 5757 2891 5758 2877 5758 2809 5758 2851 5759 2893 5759 2892 5759 2892 5760 2893 5760 2895 5760 2886 5761 2895 5761 2845 5761 2843 5762 2886 5762 2845 5762 2886 5763 2892 5763 2895 5763 2831 5764 2825 5764 2823 5764 2825 5765 2824 5765 2813 5765 2824 5766 2882 5766 2890 5766 2882 5767 2894 5767 2891 5767 2894 5768 2879 5768 2877 5768 2896 5769 2845 5769 2895 5769 2853 5770 2895 5770 2893 5770 2853 5771 2896 5771 2895 5771 2853 5772 2846 5772 2896 5772 2853 5773 2811 5773 2846 5773 2846 5774 2811 5774 2847 5774 2847 5775 2811 5775 2820 5775 2832 5776 2897 5776 2831 5776 2897 5777 2830 5777 2825 5777 2830 5778 2826 5778 2824 5778 2826 5779 2883 5779 2882 5779 2899 5780 2897 5780 2833 
5780 2827 5781 2830 5781 2899 5781 2814 5782 2826 5782 2827 5782 2819 5783 2878 5783 2821 5783 2816 5784 2821 5784 2815 5784 2816 5785 2819 5785 2821 5785 2816 5786 2898 5786 2819 5786 2835 5787 2899 5787 2839 5787 2828 5788 2827 5788 2835 5788 2815 5789 2814 5789 2828 5789 2844 5790 2970 5790 2911 5790 2844 5791 2900 5791 2970 5791 2844 5792 2969 5792 2900 5792 2844 5793 2901 5793 2969 5793 2844 5794 2971 5794 2901 5794 2844 5795 2968 5795 2971 5795 2844 5796 2967 5796 2968 5796 2844 5797 2902 5797 2967 5797 2967 5798 2902 5798 2966 5798 2903 5799 2966 5799 2923 5799 2974 5800 2923 5800 2904 5800 2991 5801 2904 5801 2998 5801 2992 5802 2998 5802 3010 5802 2999 5803 3010 5803 2905 5803 3011 5804 2905 5804 2906 5804 3029 5805 2906 5805 3027 5805 3028 5806 3027 5806 2926 5806 3032 5807 2926 5807 2925 5807 3035 5808 2925 5808 3033 5808 3031 5809 3033 5809 2907 5809 3018 5810 2907 5810 3020 5810 3019 5811 3020 5811 3013 5811 3002 5812 3013 5812 3003 5812 3001 5813 3003 5813 2908 5813 2921 5814 2908 5814 2909 5814 2989 5815 2909 5815 2910 5815 2973 5816 2910 5816 2965 5816 2913 5817 2965 5817 2964 5817 2911 5818 2964 5818 2912 5818 2911 5819 2913 5819 2964 5819 2911 5820 2914 5820 2913 5820 2911 5821 2916 5821 2914 5821 2911 5822 2915 5822 2916 5822 2911 5823 2970 5823 2915 5823 2915 5824 2970 5824 2917 5824 2918 5825 2917 5825 2919 5825 2920 5826 2919 5826 2985 5826 2921 5827 2985 5827 3001 5827 2908 5828 2921 5828 3001 5828 2902 5829 2922 5829 2966 5829 2966 5830 2922 5830 2927 5830 2923 5831 2927 5831 2929 5831 2904 5832 2929 5832 2990 5832 2998 5833 2990 5833 2997 5833 3010 5834 2997 5834 3017 5834 2905 5835 3017 5835 2924 5835 2906 5836 2924 5836 3026 5836 3027 5837 3026 5837 2932 5837 2926 5838 2932 5838 2925 5838 2926 5839 3027 5839 2932 5839 2922 5840 2928 5840 2927 5840 2927 5841 2928 5841 2933 5841 2929 5842 2933 5842 2930 5842 2990 5843 2930 5843 2936 5843 2997 5844 2936 5844 2931 5844 3017 5845 2931 5845 2937 5845 2924 5846 2937 5846 3025 5846 3026 5847 3025 5847 2941 5847 2932 5848 2941 5848 2925 5848 2932 5849 3026 5849 2941 5849 2928 5850 2817 5850 2933 5850 2933 5851 2817 5851 2934 5851 2930 5852 2934 5852 2935 5852 2936 5853 2935 5853 3016 5853 2931 5854 3016 5854 2938 5854 2937 5855 2938 5855 2939 5855 3025 5856 2939 5856 2940 5856 2941 5857 2940 5857 2925 5857 2941 5858 3025 5858 2940 5858 2817 5859 2943 5859 2934 5859 2934 5860 2943 5860 2945 5860 2935 5861 2945 5861 3015 5861 3016 5862 3015 5862 2947 5862 2938 5863 2947 5863 3024 5863 2939 5864 3024 5864 2942 5864 2940 5865 2942 5865 2957 5865 2925 5866 2940 5866 2957 5866 2943 5867 2944 5867 2945 5867 2945 5868 2944 5868 3014 5868 3015 5869 3014 5869 2946 5869 2947 5870 2946 5870 2951 5870 3024 5871 2951 5871 2948 5871 2942 5872 2948 5872 2957 5872 2942 5873 3024 5873 2948 5873 2944 5874 2834 5874 3014 5874 3014 5875 2834 5875 2952 5875 2946 5876 2952 5876 2949 5876 2951 5877 2949 5877 2950 5877 2948 5878 2950 5878 2957 5878 2948 5879 2951 5879 2950 5879 2834 5880 2953 5880 2952 5880 2952 5881 2953 5881 2956 5881 2949 5882 2956 5882 2955 5882 2950 5883 2955 5883 2957 5883 2950 5884 2949 5884 2955 5884 2953 5885 2954 5885 2956 5885 2956 5886 2954 5886 2958 5886 2955 5887 2958 5887 2957 5887 2955 5888 2956 5888 2958 5888 2954 5889 2957 5889 2958 5889 2925 5890 3239 5890 3033 5890 3033 5891 3239 5891 2907 5891 2907 5892 3239 5892 3237 5892 3236 5893 2907 5893 3237 5893 3236 5894 3020 5894 2907 5894 3236 5895 3243 5895 3020 5895 3020 5896 3243 5896 3013 5896 3013 5897 3243 5897 3235 5897 2959 5898 3013 5898 3235 5898 2959 5899 
3003 5899 3013 5899 2959 5900 2960 5900 3003 5900 3003 5901 2960 5901 2961 5901 2908 5902 2961 5902 3234 5902 3222 5903 2908 5903 3234 5903 3222 5904 2909 5904 2908 5904 3222 5905 2962 5905 2909 5905 2909 5906 2962 5906 2910 5906 2910 5907 2962 5907 3219 5907 2963 5908 2910 5908 3219 5908 2963 5909 2965 5909 2910 5909 2963 5910 3218 5910 2965 5910 2965 5911 3218 5911 2912 5911 2964 5912 2965 5912 2912 5912 3003 5913 2961 5913 2908 5913 2966 5914 2903 5914 2967 5914 2967 5915 2903 5915 2968 5915 2968 5916 2903 5916 2982 5916 2971 5917 2982 5917 2981 5917 2901 5918 2981 5918 2976 5918 2969 5919 2976 5919 2978 5919 2900 5920 2978 5920 2980 5920 2970 5921 2980 5921 2917 5921 2970 5922 2900 5922 2980 5922 2981 5923 2901 5923 2971 5923 2971 5924 2968 5924 2982 5924 2976 5925 2969 5925 2901 5925 2978 5926 2900 5926 2969 5926 2917 5927 2918 5927 2915 5927 2915 5928 2918 5928 2916 5928 2916 5929 2918 5929 2972 5929 2914 5930 2972 5930 2973 5930 2913 5931 2973 5931 2965 5931 2913 5932 2914 5932 2973 5932 2914 5933 2916 5933 2972 5933 2927 5934 2923 5934 2966 5934 2923 5935 2974 5935 2903 5935 2903 5936 2974 5936 2982 5936 2982 5937 2974 5937 2975 5937 2981 5938 2975 5938 2977 5938 2976 5939 2977 5939 2979 5939 2978 5940 2979 5940 2983 5940 2980 5941 2983 5941 2988 5941 2917 5942 2988 5942 2919 5942 2917 5943 2980 5943 2988 5943 2977 5944 2976 5944 2981 5944 2981 5945 2982 5945 2975 5945 2979 5946 2978 5946 2976 5946 2983 5947 2980 5947 2978 5947 2919 5948 2920 5948 2918 5948 2918 5949 2920 5949 2972 5949 2972 5950 2920 5950 2989 5950 2973 5951 2989 5951 2910 5951 2973 5952 2972 5952 2989 5952 2933 5953 2929 5953 2927 5953 2929 5954 2904 5954 2923 5954 2904 5955 2991 5955 2974 5955 2974 5956 2991 5956 2975 5956 2975 5957 2991 5957 2987 5957 2977 5958 2987 5958 2986 5958 2979 5959 2986 5959 2996 5959 2983 5960 2996 5960 2995 5960 2988 5961 2995 5961 2984 5961 2919 5962 2984 5962 2985 5962 2919 5963 2988 5963 2984 5963 2986 5964 2979 5964 2977 5964 2977 5965 2975 5965 2987 5965 2996 5966 2983 5966 2979 5966 2995 5967 2988 5967 2983 5967 2985 5968 2921 5968 2920 5968 2920 5969 2921 5969 2989 5969 2989 5970 2921 5970 2909 5970 2934 5971 2930 5971 2933 5971 2930 5972 2990 5972 2929 5972 2990 5973 2998 5973 2904 5973 2998 5974 2992 5974 2991 5974 2991 5975 2992 5975 2987 5975 2987 5976 2992 5976 3006 5976 2986 5977 3006 5977 2993 5977 2996 5978 2993 5978 2994 5978 2995 5979 2994 5979 3008 5979 2984 5980 3008 5980 3004 5980 2985 5981 3004 5981 3001 5981 2985 5982 2984 5982 3004 5982 2993 5983 2996 5983 2986 5983 2986 5984 2987 5984 3006 5984 2994 5985 2995 5985 2996 5985 3008 5986 2984 5986 2995 5986 2945 5987 2935 5987 2934 5987 2935 5988 2936 5988 2930 5988 2936 5989 2997 5989 2990 5989 2997 5990 3010 5990 2998 5990 3010 5991 2999 5991 2992 5991 2992 5992 2999 5992 3006 5992 3006 5993 2999 5993 3000 5993 2993 5994 3000 5994 3005 5994 2994 5995 3005 5995 3007 5995 3008 5996 3007 5996 3009 5996 3004 5997 3009 5997 3002 5997 3001 5998 3002 5998 3003 5998 3001 5999 3004 5999 3002 5999 3005 6000 2994 6000 2993 6000 2993 6001 3006 6001 3000 6001 3007 6002 3008 6002 2994 6002 3009 6003 3004 6003 3008 6003 3014 6004 3015 6004 2945 6004 3015 6005 3016 6005 2935 6005 3016 6006 2931 6006 2936 6006 2931 6007 3017 6007 2997 6007 3017 6008 2905 6008 3010 6008 2905 6009 3011 6009 2999 6009 2999 6010 3011 6010 3000 6010 3000 6011 3011 6011 3023 6011 3005 6012 3023 6012 3022 6012 3007 6013 3022 6013 3012 6013 3009 6014 3012 6014 3019 6014 3002 6015 3019 6015 3013 6015 3002 6016 3009 6016 3019 6016 3022 6017 3007 6017 3005 
[numeric geometry data from an added model asset — appears to be interleaved vertex/normal triangle index lists for a mesh; not human-readable, content unchanged in the repository]
8150 4056 8151 4049 8151 4057 8151 4087 8152 4083 8152 4049 8152 4086 8153 4039 8153 4083 8153 4090 8154 4033 8154 4039 8154 4079 8155 4084 8155 4088 8155 4088 8156 4084 8156 4033 8156 4081 8157 4030 8157 4079 8157 4092 8158 4058 8158 4072 8158 4093 8159 4056 8159 4058 8159 4085 8160 4087 8160 4056 8160 4095 8161 4086 8161 4087 8161 4094 8162 4090 8162 4086 8162 4091 8163 4088 8163 4089 8163 4089 8164 4088 8164 4090 8164 4080 8165 4079 8165 4091 8165 4096 8166 4092 8166 4070 8166 4050 8167 4093 8167 4092 8167 4097 8168 4085 8168 4093 8168 4035 8169 4095 8169 4085 8169 4099 8170 4094 8170 4095 8170 4077 8171 4089 8171 4100 8171 4100 8172 4089 8172 4094 8172 4078 8173 4091 8173 4077 8173 4101 8174 4096 8174 4064 8174 4051 8175 4050 8175 4096 8175 4043 8176 4097 8176 4050 8176 4041 8177 4035 8177 4097 8177 4098 8178 4099 8178 4035 8178 4029 8179 4100 8179 4028 8179 4028 8180 4100 8180 4099 8180 4076 8181 4077 8181 4029 8181 4065 8182 4101 8182 4102 8182 4052 8183 4051 8183 4101 8183 4105 8184 4043 8184 4051 8184 4042 8185 4041 8185 4043 8185 4036 8186 4098 8186 4041 8186 4104 8187 4028 8187 4103 8187 4103 8188 4028 8188 4098 8188 4020 8189 4029 8189 4104 8189 4108 8190 4065 8190 4066 8190 4053 8191 4052 8191 4065 8191 4109 8192 4105 8192 4052 8192 4110 8193 4042 8193 4105 8193 4037 8194 4036 8194 4042 8194 4107 8195 4103 8195 4106 8195 4106 8196 4103 8196 4036 8196 4075 8197 4104 8197 4107 8197 4060 8198 4108 8198 4067 8198 4059 8199 4053 8199 4108 8199 4045 8200 4109 8200 4053 8200 4044 8201 4110 8201 4109 8201 4026 8202 4037 8202 4110 8202 4111 8203 4106 8203 4027 8203 4027 8204 4106 8204 4037 8204 4021 8205 4107 8205 4111 8205 4250 8206 4190 8206 3332 8206 4250 8207 4113 8207 4190 8207 4250 8208 4112 8208 4113 8208 4250 8209 4114 8209 4112 8209 4250 8210 4188 8210 4114 8210 4250 8211 4182 8211 4188 8211 4250 8212 4115 8212 4182 8212 4182 8213 4115 8213 4191 8213 4116 8214 4191 8214 4117 8214 4198 8215 4117 8215 4140 8215 4206 8216 4140 8216 4213 8216 4205 8217 4213 8217 4212 8217 4118 8218 4212 8218 4220 8218 4222 8219 4220 8219 4226 8219 4119 8220 4226 8220 4143 8220 4227 8221 4143 8221 4136 8221 4137 8222 4136 8222 2584 8222 4120 8223 2584 8223 2629 8223 4230 8224 2629 8224 4121 8224 2628 8225 4230 8225 4121 8225 2628 8226 4122 8226 4230 8226 2628 8227 2626 8227 4122 8227 4122 8228 2626 8228 4173 8228 4174 8229 4173 8229 2625 8229 4176 8230 2625 8230 4123 8230 4124 8231 4176 8231 4123 8231 4124 8232 4126 8232 4176 8232 4124 8233 4125 8233 4126 8233 4126 8234 4125 8234 2623 8234 4179 8235 2623 8235 2622 8235 4127 8236 2622 8236 4128 8236 2620 8237 4127 8237 4128 8237 2620 8238 4130 8238 4127 8238 2620 8239 4129 8239 4130 8239 4130 8240 4129 8240 2619 8240 4181 8241 2619 8241 3332 8241 4131 8242 3332 8242 4132 8242 4197 8243 4132 8243 4187 8243 4204 8244 4187 8244 4133 8244 4180 8245 4133 8245 4134 8245 4178 8246 4134 8246 4214 8246 4177 8247 4214 8247 4175 8247 4229 8248 4175 8248 4135 8248 4228 8249 4135 8249 4172 8249 4235 8250 4172 8250 4227 8250 4137 8251 4227 8251 4136 8251 4137 8252 4235 8252 4227 8252 4137 8253 4120 8253 4235 8253 4137 8254 2584 8254 4120 8254 4115 8255 4138 8255 4191 8255 4191 8256 4138 8256 4139 8256 4117 8257 4139 8257 4146 8257 4140 8258 4146 8258 4141 8258 4213 8259 4141 8259 4211 8259 4212 8260 4211 8260 4142 8260 4220 8261 4142 8261 4224 8261 4226 8262 4224 8262 4144 8262 4143 8263 4144 8263 4145 8263 4136 8264 4145 8264 2584 8264 4136 8265 4143 8265 4145 8265 4138 8266 4071 8266 4139 8266 4139 8267 4071 8267 4150 8267 4146 8268 4150 8268 4147 8268 4141 8269 
4147 8269 4210 8269 4211 8270 4210 8270 4148 8270 4142 8271 4148 8271 4149 8271 4224 8272 4149 8272 4234 8272 4144 8273 4234 8273 4154 8273 4145 8274 4154 8274 2584 8274 4145 8275 4144 8275 4154 8275 4071 8276 4155 8276 4150 8276 4150 8277 4155 8277 4156 8277 4147 8278 4156 8278 4158 8278 4210 8279 4158 8279 4219 8279 4148 8280 4219 8280 4151 8280 4149 8281 4151 8281 4152 8281 4234 8282 4152 8282 4153 8282 4154 8283 4153 8283 2584 8283 4154 8284 4234 8284 4153 8284 4155 8285 4157 8285 4156 8285 4156 8286 4157 8286 4209 8286 4158 8287 4209 8287 4159 8287 4219 8288 4159 8288 4233 8288 4151 8289 4233 8289 4232 8289 4152 8290 4232 8290 4160 8290 4153 8291 4160 8291 4161 8291 2584 8292 4153 8292 4161 8292 4157 8293 4162 8293 4209 8293 4209 8294 4162 8294 4163 8294 4159 8295 4163 8295 4223 8295 4233 8296 4223 8296 4231 8296 4232 8297 4231 8297 4166 8297 4160 8298 4166 8298 4161 8298 4160 8299 4232 8299 4166 8299 4162 8300 4168 8300 4163 8300 4163 8301 4168 8301 4164 8301 4223 8302 4164 8302 4165 8302 4231 8303 4165 8303 4167 8303 4166 8304 4167 8304 4161 8304 4166 8305 4231 8305 4167 8305 4168 8306 4069 8306 4164 8306 4164 8307 4069 8307 4169 8307 4165 8308 4169 8308 4170 8308 4167 8309 4170 8309 4161 8309 4167 8310 4165 8310 4170 8310 4069 8311 4068 8311 4169 8311 4169 8312 4068 8312 4171 8312 4170 8313 4171 8313 4161 8313 4170 8314 4169 8314 4171 8314 4068 8315 4161 8315 4171 8315 4120 8316 2629 8316 4230 8316 4235 8317 4230 8317 4228 8317 4172 8318 4235 8318 4228 8318 4122 8319 4173 8319 4174 8319 4229 8320 4174 8320 4177 8320 4175 8321 4229 8321 4177 8321 4174 8322 2625 8322 4176 8322 4177 8323 4176 8323 4178 8323 4214 8324 4177 8324 4178 8324 4126 8325 2623 8325 4179 8325 4180 8326 4179 8326 4204 8326 4133 8327 4180 8327 4204 8327 4179 8328 2622 8328 4127 8328 4204 8329 4127 8329 4197 8329 4187 8330 4204 8330 4197 8330 4130 8331 2619 8331 4181 8331 4131 8332 4181 8332 3332 8332 4131 8333 4130 8333 4181 8333 4131 8334 4197 8334 4130 8334 4131 8335 4132 8335 4197 8335 4191 8336 4116 8336 4182 8336 4182 8337 4116 8337 4188 8337 4188 8338 4116 8338 4189 8338 4114 8339 4189 8339 4193 8339 4112 8340 4193 8340 4183 8340 4113 8341 4183 8341 4195 8341 4190 8342 4195 8342 4196 8342 4184 8343 4196 8343 4185 8343 4186 8344 4185 8344 4187 8344 4132 8345 4186 8345 4187 8345 4132 8346 3332 8346 4186 8346 4186 8347 3332 8347 4184 8347 4185 8348 4186 8348 4184 8348 4193 8349 4112 8349 4114 8349 4114 8350 4188 8350 4189 8350 4183 8351 4113 8351 4112 8351 4195 8352 4190 8352 4113 8352 4196 8353 4184 8353 4190 8353 4190 8354 4184 8354 3332 8354 4139 8355 4117 8355 4191 8355 4117 8356 4198 8356 4116 8356 4116 8357 4198 8357 4189 8357 4189 8358 4198 8358 4192 8358 4193 8359 4192 8359 4202 8359 4183 8360 4202 8360 4194 8360 4195 8361 4194 8361 4199 8361 4196 8362 4199 8362 4203 8362 4185 8363 4203 8363 4133 8363 4187 8364 4185 8364 4133 8364 4202 8365 4183 8365 4193 8365 4193 8366 4189 8366 4192 8366 4194 8367 4195 8367 4183 8367 4199 8368 4196 8368 4195 8368 4203 8369 4185 8369 4196 8369 4127 8370 4130 8370 4197 8370 4150 8371 4146 8371 4139 8371 4146 8372 4140 8372 4117 8372 4140 8373 4206 8373 4198 8373 4198 8374 4206 8374 4192 8374 4192 8375 4206 8375 4207 8375 4202 8376 4207 8376 4201 8376 4194 8377 4201 8377 4208 8377 4199 8378 4208 8378 4200 8378 4203 8379 4200 8379 4134 8379 4133 8380 4203 8380 4134 8380 4201 8381 4194 8381 4202 8381 4202 8382 4192 8382 4207 8382 4208 8383 4199 8383 4194 8383 4200 8384 4203 8384 4199 8384 4179 8385 4127 8385 4204 8385 4156 8386 4147 8386 4150 8386 4147 8387 4141 8387 4146 
8387 4141 8388 4213 8388 4140 8388 4213 8389 4205 8389 4206 8389 4206 8390 4205 8390 4207 8390 4207 8391 4205 8391 4218 8391 4201 8392 4218 8392 4217 8392 4208 8393 4217 8393 4215 8393 4200 8394 4215 8394 4214 8394 4134 8395 4200 8395 4214 8395 4217 8396 4208 8396 4201 8396 4201 8397 4207 8397 4218 8397 4215 8398 4200 8398 4208 8398 4126 8399 4179 8399 4180 8399 4178 8400 4180 8400 4134 8400 4178 8401 4126 8401 4180 8401 4178 8402 4176 8402 4126 8402 4209 8403 4158 8403 4156 8403 4158 8404 4210 8404 4147 8404 4210 8405 4211 8405 4141 8405 4211 8406 4212 8406 4213 8406 4212 8407 4118 8407 4205 8407 4205 8408 4118 8408 4218 8408 4218 8409 4118 8409 4221 8409 4217 8410 4221 8410 4216 8410 4215 8411 4216 8411 4175 8411 4214 8412 4215 8412 4175 8412 4216 8413 4215 8413 4217 8413 4217 8414 4218 8414 4221 8414 4163 8415 4159 8415 4209 8415 4159 8416 4219 8416 4158 8416 4219 8417 4148 8417 4210 8417 4148 8418 4142 8418 4211 8418 4142 8419 4220 8419 4212 8419 4220 8420 4222 8420 4118 8420 4118 8421 4222 8421 4221 8421 4221 8422 4222 8422 4225 8422 4216 8423 4225 8423 4135 8423 4175 8424 4216 8424 4135 8424 4216 8425 4221 8425 4225 8425 4174 8426 4176 8426 4177 8426 4164 8427 4223 8427 4163 8427 4223 8428 4233 8428 4159 8428 4233 8429 4151 8429 4219 8429 4151 8430 4149 8430 4148 8430 4149 8431 4224 8431 4142 8431 4224 8432 4226 8432 4220 8432 4172 8433 4135 8433 4225 8433 4119 8434 4225 8434 4222 8434 4226 8435 4119 8435 4222 8435 4172 8436 4225 8436 4119 8436 4227 8437 4119 8437 4143 8437 4227 8438 4172 8438 4119 8438 4122 8439 4174 8439 4229 8439 4228 8440 4229 8440 4135 8440 4228 8441 4122 8441 4229 8441 4228 8442 4230 8442 4122 8442 4169 8443 4165 8443 4164 8443 4165 8444 4231 8444 4223 8444 4231 8445 4232 8445 4233 8445 4232 8446 4152 8446 4151 8446 4152 8447 4234 8447 4149 8447 4234 8448 4144 8448 4224 8448 4144 8449 4143 8449 4226 8449 4153 8450 4152 8450 4160 8450 4120 8451 4230 8451 4235 8451 4250 8452 3335 8452 4236 8452 4249 8453 4236 8453 4251 8453 4248 8454 4251 8454 4240 8454 4054 8455 4240 8455 4243 8455 4046 8456 4243 8456 4244 8456 4038 8457 4244 8457 4247 8457 4025 8458 4247 8458 4237 8458 4074 8459 4237 8459 4245 8459 4238 8460 4245 8460 4242 8460 4246 8461 4242 8461 4239 8461 4255 8462 4239 8462 5343 8462 4255 8463 4246 8463 4239 8463 4484 8464 4251 8464 4486 8464 4484 8465 4240 8465 4251 8465 4484 8466 4478 8466 4240 8466 4240 8467 4478 8467 4243 8467 4243 8468 4478 8468 4556 8468 4244 8469 4556 8469 4473 8469 4247 8470 4473 8470 4467 8470 4237 8471 4467 8471 4466 8471 4245 8472 4466 8472 4241 8472 4242 8473 4241 8473 4463 8473 4239 8474 4463 8474 5343 8474 4239 8475 4242 8475 4463 8475 4243 8476 4556 8476 4244 8476 4244 8477 4473 8477 4247 8477 4247 8478 4467 8478 4237 8478 4237 8479 4466 8479 4245 8479 4245 8480 4241 8480 4242 8480 4246 8481 4238 8481 4242 8481 4238 8482 4074 8482 4245 8482 4074 8483 4025 8483 4237 8483 4025 8484 4038 8484 4247 8484 4038 8485 4046 8485 4244 8485 4046 8486 4054 8486 4243 8486 4054 8487 4248 8487 4240 8487 4248 8488 4249 8488 4251 8488 4249 8489 4250 8489 4236 8489 3335 8490 4486 8490 4236 8490 4236 8491 4486 8491 4251 8491 4321 8492 4257 8492 4322 8492 4321 8493 4252 8493 4257 8493 4321 8494 4253 8494 4252 8494 4321 8495 4310 8495 4253 8495 4253 8496 4310 8496 4282 8496 4252 8497 4282 8497 4254 8497 4017 8498 4252 8498 4254 8498 4017 8499 4256 8499 4252 8499 4017 8500 4255 8500 4256 8500 4256 8501 4255 8501 4322 8501 4257 8502 4256 8502 4322 8502 4257 8503 4252 8503 4256 8503 4282 8504 4310 8504 4258 8504 4259 8505 4258 8505 4310 8505 4259 8506 
4282 8506 4258 8506 4259 8507 4260 8507 4282 8507 4259 8508 4261 8508 4260 8508 4259 8509 4263 8509 4261 8509 4261 8510 4263 8510 4260 8510 4260 8511 4263 8511 4269 8511 4024 8512 4269 8512 4268 8512 4024 8513 4260 8513 4269 8513 4024 8514 4022 8514 4260 8514 4260 8515 4022 8515 4282 8515 4282 8516 4022 8516 4254 8516 4269 8517 4263 8517 4264 8517 4262 8518 4264 8518 4263 8518 4262 8519 4269 8519 4264 8519 4262 8520 4267 8520 4269 8520 4262 8521 4265 8521 4267 8521 4262 8522 4294 8522 4265 8522 4265 8523 4294 8523 4267 8523 4267 8524 4294 8524 4266 8524 4047 8525 4266 8525 4055 8525 4047 8526 4267 8526 4266 8526 4047 8527 4268 8527 4267 8527 4267 8528 4268 8528 4269 8528 4266 8529 4294 8529 4270 8529 4271 8530 4270 8530 4294 8530 4271 8531 4266 8531 4270 8531 4271 8532 4276 8532 4266 8532 4271 8533 4272 8533 4276 8533 4271 8534 4273 8534 4272 8534 4272 8535 4273 8535 4276 8535 4276 8536 4273 8536 4274 8536 4275 8537 4274 8537 4061 8537 4275 8538 4276 8538 4274 8538 4275 8539 4055 8539 4276 8539 4276 8540 4055 8540 4266 8540 4274 8541 4273 8541 4279 8541 4281 8542 4279 8542 4277 8542 4280 8543 4277 8543 4278 8543 4281 8544 4278 8544 4161 8544 4061 8545 4281 8545 4161 8545 4061 8546 4274 8546 4281 8546 4281 8547 4274 8547 4279 8547 4281 8548 4277 8548 4280 8548 4278 8549 4281 8549 4280 8549 4282 8550 4252 8550 4253 8550 4277 8551 4279 8551 4273 8551 3323 8552 4278 8552 4386 8552 3324 8553 4386 8553 4388 8553 3325 8554 4388 8554 4405 8554 3328 8555 4405 8555 4283 8555 3328 8556 3325 8556 4405 8556 4273 8557 4387 8557 4277 8557 4273 8558 4284 8558 4387 8558 4273 8559 4340 8559 4284 8559 4273 8560 4271 8560 4340 8560 4340 8561 4271 8561 4341 8561 4337 8562 4341 8562 4285 8562 4359 8563 4285 8563 4360 8563 4358 8564 4360 8564 4287 8564 4286 8565 4287 8565 4413 8565 4412 8566 4413 8566 4411 8566 4430 8567 4411 8567 4428 8567 4288 8568 4428 8568 4289 8568 4442 8569 4289 8569 4440 8569 4443 8570 4440 8570 4290 8570 4354 8571 4290 8571 4353 8571 4271 8572 4294 8572 4341 8572 4341 8573 4294 8573 4385 8573 4285 8574 4385 8574 4291 8574 4360 8575 4291 8575 4295 8575 4287 8576 4295 8576 4292 8576 4413 8577 4292 8577 4296 8577 4411 8578 4296 8578 4421 8578 4428 8579 4421 8579 4429 8579 4289 8580 4429 8580 4293 8580 4440 8581 4293 8581 4299 8581 4290 8582 4299 8582 4439 8582 4353 8583 4439 8583 4441 8583 4385 8584 4294 8584 4351 8584 4291 8585 4351 8585 4378 8585 4295 8586 4378 8586 4403 8586 4292 8587 4403 8587 4297 8587 4296 8588 4297 8588 4298 8588 4421 8589 4298 8589 4420 8589 4429 8590 4420 8590 4382 8590 4293 8591 4382 8591 4300 8591 4299 8592 4300 8592 4438 8592 4439 8593 4438 8593 4437 8593 4441 8594 4437 8594 4352 8594 4263 8595 4377 8595 4262 8595 4263 8596 4375 8596 4377 8596 4263 8597 4301 8597 4375 8597 4263 8598 4259 8598 4301 8598 4301 8599 4259 8599 4374 8599 4391 8600 4374 8600 4389 8600 4390 8601 4389 8601 4400 8601 4399 8602 4400 8602 4302 8602 4303 8603 4302 8603 4304 8603 4410 8604 4304 8604 4305 8604 4306 8605 4305 8605 4313 8605 4426 8606 4313 8606 4307 8606 4432 8607 4307 8607 4448 8607 4308 8608 4448 8608 4309 8608 4433 8609 4309 8609 4343 8609 4259 8610 4310 8610 4374 8610 4374 8611 4310 8611 4372 8611 4389 8612 4372 8612 4311 8612 4400 8613 4311 8613 4312 8613 4302 8614 4312 8614 4406 8614 4304 8615 4406 8615 4317 8615 4305 8616 4317 8616 4318 8616 4313 8617 4318 8617 4416 8617 4307 8618 4416 8618 4320 8618 4448 8619 4320 8619 4319 8619 4309 8620 4319 8620 4314 8620 5536 8621 4309 8621 4314 8621 5536 8622 4343 8622 4309 8622 5536 8623 4315 8623 4343 8623 4372 8624 4310 8624 4373 
8624 4311 8625 4373 8625 4316 8625 4312 8626 4316 8626 4329 8626 4406 8627 4329 8627 4330 8627 4317 8628 4330 8628 4409 8628 4318 8629 4409 8629 4326 8629 4416 8630 4326 8630 4328 8630 4320 8631 4328 8631 4331 8631 4319 8632 4331 8632 4314 8632 4319 8633 4320 8633 4331 8633 4310 8634 4321 8634 4373 8634 4373 8635 4321 8635 4322 8635 4323 8636 4373 8636 4322 8636 4323 8637 4316 8637 4373 8637 4323 8638 5526 8638 4316 8638 4316 8639 5526 8639 4329 8639 4329 8640 5526 8640 4324 8640 4330 8641 4324 8641 5527 8641 4409 8642 5527 8642 4325 8642 4326 8643 4325 8643 4327 8643 4328 8644 4327 8644 4331 8644 4328 8645 4326 8645 4327 8645 4329 8646 4324 8646 4330 8646 4330 8647 5527 8647 4409 8647 4409 8648 4325 8648 4326 8648 4327 8649 4314 8649 4331 8649 4332 8650 4369 8650 4368 8650 4332 8651 4422 8651 4369 8651 4332 8652 4333 8652 4422 8652 4332 8653 3330 8653 4333 8653 4333 8654 3330 8654 4334 8654 4363 8655 4334 8655 4335 8655 4336 8656 4335 8656 4398 8656 4359 8657 4398 8657 4337 8657 4285 8658 4359 8658 4337 8658 3330 8659 4283 8659 4334 8659 4334 8660 4283 8660 4404 8660 4335 8661 4404 8661 4338 8661 4398 8662 4338 8662 4339 8662 4337 8663 4339 8663 4340 8663 4341 8664 4337 8664 4340 8664 4404 8665 4283 8665 4405 8665 4338 8666 4405 8666 4342 8666 4339 8667 4342 8667 4284 8667 4340 8668 4339 8668 4284 8668 3325 8669 3324 8669 4388 8669 3324 8670 3323 8670 4386 8670 4343 8671 4315 8671 4433 8671 4433 8672 4315 8672 4344 8672 4308 8673 4344 8673 4345 8673 4432 8674 4345 8674 4425 8674 4426 8675 4425 8675 4397 8675 4306 8676 4397 8676 4417 8676 4410 8677 4417 8677 4395 8677 4303 8678 4395 8678 4394 8678 4399 8679 4394 8679 4346 8679 4390 8680 4346 8680 4347 8680 4391 8681 4347 8681 4393 8681 4301 8682 4393 8682 4375 8682 4301 8683 4391 8683 4393 8683 4301 8684 4374 8684 4391 8684 4348 8685 4315 8685 4435 8685 4436 8686 4435 8686 4434 8686 4396 8687 4434 8687 4349 8687 4427 8688 4349 8688 4384 8688 4419 8689 4384 8689 4383 8689 4418 8690 4383 8690 4381 8690 4407 8691 4381 8691 4350 8691 4401 8692 4350 8692 4408 8692 4402 8693 4408 8693 4380 8693 4392 8694 4380 8694 4376 8694 4377 8695 4376 8695 4379 8695 4262 8696 4379 8696 4351 8696 4294 8697 4262 8697 4351 8697 4352 8698 4315 8698 4441 8698 4441 8699 4315 8699 4353 8699 4353 8700 4315 8700 4354 8700 4354 8701 4315 8701 4449 8701 4443 8702 4449 8702 4355 8702 4442 8703 4355 8703 4356 8703 4288 8704 4356 8704 4431 8704 4430 8705 4431 8705 4357 8705 4412 8706 4357 8706 4424 8706 4286 8707 4424 8707 4364 8707 4358 8708 4364 8708 4336 8708 4359 8709 4336 8709 4398 8709 4359 8710 4358 8710 4336 8710 4359 8711 4360 8711 4358 8711 4361 8712 4315 8712 4446 8712 4445 8713 4446 8713 4444 8713 4415 8714 4444 8714 4370 8714 4414 8715 4370 8715 4369 8715 4362 8716 4369 8716 4422 8716 4423 8717 4422 8717 4333 8717 4363 8718 4333 8718 4334 8718 4363 8719 4423 8719 4333 8719 4363 8720 4364 8720 4423 8720 4363 8721 4336 8721 4364 8721 4363 8722 4335 8722 4336 8722 4365 8723 4315 8723 4447 8723 4366 8724 4447 8724 4367 8724 4371 8725 4367 8725 4368 8725 4369 8726 4371 8726 4368 8726 4369 8727 4370 8727 4371 8727 4371 8728 4370 8728 4366 8728 4367 8729 4371 8729 4366 8729 4335 8730 4334 8730 4404 8730 4311 8731 4372 8731 4373 8731 4389 8732 4374 8732 4372 8732 4377 8733 4379 8733 4262 8733 4377 8734 4375 8734 4392 8734 4376 8735 4377 8735 4392 8735 4378 8736 4351 8736 4379 8736 4376 8737 4378 8737 4379 8737 4376 8738 4403 8738 4378 8738 4376 8739 4380 8739 4403 8739 4403 8740 4380 8740 4297 8740 4297 8741 4380 8741 4408 8741 4298 8742 4408 8742 4350 8742 4420 8743 
4350 8743 4381 8743 4382 8744 4381 8744 4383 8744 4300 8745 4383 8745 4384 8745 4438 8746 4384 8746 4349 8746 4437 8747 4349 8747 4434 8747 4352 8748 4434 8748 4435 8748 4291 8749 4385 8749 4351 8749 4285 8750 4341 8750 4385 8750 4278 8751 4277 8751 4386 8751 4386 8752 4277 8752 4387 8752 4388 8753 4387 8753 4342 8753 4405 8754 4388 8754 4342 8754 4387 8755 4284 8755 4342 8755 4386 8756 4387 8756 4388 8756 4312 8757 4311 8757 4316 8757 4400 8758 4389 8758 4311 8758 4347 8759 4391 8759 4390 8759 4390 8760 4391 8760 4389 8760 4375 8761 4393 8761 4392 8761 4392 8762 4393 8762 4402 8762 4380 8763 4392 8763 4402 8763 4402 8764 4393 8764 4347 8764 4401 8765 4347 8765 4346 8765 4407 8766 4346 8766 4394 8766 4418 8767 4394 8767 4395 8767 4419 8768 4395 8768 4417 8768 4427 8769 4417 8769 4397 8769 4396 8770 4397 8770 4425 8770 4436 8771 4425 8771 4345 8771 4348 8772 4345 8772 4344 8772 4295 8773 4291 8773 4378 8773 4360 8774 4285 8774 4291 8774 4398 8775 4339 8775 4337 8775 4338 8776 4342 8776 4339 8776 4406 8777 4312 8777 4329 8777 4302 8778 4400 8778 4312 8778 4346 8779 4390 8779 4399 8779 4399 8780 4390 8780 4400 8780 4408 8781 4402 8781 4401 8781 4401 8782 4402 8782 4347 8782 4292 8783 4295 8783 4403 8783 4287 8784 4360 8784 4295 8784 4335 8785 4338 8785 4398 8785 4404 8786 4405 8786 4338 8786 4317 8787 4406 8787 4330 8787 4304 8788 4302 8788 4406 8788 4394 8789 4399 8789 4303 8789 4303 8790 4399 8790 4302 8790 4350 8791 4401 8791 4407 8791 4407 8792 4401 8792 4346 8792 4298 8793 4297 8793 4408 8793 4296 8794 4292 8794 4297 8794 4413 8795 4287 8795 4292 8795 4364 8796 4358 8796 4286 8796 4286 8797 4358 8797 4287 8797 4318 8798 4317 8798 4409 8798 4305 8799 4304 8799 4317 8799 4395 8800 4303 8800 4410 8800 4410 8801 4303 8801 4304 8801 4381 8802 4407 8802 4418 8802 4418 8803 4407 8803 4394 8803 4420 8804 4298 8804 4350 8804 4421 8805 4296 8805 4298 8805 4411 8806 4413 8806 4296 8806 4424 8807 4286 8807 4412 8807 4412 8808 4286 8808 4413 8808 4423 8809 4364 8809 4424 8809 4362 8810 4424 8810 4357 8810 4414 8811 4357 8811 4431 8811 4415 8812 4431 8812 4356 8812 4445 8813 4356 8813 4355 8813 4361 8814 4355 8814 4449 8814 4416 8815 4318 8815 4326 8815 4313 8816 4305 8816 4318 8816 4417 8817 4410 8817 4306 8817 4306 8818 4410 8818 4305 8818 4383 8819 4418 8819 4419 8819 4419 8820 4418 8820 4395 8820 4382 8821 4420 8821 4381 8821 4429 8822 4421 8822 4420 8822 4428 8823 4411 8823 4421 8823 4357 8824 4412 8824 4430 8824 4430 8825 4412 8825 4411 8825 4422 8826 4423 8826 4362 8826 4362 8827 4423 8827 4424 8827 4416 8828 4328 8828 4320 8828 4313 8829 4416 8829 4307 8829 4306 8830 4313 8830 4426 8830 4397 8831 4306 8831 4426 8831 4307 8832 4320 8832 4448 8832 4425 8833 4426 8833 4432 8833 4432 8834 4426 8834 4307 8834 4419 8835 4417 8835 4427 8835 4384 8836 4419 8836 4427 8836 4349 8837 4427 8837 4396 8837 4396 8838 4427 8838 4397 8838 4382 8839 4383 8839 4300 8839 4429 8840 4382 8840 4293 8840 4300 8841 4384 8841 4438 8841 4428 8842 4429 8842 4289 8842 4293 8843 4300 8843 4299 8843 4430 8844 4428 8844 4288 8844 4431 8845 4430 8845 4288 8845 4289 8846 4293 8846 4440 8846 4356 8847 4288 8847 4442 8847 4442 8848 4288 8848 4289 8848 4362 8849 4357 8849 4414 8849 4369 8850 4362 8850 4414 8850 4370 8851 4414 8851 4415 8851 4415 8852 4414 8852 4431 8852 4432 8853 4448 8853 4308 8853 4345 8854 4432 8854 4308 8854 4344 8855 4308 8855 4433 8855 4433 8856 4308 8856 4309 8856 4396 8857 4425 8857 4436 8857 4434 8858 4396 8858 4436 8858 4435 8859 4436 8859 4348 8859 4348 8860 4436 8860 4345 8860 4438 8861 4349 8861 4437 
8861 4299 8862 4438 8862 4439 8862 4437 8863 4434 8863 4352 8863 4440 8864 4299 8864 4290 8864 4439 8865 4437 8865 4441 8865 4442 8866 4440 8866 4443 8866 4355 8867 4442 8867 4443 8867 4290 8868 4439 8868 4353 8868 4449 8869 4443 8869 4354 8869 4354 8870 4443 8870 4290 8870 4415 8871 4356 8871 4445 8871 4444 8872 4415 8872 4445 8872 4446 8873 4445 8873 4361 8873 4361 8874 4445 8874 4355 8874 4370 8875 4444 8875 4366 8875 4366 8876 4444 8876 4365 8876 4447 8877 4366 8877 4365 8877 4365 8878 4444 8878 4446 8878 4368 8879 4367 8879 4447 8879 4315 8880 4368 8880 4447 8880 4448 8881 4319 8881 4309 8881 4315 8882 4348 8882 4344 8882 4315 8883 4352 8883 4435 8883 4315 8884 4361 8884 4449 8884 4315 8885 4365 8885 4446 8885 5343 8886 4463 8886 4450 8886 4450 8887 4463 8887 4451 8887 4533 8888 4451 8888 4452 8888 4528 8889 4452 8889 4470 8889 4529 8890 4470 8890 4468 8890 4561 8891 4468 8891 4453 8891 4557 8892 4453 8892 4475 8892 4526 8893 4475 8893 4477 8893 4454 8894 4477 8894 4455 8894 4518 8895 4455 8895 4555 8895 4519 8896 4555 8896 4456 8896 4520 8897 4456 8897 4584 8897 4578 8898 4584 8898 4577 8898 4457 8899 4577 8899 4570 8899 4583 8900 4570 8900 4488 8900 4563 8901 4488 8901 4489 8901 3345 8902 4563 8902 4489 8902 3345 8903 4591 8903 4563 8903 3345 8904 3344 8904 4591 8904 4591 8905 3344 8905 4554 8905 4599 8906 4554 8906 4458 8906 4605 8907 4458 8907 4459 8907 4606 8908 4459 8908 4490 8908 4460 8909 4490 8909 4492 8909 4461 8910 4492 8910 4640 8910 4589 8911 4640 8911 4639 8911 4642 8912 4639 8912 4652 8912 4462 8913 4652 8913 4651 8913 4656 8914 4651 8914 4659 8914 4590 8915 4659 8915 5492 8915 4451 8916 4463 8916 4471 8916 4465 8917 4471 8917 4241 8917 4466 8918 4465 8918 4241 8918 4466 8919 4464 8919 4465 8919 4466 8920 4469 8920 4464 8920 4466 8921 4467 8921 4469 8921 4469 8922 4467 8922 4472 8922 4468 8923 4472 8923 4453 8923 4468 8924 4469 8924 4472 8924 4468 8925 4470 8925 4469 8925 4469 8926 4470 8926 4464 8926 4464 8927 4470 8927 4452 8927 4465 8928 4452 8928 4451 8928 4471 8929 4465 8929 4451 8929 4467 8930 4473 8930 4472 8930 4472 8931 4473 8931 4474 8931 4453 8932 4474 8932 4475 8932 4453 8933 4472 8933 4474 8933 4474 8934 4473 8934 4476 8934 4475 8935 4476 8935 4477 8935 4475 8936 4474 8936 4476 8936 4478 8937 4482 8937 4556 8937 4478 8938 4479 8938 4482 8938 4478 8939 4480 8939 4479 8939 4478 8940 4484 8940 4480 8940 4480 8941 4484 8941 4481 8941 4584 8942 4481 8942 4577 8942 4584 8943 4480 8943 4481 8943 4584 8944 4456 8944 4480 8944 4480 8945 4456 8945 4479 8945 4479 8946 4456 8946 4555 8946 4482 8947 4555 8947 4455 8947 4483 8948 4455 8948 4477 8948 4476 8949 4483 8949 4477 8949 4476 8950 4556 8950 4483 8950 4476 8951 4473 8951 4556 8951 4484 8952 4486 8952 4481 8952 4481 8953 4486 8953 4485 8953 4577 8954 4485 8954 4570 8954 4577 8955 4481 8955 4485 8955 4485 8956 4486 8956 4487 8956 4570 8957 4487 8957 4488 8957 4570 8958 4485 8958 4487 8958 4486 8959 3335 8959 4487 8959 4487 8960 3335 8960 3336 8960 4488 8961 3336 8961 4489 8961 4488 8962 4487 8962 3336 8962 3344 8963 4629 8963 4554 8963 4554 8964 4629 8964 4494 8964 4458 8965 4494 8965 4496 8965 4459 8966 4496 8966 4622 8966 4490 8967 4622 8967 4491 8967 4492 8968 4491 8968 4638 8968 4640 8969 4638 8969 4499 8969 4639 8970 4499 8970 4493 8970 4652 8971 4493 8971 4502 8971 4651 8972 4502 8972 4503 8972 4659 8973 4503 8973 5492 8973 4494 8974 4629 8974 4495 8974 4496 8975 4495 8975 4497 8975 4622 8976 4497 8976 4498 8976 4491 8977 4498 8977 4541 8977 4638 8978 4541 8978 4500 8978 4499 8979 4500 8979 4501 8979 4493 8980 
4501 8980 4650 8980 4502 8981 4650 8981 4543 8981 4503 8982 4543 8982 5492 8982 4504 8983 4619 8983 4505 8983 4504 8984 4620 8984 4619 8984 4504 8985 4506 8985 4620 8985 4620 8986 4506 8986 4507 8986 4542 8987 4507 8987 4632 8987 4508 8988 4632 8988 4538 8988 4509 8989 4538 8989 4660 8989 4661 8990 4660 8990 5492 8990 5492 8991 4553 8991 4506 8991 5503 8992 4550 8992 4662 8992 5503 8993 4510 8993 4550 8993 5503 8994 4511 8994 4510 8994 5503 8995 4512 8995 4511 8995 4511 8996 4512 8996 4522 8996 4513 8997 4522 8997 4523 8997 4514 8998 4523 8998 4627 8998 4547 8999 4627 8999 4615 8999 4548 9000 4615 9000 4608 9000 4515 9001 4608 9001 4598 9001 4516 9002 4598 9002 4517 9002 4596 9003 4517 9003 4518 9003 4519 9004 4518 9004 4555 9004 4519 9005 4596 9005 4518 9005 4519 9006 4521 9006 4596 9006 4519 9007 4520 9007 4521 9007 4519 9008 4456 9008 4520 9008 4512 9009 4612 9009 4522 9009 4522 9010 4612 9010 4628 9010 4523 9011 4628 9011 4626 9011 4627 9012 4626 9012 4524 9012 4615 9013 4524 9013 4601 9013 4608 9014 4601 9014 4602 9014 4598 9015 4602 9015 4525 9015 4517 9016 4525 9016 4454 9016 4518 9017 4454 9017 4455 9017 4518 9018 4517 9018 4454 9018 4628 9019 4612 9019 4614 9019 4626 9020 4614 9020 4618 9020 4524 9021 4618 9021 4611 9021 4601 9022 4611 9022 4603 9022 4602 9023 4603 9023 4558 9023 4525 9024 4558 9024 4526 9024 4454 9025 4526 9025 4477 9025 4454 9026 4525 9026 4526 9026 4527 9027 4613 9027 5510 9027 4527 9028 4616 9028 4613 9028 4527 9029 4560 9029 4616 9029 4527 9030 5511 9030 4560 9030 4560 9031 5511 9031 4532 9031 4530 9032 4532 9032 4528 9032 4529 9033 4528 9033 4470 9033 4529 9034 4530 9034 4528 9034 4529 9035 4531 9035 4530 9035 4529 9036 4561 9036 4531 9036 4529 9037 4468 9037 4561 9037 4532 9038 5511 9038 4533 9038 4528 9039 4533 9039 4452 9039 4528 9040 4532 9040 4533 9040 4553 9041 5492 9041 4534 9041 4552 9042 4534 9042 4537 9042 4535 9043 4537 9043 4632 9043 4507 9044 4535 9044 4632 9044 4507 9045 4506 9045 4535 9045 4535 9046 4506 9046 4552 9046 4537 9047 4535 9047 4552 9047 4536 9048 5492 9048 4660 9048 4538 9049 4536 9049 4660 9049 4538 9050 4537 9050 4536 9050 4538 9051 4632 9051 4537 9051 4661 9052 5492 9052 4539 9052 4636 9053 4539 9053 4540 9053 4649 9054 4540 9054 4501 9054 4500 9055 4649 9055 4501 9055 4500 9056 4633 9056 4649 9056 4500 9057 4541 9057 4633 9057 4633 9058 4541 9058 4634 9058 4635 9059 4634 9059 4631 9059 4542 9060 4631 9060 4620 9060 4507 9061 4542 9061 4620 9061 4544 9062 5492 9062 4543 9062 4650 9063 4544 9063 4543 9063 4650 9064 4540 9064 4544 9064 4650 9065 4501 9065 4540 9065 4590 9066 5492 9066 4657 9066 4655 9067 4657 9067 4647 9067 4569 9068 4647 9068 4576 9068 4581 9069 4576 9069 4646 9069 4582 9070 4646 9070 4546 9070 4545 9071 4546 9071 4514 9071 4547 9072 4514 9072 4627 9072 4547 9073 4545 9073 4514 9073 4547 9074 4549 9074 4545 9074 4547 9075 4548 9075 4549 9075 4547 9076 4615 9076 4548 9076 4648 9077 5492 9077 4663 9077 4658 9078 4663 9078 4550 9078 4551 9079 4550 9079 4510 9079 4645 9080 4510 9080 4511 9080 4513 9081 4511 9081 4522 9081 4513 9082 4645 9082 4511 9082 4513 9083 4546 9083 4645 9083 4513 9084 4514 9084 4546 9084 4513 9085 4523 9085 4514 9085 4552 9086 4506 9086 4553 9086 4534 9087 4552 9087 4553 9087 4523 9088 4522 9088 4628 9088 4458 9089 4554 9089 4494 9089 5511 9090 4450 9090 4533 9090 4533 9091 4450 9091 4451 9091 4241 9092 4471 9092 4463 9092 4555 9093 4482 9093 4479 9093 4455 9094 4483 9094 4482 9094 4482 9095 4483 9095 4556 9095 4452 9096 4465 9096 4464 9096 4557 9097 4475 9097 4526 9097 4558 9098 4557 9098 4526 
9098 4558 9099 4562 9099 4557 9099 4558 9100 4603 9100 4562 9100 4562 9101 4603 9101 4559 9101 4531 9102 4559 9102 4604 9102 4530 9103 4604 9103 4560 9103 4532 9104 4530 9104 4560 9104 4561 9105 4453 9105 4557 9105 4562 9106 4561 9106 4557 9106 4562 9107 4531 9107 4561 9107 4562 9108 4559 9108 4531 9108 4488 9109 4563 9109 4583 9109 4583 9110 4563 9110 4564 9110 4593 9111 4564 9111 4592 9111 4571 9112 4592 9112 4565 9112 4572 9113 4565 9113 4588 9113 4607 9114 4588 9114 4566 9114 4575 9115 4566 9115 4623 9115 4643 9116 4623 9116 4641 9116 4567 9117 4641 9117 4568 9117 4654 9118 4568 9118 4653 9118 4569 9119 4653 9119 4655 9119 4647 9120 4569 9120 4655 9120 4570 9121 4583 9121 4457 9121 4457 9122 4583 9122 4593 9122 4579 9123 4593 9123 4571 9123 4580 9124 4571 9124 4572 9124 4600 9125 4572 9125 4607 9125 4573 9126 4607 9126 4575 9126 4574 9127 4575 9127 4643 9127 4625 9128 4643 9128 4567 9128 4644 9129 4567 9129 4654 9129 4581 9130 4654 9130 4569 9130 4576 9131 4581 9131 4569 9131 4577 9132 4457 9132 4578 9132 4578 9133 4457 9133 4579 9133 4595 9134 4579 9134 4580 9134 4594 9135 4580 9135 4600 9135 4586 9136 4600 9136 4573 9136 4587 9137 4573 9137 4574 9137 4610 9138 4574 9138 4625 9138 4624 9139 4625 9139 4644 9139 4582 9140 4644 9140 4581 9140 4646 9141 4582 9141 4581 9141 4564 9142 4593 9142 4583 9142 4584 9143 4578 9143 4520 9143 4520 9144 4578 9144 4595 9144 4521 9145 4595 9145 4594 9145 4585 9146 4594 9146 4586 9146 4597 9147 4586 9147 4587 9147 4609 9148 4587 9148 4610 9148 4549 9149 4610 9149 4624 9149 4545 9150 4624 9150 4582 9150 4546 9151 4545 9151 4582 9151 4593 9152 4579 9152 4457 9152 4579 9153 4595 9153 4578 9153 4595 9154 4521 9154 4520 9154 4563 9155 4591 9155 4564 9155 4564 9156 4591 9156 4599 9156 4592 9157 4599 9157 4605 9157 4565 9158 4605 9158 4606 9158 4588 9159 4606 9159 4460 9159 4566 9160 4460 9160 4461 9160 4623 9161 4461 9161 4589 9161 4641 9162 4589 9162 4642 9162 4568 9163 4642 9163 4462 9163 4653 9164 4462 9164 4656 9164 4655 9165 4656 9165 4590 9165 4657 9166 4655 9166 4590 9166 4554 9167 4599 9167 4591 9167 4599 9168 4592 9168 4564 9168 4592 9169 4571 9169 4593 9169 4571 9170 4580 9170 4579 9170 4580 9171 4594 9171 4595 9171 4516 9172 4517 9172 4596 9172 4585 9173 4596 9173 4521 9173 4594 9174 4585 9174 4521 9174 4516 9175 4596 9175 4585 9175 4597 9176 4585 9176 4586 9176 4597 9177 4516 9177 4585 9177 4597 9178 4515 9178 4516 9178 4597 9179 4609 9179 4515 9179 4597 9180 4587 9180 4609 9180 4598 9181 4525 9181 4517 9181 4602 9182 4558 9182 4525 9182 4604 9183 4530 9183 4531 9183 4605 9184 4599 9184 4458 9184 4565 9185 4592 9185 4605 9185 4572 9186 4571 9186 4565 9186 4600 9187 4580 9187 4572 9187 4586 9188 4594 9188 4600 9188 4515 9189 4598 9189 4516 9189 4608 9190 4602 9190 4598 9190 4601 9191 4603 9191 4602 9191 4603 9192 4611 9192 4559 9192 4559 9193 4611 9193 4617 9193 4604 9194 4617 9194 4616 9194 4560 9195 4604 9195 4616 9195 4604 9196 4559 9196 4617 9196 4495 9197 4496 9197 4494 9197 4496 9198 4459 9198 4458 9198 4459 9199 4606 9199 4605 9199 4497 9200 4622 9200 4496 9200 4606 9201 4588 9201 4565 9201 4622 9202 4490 9202 4459 9202 4588 9203 4607 9203 4572 9203 4490 9204 4460 9204 4606 9204 4607 9205 4573 9205 4600 9205 4460 9206 4566 9206 4588 9206 4573 9207 4587 9207 4586 9207 4566 9208 4575 9208 4607 9208 4575 9209 4574 9209 4573 9209 4608 9210 4515 9210 4548 9210 4548 9211 4515 9211 4609 9211 4549 9212 4609 9212 4610 9212 4549 9213 4548 9213 4609 9213 4574 9214 4610 9214 4587 9214 4601 9215 4608 9215 4615 9215 4611 9216 4601 9216 4524 9216 4617 9217 
4611 9217 4618 9217 4613 9218 4618 9218 4614 9218 5510 9219 4614 9219 4612 9219 5510 9220 4613 9220 4614 9220 4524 9221 4615 9221 4627 9221 4616 9222 4617 9222 4613 9222 4613 9223 4617 9223 4618 9223 4618 9224 4524 9224 4626 9224 4497 9225 4495 9225 4630 9225 4621 9226 4630 9226 4619 9226 4631 9227 4619 9227 4620 9227 4631 9228 4621 9228 4619 9228 4631 9229 4634 9229 4621 9229 4621 9230 4634 9230 4498 9230 4497 9231 4621 9231 4498 9231 4497 9232 4630 9232 4621 9232 4491 9233 4622 9233 4498 9233 4492 9234 4490 9234 4491 9234 4461 9235 4460 9235 4492 9235 4623 9236 4566 9236 4461 9236 4643 9237 4575 9237 4623 9237 4625 9238 4574 9238 4643 9238 4624 9239 4610 9239 4625 9239 4545 9240 4549 9240 4624 9240 4523 9241 4626 9241 4627 9241 4628 9242 4614 9242 4626 9242 4629 9243 4505 9243 4495 9243 4495 9244 4505 9244 4630 9244 4630 9245 4505 9245 4619 9245 4635 9246 4631 9246 4542 9246 4508 9247 4542 9247 4632 9247 4508 9248 4635 9248 4542 9248 4508 9249 4637 9249 4635 9249 4508 9250 4509 9250 4637 9250 4508 9251 4538 9251 4509 9251 4634 9252 4541 9252 4498 9252 4633 9253 4634 9253 4635 9253 4637 9254 4633 9254 4635 9254 4637 9255 4649 9255 4633 9255 4637 9256 4636 9256 4649 9256 4637 9257 4509 9257 4636 9257 4636 9258 4509 9258 4661 9258 4539 9259 4636 9259 4661 9259 4541 9260 4638 9260 4491 9260 4638 9261 4640 9261 4492 9261 4499 9262 4638 9262 4500 9262 4640 9263 4589 9263 4461 9263 4639 9264 4640 9264 4499 9264 4589 9265 4641 9265 4623 9265 4642 9266 4589 9266 4639 9266 4641 9267 4567 9267 4643 9267 4568 9268 4641 9268 4642 9268 4567 9269 4644 9269 4625 9269 4654 9270 4567 9270 4568 9270 4644 9271 4582 9271 4624 9271 4581 9272 4644 9272 4654 9272 4645 9273 4546 9273 4646 9273 4551 9274 4646 9274 4576 9274 4658 9275 4576 9275 4647 9275 4648 9276 4647 9276 4657 9276 4540 9277 4649 9277 4636 9277 4501 9278 4493 9278 4499 9278 4493 9279 4652 9279 4639 9279 4502 9280 4493 9280 4650 9280 4652 9281 4462 9281 4642 9281 4651 9282 4652 9282 4502 9282 4462 9283 4653 9283 4568 9283 4656 9284 4462 9284 4651 9284 4653 9285 4569 9285 4654 9285 4655 9286 4653 9286 4656 9286 4645 9287 4646 9287 4551 9287 4510 9288 4645 9288 4551 9288 4550 9289 4551 9289 4658 9289 4658 9290 4551 9290 4576 9290 4648 9291 4657 9291 5492 9291 4647 9292 4648 9292 4658 9292 4658 9293 4648 9293 4663 9293 4656 9294 4659 9294 4590 9294 4651 9295 4503 9295 4659 9295 4543 9296 4503 9296 4502 9296 5492 9297 4544 9297 4539 9297 4539 9298 4544 9298 4540 9298 4660 9299 4661 9299 4509 9299 5492 9300 4536 9300 4534 9300 4534 9301 4536 9301 4537 9301 4662 9302 4550 9302 4663 9302 5492 9303 4662 9303 4663 9303 5349 9304 3760 9304 4759 9304 4714 9305 4759 9305 4757 9305 4713 9306 4757 9306 4664 9306 4665 9307 4664 9307 5347 9307 4665 9308 4713 9308 4664 9308 3759 9309 4666 9309 3761 9309 3759 9310 4667 9310 4666 9310 3759 9311 4711 9311 4667 9311 3759 9312 4668 9312 4711 9312 4711 9313 4668 9313 4756 9313 4767 9314 4756 9314 4708 9314 4709 9315 4708 9315 4766 9315 4778 9316 4766 9316 4777 9316 4779 9317 4777 9317 4783 9317 4794 9318 4783 9318 4669 9318 4670 9319 4669 9319 4806 9319 4809 9320 4806 9320 4671 9320 4672 9321 4671 9321 4808 9321 4818 9322 4808 9322 4820 9322 4819 9323 4820 9323 4731 9323 4668 9324 4675 9324 4756 9324 4756 9325 4675 9325 4673 9325 4708 9326 4673 9326 4755 9326 4766 9327 4755 9327 4769 9327 4777 9328 4769 9328 4674 9328 4783 9329 4674 9329 4776 9329 4669 9330 4776 9330 4782 9330 4806 9331 4782 9331 4803 9331 4671 9332 4803 9332 4804 9332 4808 9333 4804 9333 4678 9333 4820 9334 4678 9334 4816 9334 4731 9335 4816 9335 4680 
9335 4673 9336 4675 9336 4676 9336 4755 9337 4676 9337 4677 9337 4769 9338 4677 9338 4750 9338 4674 9339 4750 9339 4751 9339 4776 9340 4751 9340 4753 9340 4782 9341 4753 9341 4793 9341 4803 9342 4793 9342 4802 9342 4804 9343 4802 9343 4807 9343 4678 9344 4807 9344 4679 9344 4816 9345 4679 9345 4817 9345 4680 9346 4817 9346 4815 9346 4681 9347 4730 9347 3749 9347 4681 9348 4721 9348 4730 9348 4681 9349 4682 9349 4721 9349 4681 9350 4683 9350 4682 9350 4682 9351 4683 9351 4684 9351 4761 9352 4684 9352 4747 9352 4768 9353 4747 9353 4760 9353 4685 9354 4760 9354 4772 9354 4774 9355 4772 9355 4781 9355 4791 9356 4781 9356 4686 9356 4798 9357 4686 9357 4691 9357 4799 9358 4691 9358 4797 9358 4716 9359 4797 9359 4811 9359 4812 9360 4811 9360 4813 9360 4715 9361 4813 9361 4695 9361 4683 9362 4687 9362 4684 9362 4684 9363 4687 9363 4746 9363 4747 9364 4746 9364 4698 9364 4760 9365 4698 9365 4688 9365 4772 9366 4688 9366 4689 9366 4781 9367 4689 9367 4690 9367 4686 9368 4690 9368 4788 9368 4691 9369 4788 9369 4692 9369 4797 9370 4692 9370 4693 9370 4811 9371 4693 9371 4701 9371 4813 9372 4701 9372 4694 9372 4696 9373 4813 9373 4694 9373 4696 9374 4695 9374 4813 9374 4696 9375 5467 9375 4695 9375 4746 9376 4687 9376 4697 9376 4698 9377 4697 9377 4702 9377 4688 9378 4702 9378 4699 9378 4689 9379 4699 9379 4707 9379 4690 9380 4707 9380 4780 9380 4788 9381 4780 9381 4789 9381 4692 9382 4789 9382 4706 9382 4693 9383 4706 9383 4700 9383 4701 9384 4700 9384 4694 9384 4701 9385 4693 9385 4700 9385 4687 9386 3733 9386 4697 9386 4697 9387 3733 9387 5461 9387 5462 9388 4697 9388 5461 9388 5462 9389 4702 9389 4697 9389 5462 9390 5464 9390 4702 9390 4702 9391 5464 9391 4699 9391 4699 9392 5464 9392 5465 9392 4707 9393 5465 9393 4703 9393 4780 9394 4703 9394 4704 9394 4789 9395 4704 9395 4705 9395 4706 9396 4705 9396 4700 9396 4706 9397 4789 9397 4705 9397 4699 9398 5465 9398 4707 9398 4707 9399 4703 9399 4780 9399 4780 9400 4704 9400 4789 9400 4705 9401 4694 9401 4700 9401 5354 9402 4737 9402 4825 9402 5354 9403 4795 9403 4737 9403 5354 9404 4738 9404 4795 9404 5354 9405 5348 9405 4738 9405 4738 9406 5348 9406 4744 9406 4740 9407 4744 9407 4770 9407 4735 9408 4770 9408 4710 9408 4709 9409 4710 9409 4767 9409 4708 9410 4709 9410 4767 9410 5348 9411 5347 9411 4744 9411 4744 9412 5347 9412 4745 9412 4770 9413 4745 9413 4771 9413 4710 9414 4771 9414 4712 9414 4767 9415 4712 9415 4711 9415 4756 9416 4767 9416 4711 9416 4745 9417 5347 9417 4664 9417 4771 9418 4664 9418 4758 9418 4712 9419 4758 9419 4667 9419 4711 9420 4712 9420 4667 9420 4713 9421 4714 9421 4757 9421 4714 9422 5349 9422 4759 9422 4695 9423 5467 9423 4715 9423 4715 9424 5467 9424 4828 9424 4812 9425 4828 9425 4765 9425 4716 9426 4765 9426 4717 9426 4799 9427 4717 9427 4801 9427 4798 9428 4801 9428 4790 9428 4791 9429 4790 9429 4718 9429 4774 9430 4718 9430 4773 9430 4685 9431 4773 9431 4762 9431 4768 9432 4762 9432 4719 9432 4761 9433 4719 9433 4720 9433 4682 9434 4720 9434 4721 9434 4682 9435 4761 9435 4720 9435 4682 9436 4684 9436 4761 9436 4827 9437 5467 9437 4829 9437 4814 9438 4829 9438 4722 9438 4764 9439 4722 9439 4723 9439 4763 9440 4723 9440 4805 9440 4800 9441 4805 9441 4724 9441 4792 9442 4724 9442 4726 9442 4725 9443 4726 9443 4754 9443 4775 9444 4754 9444 4752 9444 4727 9445 4752 9445 4728 9445 4748 9446 4728 9446 4729 9446 4730 9447 4729 9447 4749 9447 3749 9448 4749 9448 4676 9448 4675 9449 3749 9449 4676 9449 4815 9450 5467 9450 4680 9450 4680 9451 5467 9451 4731 9451 4731 9452 5467 9452 4819 9452 4819 9453 5467 9453 4732 9453 4818 9454 
4732 9454 4733 9454 4672 9455 4733 9455 4734 9455 4809 9456 4734 9456 4810 9456 4670 9457 4810 9457 4786 9457 4794 9458 4786 9458 4784 9458 4779 9459 4784 9459 4785 9459 4778 9460 4785 9460 4735 9460 4709 9461 4735 9461 4710 9461 4709 9462 4778 9462 4735 9462 4709 9463 4766 9463 4778 9463 4830 9464 5467 9464 4831 9464 4822 9465 4831 9465 4736 9465 4821 9466 4736 9466 4823 9466 4787 9467 4823 9467 4737 9467 4796 9468 4737 9468 4795 9468 4739 9469 4795 9469 4738 9469 4740 9470 4738 9470 4744 9470 4740 9471 4739 9471 4738 9471 4740 9472 4785 9472 4739 9472 4740 9473 4735 9473 4785 9473 4740 9474 4770 9474 4735 9474 4824 9475 5467 9475 4826 9475 4743 9476 4826 9476 4741 9476 4742 9477 4741 9477 4825 9477 4737 9478 4742 9478 4825 9478 4737 9479 4823 9479 4742 9479 4742 9480 4823 9480 4743 9480 4741 9481 4742 9481 4743 9481 4770 9482 4744 9482 4745 9482 4698 9483 4746 9483 4697 9483 4747 9484 4684 9484 4746 9484 4730 9485 4749 9485 3749 9485 4730 9486 4721 9486 4748 9486 4729 9487 4730 9487 4748 9487 4677 9488 4676 9488 4749 9488 4729 9489 4677 9489 4749 9489 4729 9490 4750 9490 4677 9490 4729 9491 4728 9491 4750 9491 4750 9492 4728 9492 4751 9492 4751 9493 4728 9493 4752 9493 4753 9494 4752 9494 4754 9494 4793 9495 4754 9495 4726 9495 4802 9496 4726 9496 4724 9496 4807 9497 4724 9497 4805 9497 4679 9498 4805 9498 4723 9498 4817 9499 4723 9499 4722 9499 4815 9500 4722 9500 4829 9500 4755 9501 4673 9501 4676 9501 4708 9502 4756 9502 4673 9502 3760 9503 3761 9503 4759 9503 4759 9504 3761 9504 4666 9504 4757 9505 4666 9505 4758 9505 4664 9506 4757 9506 4758 9506 4666 9507 4667 9507 4758 9507 4759 9508 4666 9508 4757 9508 4688 9509 4698 9509 4702 9509 4760 9510 4747 9510 4698 9510 4719 9511 4761 9511 4768 9511 4768 9512 4761 9512 4747 9512 4721 9513 4720 9513 4748 9513 4748 9514 4720 9514 4727 9514 4728 9515 4748 9515 4727 9515 4727 9516 4720 9516 4719 9516 4775 9517 4719 9517 4762 9517 4725 9518 4762 9518 4773 9518 4792 9519 4773 9519 4718 9519 4800 9520 4718 9520 4790 9520 4763 9521 4790 9521 4801 9521 4764 9522 4801 9522 4717 9522 4814 9523 4717 9523 4765 9523 4827 9524 4765 9524 4828 9524 4769 9525 4755 9525 4677 9525 4766 9526 4708 9526 4755 9526 4710 9527 4712 9527 4767 9527 4771 9528 4758 9528 4712 9528 4689 9529 4688 9529 4699 9529 4772 9530 4760 9530 4688 9530 4762 9531 4768 9531 4685 9531 4685 9532 4768 9532 4760 9532 4752 9533 4727 9533 4775 9533 4775 9534 4727 9534 4719 9534 4674 9535 4769 9535 4750 9535 4777 9536 4766 9536 4769 9536 4770 9537 4771 9537 4710 9537 4745 9538 4664 9538 4771 9538 4690 9539 4689 9539 4707 9539 4781 9540 4772 9540 4689 9540 4773 9541 4685 9541 4774 9541 4774 9542 4685 9542 4772 9542 4754 9543 4775 9543 4725 9543 4725 9544 4775 9544 4762 9544 4753 9545 4751 9545 4752 9545 4776 9546 4674 9546 4751 9546 4783 9547 4777 9547 4674 9547 4785 9548 4778 9548 4779 9548 4779 9549 4778 9549 4777 9549 4788 9550 4690 9550 4780 9550 4686 9551 4781 9551 4690 9551 4718 9552 4774 9552 4791 9552 4791 9553 4774 9553 4781 9553 4726 9554 4725 9554 4792 9554 4792 9555 4725 9555 4773 9555 4793 9556 4753 9556 4754 9556 4782 9557 4776 9557 4753 9557 4669 9558 4783 9558 4776 9558 4784 9559 4779 9559 4794 9559 4794 9560 4779 9560 4783 9560 4739 9561 4785 9561 4784 9561 4796 9562 4784 9562 4786 9562 4787 9563 4786 9563 4810 9563 4821 9564 4810 9564 4734 9564 4822 9565 4734 9565 4733 9565 4830 9566 4733 9566 4732 9566 4692 9567 4788 9567 4789 9567 4691 9568 4686 9568 4788 9568 4790 9569 4791 9569 4798 9569 4798 9570 4791 9570 4686 9570 4724 9571 4792 9571 4800 9571 4800 9572 4792 9572 4718 
9572 4802 9573 4793 9573 4726 9573 4803 9574 4782 9574 4793 9574 4806 9575 4669 9575 4782 9575 4786 9576 4794 9576 4670 9576 4670 9577 4794 9577 4669 9577 4795 9578 4739 9578 4796 9578 4796 9579 4739 9579 4784 9579 4692 9580 4706 9580 4693 9580 4691 9581 4692 9581 4797 9581 4798 9582 4691 9582 4799 9582 4801 9583 4798 9583 4799 9583 4797 9584 4693 9584 4811 9584 4717 9585 4799 9585 4716 9585 4716 9586 4799 9586 4797 9586 4800 9587 4790 9587 4763 9587 4805 9588 4800 9588 4763 9588 4723 9589 4763 9589 4764 9589 4764 9590 4763 9590 4801 9590 4802 9591 4724 9591 4807 9591 4803 9592 4802 9592 4804 9592 4807 9593 4805 9593 4679 9593 4806 9594 4803 9594 4671 9594 4804 9595 4807 9595 4678 9595 4670 9596 4806 9596 4809 9596 4810 9597 4670 9597 4809 9597 4671 9598 4804 9598 4808 9598 4734 9599 4809 9599 4672 9599 4672 9600 4809 9600 4671 9600 4796 9601 4786 9601 4787 9601 4737 9602 4796 9602 4787 9602 4823 9603 4787 9603 4821 9603 4821 9604 4787 9604 4810 9604 4716 9605 4811 9605 4812 9605 4765 9606 4716 9606 4812 9606 4828 9607 4812 9607 4715 9607 4715 9608 4812 9608 4813 9608 4764 9609 4717 9609 4814 9609 4722 9610 4764 9610 4814 9610 4829 9611 4814 9611 4827 9611 4827 9612 4814 9612 4765 9612 4679 9613 4723 9613 4817 9613 4678 9614 4679 9614 4816 9614 4817 9615 4722 9615 4815 9615 4808 9616 4678 9616 4820 9616 4816 9617 4817 9617 4680 9617 4672 9618 4808 9618 4818 9618 4733 9619 4672 9619 4818 9619 4820 9620 4816 9620 4731 9620 4732 9621 4818 9621 4819 9621 4819 9622 4818 9622 4820 9622 4821 9623 4734 9623 4822 9623 4736 9624 4821 9624 4822 9624 4831 9625 4822 9625 4830 9625 4830 9626 4822 9626 4733 9626 4823 9627 4736 9627 4743 9627 4743 9628 4736 9628 4824 9628 4826 9629 4743 9629 4824 9629 4824 9630 4736 9630 4831 9630 4825 9631 4741 9631 4826 9631 5467 9632 4825 9632 4826 9632 4811 9633 4701 9633 4813 9633 5467 9634 4827 9634 4828 9634 5467 9635 4815 9635 4829 9635 5467 9636 4830 9636 4732 9636 5467 9637 4824 9637 4831 9637 5357 9638 4964 9638 4893 9638 5357 9639 4893 9639 4832 9639 5357 9640 4832 9640 4891 9640 5357 9641 4891 9641 4890 9641 5357 9642 4890 9642 4889 9642 5357 9643 4889 9643 4888 9643 5357 9644 4888 9644 4887 9644 5357 9645 4887 9645 4917 9645 5357 9646 4917 9646 4894 9646 5357 9647 4894 9647 4927 9647 4833 9648 4896 9648 4892 9648 4833 9649 4834 9649 4896 9649 4833 9650 4835 9650 4834 9650 4834 9651 4835 9651 4847 9651 4846 9652 4847 9652 4848 9652 4899 9653 4848 9653 4836 9653 4906 9654 4836 9654 4903 9654 4907 9655 4903 9655 4851 9655 4912 9656 4851 9656 4837 9656 4838 9657 4837 9657 4852 9657 4841 9658 4852 9658 4853 9658 4922 9659 4853 9659 4920 9659 4839 9660 4920 9660 4855 9660 4839 9661 4922 9661 4920 9661 4839 9662 4840 9662 4922 9662 4922 9663 4840 9663 4924 9663 4841 9664 4924 9664 4923 9664 4838 9665 4923 9665 4842 9665 4912 9666 4842 9666 4916 9666 4907 9667 4916 9667 4843 9667 4906 9668 4843 9668 4844 9668 4899 9669 4844 9669 4845 9669 4846 9670 4845 9670 4900 9670 4834 9671 4900 9671 4896 9671 4834 9672 4846 9672 4900 9672 4834 9673 4847 9673 4846 9673 4835 9674 4856 9674 4847 9674 4847 9675 4856 9675 4849 9675 4848 9676 4849 9676 4898 9676 4836 9677 4898 9677 4850 9677 4903 9678 4850 9678 4905 9678 4851 9679 4905 9679 4859 9679 4837 9680 4859 9680 4915 9680 4852 9681 4915 9681 4860 9681 4853 9682 4860 9682 4921 9682 4920 9683 4921 9683 4854 9683 4855 9684 4854 9684 4942 9684 4855 9685 4920 9685 4854 9685 4856 9686 4857 9686 4849 9686 4849 9687 4857 9687 4858 9687 4898 9688 4858 9688 4897 9688 4850 9689 4897 9689 4863 9689 4905 9690 4863 9690 4902 9690 4859 9691 
4902 9691 4911 9691 4915 9692 4911 9692 4910 9692 4860 9693 4910 9693 4914 9693 4921 9694 4914 9694 4861 9694 4854 9695 4861 9695 4866 9695 4942 9696 4866 9696 4862 9696 4942 9697 4854 9697 4866 9697 4857 9698 4949 9698 4858 9698 4858 9699 4949 9699 4895 9699 4897 9700 4895 9700 4867 9700 4863 9701 4867 9701 4864 9701 4902 9702 4864 9702 4869 9702 4911 9703 4869 9703 4909 9703 4910 9704 4909 9704 4865 9704 4914 9705 4865 9705 4870 9705 4861 9706 4870 9706 4919 9706 4866 9707 4919 9707 4918 9707 4862 9708 4918 9708 4943 9708 4862 9709 4866 9709 4918 9709 4949 9710 4960 9710 4895 9710 4895 9711 4960 9711 4875 9711 4867 9712 4875 9712 4868 9712 4864 9713 4868 9713 4901 9713 4869 9714 4901 9714 4878 9714 4909 9715 4878 9715 4908 9715 4865 9716 4908 9716 4871 9716 4870 9717 4871 9717 4872 9717 4919 9718 4872 9718 4873 9718 4918 9719 4873 9719 4874 9719 4943 9720 4874 9720 4945 9720 4943 9721 4918 9721 4874 9721 4960 9722 4951 9722 4875 9722 4875 9723 4951 9723 4876 9723 4868 9724 4876 9724 4877 9724 4901 9725 4877 9725 4885 9725 4878 9726 4885 9726 4904 9726 4908 9727 4904 9727 4883 9727 4871 9728 4883 9728 4913 9728 4872 9729 4913 9729 4879 9729 4873 9730 4879 9730 4884 9730 4874 9731 4884 9731 4880 9731 4945 9732 4880 9732 5344 9732 4945 9733 4874 9733 4880 9733 4951 9734 3212 9734 4876 9734 4876 9735 3212 9735 4881 9735 3210 9736 4876 9736 4881 9736 3210 9737 4877 9737 4876 9737 3210 9738 4882 9738 4877 9738 4877 9739 4882 9739 4885 9739 4885 9740 4882 9740 3254 9740 4904 9741 3254 9741 3208 9741 4883 9742 3208 9742 3207 9742 4913 9743 3207 9743 3206 9743 4879 9744 3206 9744 4886 9744 4884 9745 4886 9745 3205 9745 4880 9746 3205 9746 5344 9746 4880 9747 4884 9747 3205 9747 4885 9748 3254 9748 4904 9748 4904 9749 3208 9749 4883 9749 4883 9750 3207 9750 4913 9750 4913 9751 3206 9751 4879 9751 4879 9752 4886 9752 4884 9752 4840 9753 4939 9753 4924 9753 4924 9754 4939 9754 4894 9754 4923 9755 4894 9755 4917 9755 4842 9756 4917 9756 4887 9756 4916 9757 4887 9757 4888 9757 4843 9758 4888 9758 4889 9758 4844 9759 4889 9759 4890 9759 4845 9760 4890 9760 4891 9760 4900 9761 4891 9761 4832 9761 4896 9762 4832 9762 4893 9762 4892 9763 4893 9763 4964 9763 4892 9764 4896 9764 4893 9764 4939 9765 4927 9765 4894 9765 4868 9766 4875 9766 4876 9766 4867 9767 4895 9767 4875 9767 4897 9768 4858 9768 4895 9768 4898 9769 4849 9769 4858 9769 4848 9770 4847 9770 4849 9770 4832 9771 4896 9771 4900 9771 4901 9772 4868 9772 4877 9772 4864 9773 4867 9773 4868 9773 4863 9774 4897 9774 4867 9774 4850 9775 4898 9775 4897 9775 4836 9776 4848 9776 4898 9776 4845 9777 4846 9777 4899 9777 4899 9778 4846 9778 4848 9778 4891 9779 4900 9779 4845 9779 4878 9780 4901 9780 4885 9780 4869 9781 4864 9781 4901 9781 4902 9782 4863 9782 4864 9782 4905 9783 4850 9783 4863 9783 4903 9784 4836 9784 4850 9784 4844 9785 4899 9785 4906 9785 4906 9786 4899 9786 4836 9786 4890 9787 4845 9787 4844 9787 4908 9788 4878 9788 4904 9788 4909 9789 4869 9789 4878 9789 4911 9790 4902 9790 4869 9790 4859 9791 4905 9791 4902 9791 4851 9792 4903 9792 4905 9792 4843 9793 4906 9793 4907 9793 4907 9794 4906 9794 4903 9794 4889 9795 4844 9795 4843 9795 4871 9796 4908 9796 4883 9796 4865 9797 4909 9797 4908 9797 4910 9798 4911 9798 4909 9798 4915 9799 4859 9799 4911 9799 4837 9800 4851 9800 4859 9800 4916 9801 4907 9801 4912 9801 4912 9802 4907 9802 4851 9802 4888 9803 4843 9803 4916 9803 4872 9804 4871 9804 4913 9804 4870 9805 4865 9805 4871 9805 4914 9806 4910 9806 4865 9806 4860 9807 4915 9807 4910 9807 4852 9808 4837 9808 4915 9808 4842 9809 4912 9809 4838 
[Omitted: a large block of raw numeric data — three integers per line, apparently triangle vertex indices of a mesh, covering original file lines 9809–11980 — belonging to a model/mesh data file in this diff.]
11980 5963 11980 5964 11981 5963 11981 5965 11981 5828 11982 5965 11982 5987 11982 5829 11983 5987 11983 6206 11983 5966 11984 6206 11984 6207 11984 5988 11985 6207 11985 6209 11985 5967 11986 6209 11986 6210 11986 5809 11987 6210 11987 6274 11987 5968 11988 6274 11988 6273 11988 5989 11989 6273 11989 6272 11989 5990 11990 6272 11990 5991 11990 5992 11991 5991 11991 6271 11991 5810 11992 6271 11992 6270 11992 5993 11993 6270 11993 6269 11993 5969 11994 6269 11994 5994 11994 5995 11995 5994 11995 5996 11995 5970 11996 5996 11996 5971 11996 5812 11997 5971 11997 6267 11997 5972 11998 6267 11998 5973 11998 5997 11999 5973 11999 5974 11999 5813 12000 5974 12000 6266 12000 5998 12001 6266 12001 5999 12001 5975 12002 5999 12002 6265 12002 6000 12003 6265 12003 5976 12003 5814 12004 5976 12004 5977 12004 6001 12005 5977 12005 6264 12005 5978 12006 6264 12006 6263 12006 5979 12007 6263 12007 6002 12007 6003 12008 6002 12008 6004 12008 5815 12009 6004 12009 6005 12009 5816 12010 6005 12010 6006 12010 6007 12011 6006 12011 6289 12011 5817 12012 6289 12012 6288 12012 5818 12013 6288 12013 6287 12013 5980 12014 6287 12014 6290 12014 5819 12015 6290 12015 6284 12015 5821 12016 6284 12016 5981 12016 6008 12017 5981 12017 6009 12017 6010 12018 6009 12018 6282 12018 5822 12019 6282 12019 6281 12019 5823 12020 6281 12020 5982 12020 6011 12021 5982 12021 5983 12021 5824 12022 5983 12022 6280 12022 5825 12023 6280 12023 6279 12023 5984 12024 6279 12024 5985 12024 6012 12025 5985 12025 6200 12025 5827 12026 6200 12026 6201 12026 6013 12027 6201 12027 6202 12027 6014 12028 6202 12028 6203 12028 5986 12029 6203 12029 6204 12029 5959 12030 5986 12030 6204 12030 5962 12031 5963 12031 5964 12031 5964 12032 5965 12032 5828 12032 5828 12033 5987 12033 5829 12033 5829 12034 6206 12034 5966 12034 5966 12035 6207 12035 5988 12035 5988 12036 6209 12036 5967 12036 5967 12037 6210 12037 5809 12037 5809 12038 6274 12038 5968 12038 5968 12039 6273 12039 5989 12039 5989 12040 6272 12040 5990 12040 5990 12041 5991 12041 5992 12041 5992 12042 6271 12042 5810 12042 5810 12043 6270 12043 5993 12043 5993 12044 6269 12044 5969 12044 5969 12045 5994 12045 5995 12045 5995 12046 5996 12046 5970 12046 5970 12047 5971 12047 5812 12047 5812 12048 6267 12048 5972 12048 5972 12049 5973 12049 5997 12049 5997 12050 5974 12050 5813 12050 5813 12051 6266 12051 5998 12051 5998 12052 5999 12052 5975 12052 5975 12053 6265 12053 6000 12053 6000 12054 5976 12054 5814 12054 5814 12055 5977 12055 6001 12055 6001 12056 6264 12056 5978 12056 5978 12057 6263 12057 5979 12057 5979 12058 6002 12058 6003 12058 6003 12059 6004 12059 5815 12059 5815 12060 6005 12060 5816 12060 5816 12061 6006 12061 6007 12061 6007 12062 6289 12062 5817 12062 5817 12063 6288 12063 5818 12063 5818 12064 6287 12064 5980 12064 5980 12065 6290 12065 5819 12065 5819 12066 6284 12066 5821 12066 5821 12067 5981 12067 6008 12067 6008 12068 6009 12068 6010 12068 6010 12069 6282 12069 5822 12069 5822 12070 6281 12070 5823 12070 5823 12071 5982 12071 6011 12071 6011 12072 5983 12072 5824 12072 5824 12073 6280 12073 5825 12073 5825 12074 6279 12074 5984 12074 5984 12075 5985 12075 6012 12075 6012 12076 6200 12076 5827 12076 5827 12077 6201 12077 6013 12077 6013 12078 6202 12078 6014 12078 6014 12079 6203 12079 5986 12079 6015 12080 5892 12080 6016 12080 6015 12081 5894 12081 5892 12081 6015 12082 6216 12082 5894 12082 5894 12083 6216 12083 5893 12083 5893 12084 6216 12084 6018 12084 6017 12085 6018 12085 6215 12085 6042 12086 6215 12086 6019 12086 6043 12087 6019 12087 6020 12087 6044 
12088 6020 12088 6214 12088 6045 12089 6214 12089 6046 12089 6047 12090 6046 12090 6213 12090 6021 12091 6213 12091 6212 12091 6048 12092 6212 12092 6022 12092 6049 12093 6022 12093 6023 12093 5895 12094 6023 12094 6050 12094 5896 12095 6050 12095 6051 12095 5808 12096 6051 12096 6025 12096 6024 12097 6025 12097 6052 12097 5807 12098 6052 12098 6248 12098 6026 12099 6248 12099 6027 12099 5806 12100 6027 12100 6247 12100 5805 12101 6247 12101 6028 12101 5804 12102 6028 12102 6029 12102 6053 12103 6029 12103 6054 12103 5803 12104 6054 12104 6030 12104 6055 12105 6030 12105 6031 12105 5802 12106 6031 12106 6245 12106 6056 12107 6245 12107 6246 12107 6057 12108 6246 12108 6244 12108 6058 12109 6244 12109 6032 12109 5800 12110 6032 12110 6243 12110 6059 12111 6243 12111 6033 12111 6060 12112 6033 12112 6061 12112 5798 12113 6061 12113 6242 12113 6034 12114 6242 12114 6241 12114 6062 12115 6241 12115 6063 12115 6064 12116 6063 12116 6240 12116 6065 12117 6240 12117 6066 12117 6035 12118 6066 12118 6239 12118 6067 12119 6239 12119 6036 12119 6037 12120 6036 12120 6238 12120 5885 12121 6238 12121 6297 12121 5889 12122 6297 12122 6295 12122 5884 12123 6295 12123 6039 12123 6038 12124 6039 12124 6294 12124 5882 12125 6294 12125 6068 12125 5881 12126 6068 12126 6069 12126 5887 12127 6069 12127 6070 12127 6071 12128 6070 12128 6072 12128 6040 12129 6072 12129 6220 12129 6073 12130 6220 12130 6219 12130 6041 12131 6219 12131 6218 12131 5890 12132 6218 12132 6074 12132 6075 12133 6074 12133 6016 12133 5892 12134 6075 12134 6016 12134 5893 12135 6018 12135 6017 12135 6017 12136 6215 12136 6042 12136 6042 12137 6019 12137 6043 12137 6043 12138 6020 12138 6044 12138 6044 12139 6214 12139 6045 12139 6045 12140 6046 12140 6047 12140 6047 12141 6213 12141 6021 12141 6021 12142 6212 12142 6048 12142 6048 12143 6022 12143 6049 12143 6049 12144 6023 12144 5895 12144 5895 12145 6050 12145 5896 12145 5896 12146 6051 12146 5808 12146 5808 12147 6025 12147 6024 12147 6024 12148 6052 12148 5807 12148 5807 12149 6248 12149 6026 12149 6026 12150 6027 12150 5806 12150 5806 12151 6247 12151 5805 12151 5805 12152 6028 12152 5804 12152 5804 12153 6029 12153 6053 12153 6053 12154 6054 12154 5803 12154 5803 12155 6030 12155 6055 12155 6055 12156 6031 12156 5802 12156 5802 12157 6245 12157 6056 12157 6056 12158 6246 12158 6057 12158 6057 12159 6244 12159 6058 12159 6058 12160 6032 12160 5800 12160 5800 12161 6243 12161 6059 12161 6059 12162 6033 12162 6060 12162 6060 12163 6061 12163 5798 12163 5798 12164 6242 12164 6034 12164 6034 12165 6241 12165 6062 12165 6062 12166 6063 12166 6064 12166 6064 12167 6240 12167 6065 12167 6065 12168 6066 12168 6035 12168 6035 12169 6239 12169 6067 12169 6067 12170 6036 12170 6037 12170 6037 12171 6238 12171 5885 12171 5885 12172 6297 12172 5889 12172 5889 12173 6295 12173 5884 12173 5884 12174 6039 12174 6038 12174 6038 12175 6294 12175 5882 12175 5882 12176 6068 12176 5881 12176 5881 12177 6069 12177 5887 12177 5887 12178 6070 12178 6071 12178 6071 12179 6072 12179 6040 12179 6040 12180 6220 12180 6073 12180 6073 12181 6219 12181 6041 12181 6041 12182 6218 12182 5890 12182 5890 12183 6074 12183 6075 12183 6077 12184 6076 12184 6224 12184 6077 12185 5859 12185 6076 12185 6077 12186 6223 12186 5859 12186 5859 12187 6223 12187 6078 12187 6078 12188 6223 12188 6079 12188 5880 12189 6079 12189 6221 12189 6116 12190 6221 12190 6222 12190 6117 12191 6222 12191 6080 12191 5888 12192 6080 12192 6298 12192 5883 12193 6298 12193 6081 12193 6118 12194 6081 12194 6082 12194 6083 12195 6082 12195 6084 
12195 6085 12196 6084 12196 6086 12196 5886 12197 6086 12197 6296 12197 5797 12198 6296 12198 6237 12198 5857 12199 6237 12199 6236 12199 5856 12200 6236 12200 6087 12200 6088 12201 6087 12201 6235 12201 6119 12202 6235 12202 6089 12202 5855 12203 6089 12203 6234 12203 5854 12204 6234 12204 6090 12204 6120 12205 6090 12205 6121 12205 5853 12206 6121 12206 6091 12206 6092 12207 6091 12207 6122 12207 5851 12208 6122 12208 6094 12208 6093 12209 6094 12209 6096 12209 6095 12210 6096 12210 6123 12210 6124 12211 6123 12211 6232 12211 5850 12212 6232 12212 6098 12212 6097 12213 6098 12213 6099 12213 5849 12214 6099 12214 6231 12214 6100 12215 6231 12215 6101 12215 6125 12216 6101 12216 6230 12216 6102 12217 6230 12217 6126 12217 5848 12218 6126 12218 6229 12218 6127 12219 6229 12219 6103 12219 5847 12220 6103 12220 6104 12220 6105 12221 6104 12221 6106 12221 5866 12222 6106 12222 6107 12222 5865 12223 6107 12223 6128 12223 6129 12224 6128 12224 6228 12224 5864 12225 6228 12225 6108 12225 6130 12226 6108 12226 6131 12226 6109 12227 6131 12227 6132 12227 5863 12228 6132 12228 6227 12228 5862 12229 6227 12229 6133 12229 6134 12230 6133 12230 6135 12230 6136 12231 6135 12231 6110 12231 5860 12232 6110 12232 6111 12232 5861 12233 6111 12233 6226 12233 6137 12234 6226 12234 6113 12234 6112 12235 6113 12235 6115 12235 6114 12236 6115 12236 6225 12236 6138 12237 6225 12237 6224 12237 6076 12238 6138 12238 6224 12238 6078 12239 6079 12239 5880 12239 5880 12240 6221 12240 6116 12240 6116 12241 6222 12241 6117 12241 6117 12242 6080 12242 5888 12242 5888 12243 6298 12243 5883 12243 5883 12244 6081 12244 6118 12244 6118 12245 6082 12245 6083 12245 6083 12246 6084 12246 6085 12246 6085 12247 6086 12247 5886 12247 5886 12248 6296 12248 5797 12248 5797 12249 6237 12249 5857 12249 5857 12250 6236 12250 5856 12250 5856 12251 6087 12251 6088 12251 6088 12252 6235 12252 6119 12252 6119 12253 6089 12253 5855 12253 5855 12254 6234 12254 5854 12254 5854 12255 6090 12255 6120 12255 6120 12256 6121 12256 5853 12256 5853 12257 6091 12257 6092 12257 6092 12258 6122 12258 5851 12258 5851 12259 6094 12259 6093 12259 6093 12260 6096 12260 6095 12260 6095 12261 6123 12261 6124 12261 6124 12262 6232 12262 5850 12262 5850 12263 6098 12263 6097 12263 6097 12264 6099 12264 5849 12264 5849 12265 6231 12265 6100 12265 6100 12266 6101 12266 6125 12266 6125 12267 6230 12267 6102 12267 6102 12268 6126 12268 5848 12268 5848 12269 6229 12269 6127 12269 6127 12270 6103 12270 5847 12270 5847 12271 6104 12271 6105 12271 6105 12272 6106 12272 5866 12272 5866 12273 6107 12273 5865 12273 5865 12274 6128 12274 6129 12274 6129 12275 6228 12275 5864 12275 5864 12276 6108 12276 6130 12276 6130 12277 6131 12277 6109 12277 6109 12278 6132 12278 5863 12278 5863 12279 6227 12279 5862 12279 5862 12280 6133 12280 6134 12280 6134 12281 6135 12281 6136 12281 6136 12282 6110 12282 5860 12282 5860 12283 6111 12283 5861 12283 5861 12284 6226 12284 6137 12284 6137 12285 6113 12285 6112 12285 6112 12286 6115 12286 6114 12286 6114 12287 6225 12287 6138 12287 6139 12288 6140 12288 6170 12288 6139 12289 6141 12289 6140 12289 6139 12290 6254 12290 6141 12290 6141 12291 6254 12291 5834 12291 5834 12292 6254 12292 6142 12292 5835 12293 6142 12293 6143 12293 6144 12294 6143 12294 6145 12294 5836 12295 6145 12295 6146 12295 6172 12296 6146 12296 6173 12296 5838 12297 6173 12297 6253 12297 5837 12298 6253 12298 6147 12298 6174 12299 6147 12299 6252 12299 5839 12300 6252 12300 6251 12300 6175 12301 6251 12301 6250 12301 6148 12302 6250 12302 6283 12302 5840 12303 6283 
12303 6176 12303 6177 12304 6176 12304 6285 12304 6178 12305 6285 12305 6286 12305 5841 12306 6286 12306 6149 12306 6179 12307 6149 12307 6151 12307 6150 12308 6151 12308 6291 12308 6180 12309 6291 12309 6292 12309 5843 12310 6292 12310 6293 12310 5844 12311 6293 12311 6152 12311 6181 12312 6152 12312 6182 12312 6153 12313 6182 12313 6154 12313 5845 12314 6154 12314 6183 12314 5867 12315 6183 12315 6262 12315 6155 12316 6262 12316 6156 12316 6184 12317 6156 12317 6185 12317 5868 12318 6185 12318 6261 12318 5869 12319 6261 12319 6186 12319 6187 12320 6186 12320 6157 12320 6188 12321 6157 12321 6260 12321 5871 12322 6260 12322 6158 12322 6189 12323 6158 12323 6160 12323 6159 12324 6160 12324 6259 12324 6190 12325 6259 12325 6161 12325 6191 12326 6161 12326 6258 12326 5872 12327 6258 12327 6257 12327 6162 12328 6257 12328 6192 12328 6193 12329 6192 12329 6256 12329 6194 12330 6256 12330 6163 12330 5873 12331 6163 12331 6195 12331 6196 12332 6195 12332 6278 12332 6164 12333 6278 12333 6197 12333 6165 12334 6197 12334 6166 12334 5875 12335 6166 12335 6167 12335 5876 12336 6167 12336 6168 12336 5877 12337 6168 12337 6198 12337 5878 12338 6198 12338 6199 12338 5879 12339 6199 12339 6255 12339 5832 12340 6255 12340 6169 12340 6171 12341 6169 12341 6170 12341 6140 12342 6171 12342 6170 12342 5834 12343 6142 12343 5835 12343 5835 12344 6143 12344 6144 12344 6144 12345 6145 12345 5836 12345 5836 12346 6146 12346 6172 12346 6172 12347 6173 12347 5838 12347 5838 12348 6253 12348 5837 12348 5837 12349 6147 12349 6174 12349 6174 12350 6252 12350 5839 12350 5839 12351 6251 12351 6175 12351 6175 12352 6250 12352 6148 12352 6148 12353 6283 12353 5840 12353 5840 12354 6176 12354 6177 12354 6177 12355 6285 12355 6178 12355 6178 12356 6286 12356 5841 12356 5841 12357 6149 12357 6179 12357 6179 12358 6151 12358 6150 12358 6150 12359 6291 12359 6180 12359 6180 12360 6292 12360 5843 12360 5843 12361 6293 12361 5844 12361 5844 12362 6152 12362 6181 12362 6181 12363 6182 12363 6153 12363 6153 12364 6154 12364 5845 12364 5845 12365 6183 12365 5867 12365 5867 12366 6262 12366 6155 12366 6155 12367 6156 12367 6184 12367 6184 12368 6185 12368 5868 12368 5868 12369 6261 12369 5869 12369 5869 12370 6186 12370 6187 12370 6187 12371 6157 12371 6188 12371 6188 12372 6260 12372 5871 12372 5871 12373 6158 12373 6189 12373 6189 12374 6160 12374 6159 12374 6159 12375 6259 12375 6190 12375 6190 12376 6161 12376 6191 12376 6191 12377 6258 12377 5872 12377 5872 12378 6257 12378 6162 12378 6162 12379 6192 12379 6193 12379 6193 12380 6256 12380 6194 12380 6194 12381 6163 12381 5873 12381 5873 12382 6195 12382 6196 12382 6196 12383 6278 12383 6164 12383 6164 12384 6197 12384 6165 12384 6165 12385 6166 12385 5875 12385 5875 12386 6167 12386 5876 12386 5876 12387 6168 12387 5877 12387 5877 12388 6198 12388 5878 12388 5878 12389 6199 12389 5879 12389 5879 12390 6255 12390 5832 12390 5832 12391 6169 12391 6171 12391 6268 12392 6217 12392 5811 12392 5811 12393 6217 12393 5891 12393 6217 12394 6275 12394 5891 12394 5891 12395 6275 12395 5858 12395 6275 12396 6276 12396 5858 12396 5858 12397 6276 12397 5870 12397 6276 12398 6268 12398 5870 12398 5870 12399 6268 12399 5811 12399 6208 12400 6200 12400 5776 12400 6208 12401 6201 12401 6200 12401 6208 12402 6202 12402 6201 12402 6208 12403 6203 12403 6202 12403 6208 12404 6204 12404 6203 12404 6208 12405 5960 12405 6204 12405 6208 12406 6205 12406 5960 12406 6208 12407 5963 12407 6205 12407 6208 12408 5965 12408 5963 12408 6208 12409 5987 12409 5965 12409 6208 12410 6206 12410 5987 12410 6208 
12411 6207 12411 6206 12411 6208 12412 6209 12412 6207 12412 6208 12413 6210 12413 6209 12413 6208 12414 6268 12414 6210 12414 6208 12415 6211 12415 6268 12415 6268 12416 6211 12416 6217 12416 6217 12417 6211 12417 6248 12417 6052 12418 6217 12418 6248 12418 6052 12419 6025 12419 6217 12419 6217 12420 6025 12420 6051 12420 6050 12421 6217 12421 6051 12421 6050 12422 6023 12422 6217 12422 6217 12423 6023 12423 6022 12423 6212 12424 6217 12424 6022 12424 6212 12425 6213 12425 6217 12425 6217 12426 6213 12426 6046 12426 6214 12427 6217 12427 6046 12427 6214 12428 6020 12428 6217 12428 6217 12429 6020 12429 6019 12429 6215 12430 6217 12430 6019 12430 6215 12431 6018 12431 6217 12431 6217 12432 6018 12432 6216 12432 6015 12433 6217 12433 6216 12433 6015 12434 6016 12434 6217 12434 6217 12435 6016 12435 6074 12435 6218 12436 6217 12436 6074 12436 6218 12437 6219 12437 6217 12437 6217 12438 6219 12438 6220 12438 6221 12439 6220 12439 6222 12439 6221 12440 6217 12440 6220 12440 6221 12441 6275 12441 6217 12441 6221 12442 6079 12442 6275 12442 6275 12443 6079 12443 6223 12443 6077 12444 6275 12444 6223 12444 6077 12445 6224 12445 6275 12445 6275 12446 6224 12446 6225 12446 6115 12447 6275 12447 6225 12447 6115 12448 6113 12448 6275 12448 6275 12449 6113 12449 6226 12449 6111 12450 6275 12450 6226 12450 6111 12451 6110 12451 6275 12451 6275 12452 6110 12452 6135 12452 6133 12453 6275 12453 6135 12453 6133 12454 6227 12454 6275 12454 6275 12455 6227 12455 6132 12455 6131 12456 6275 12456 6132 12456 6131 12457 6108 12457 6275 12457 6275 12458 6108 12458 6228 12458 6128 12459 6275 12459 6228 12459 6128 12460 6107 12460 6275 12460 6275 12461 6107 12461 6106 12461 6104 12462 6275 12462 6106 12462 6104 12463 6277 12463 6275 12463 6104 12464 6103 12464 6277 12464 6277 12465 6103 12465 6229 12465 6126 12466 6277 12466 6229 12466 6126 12467 6230 12467 6277 12467 6277 12468 6230 12468 6101 12468 6231 12469 6277 12469 6101 12469 6231 12470 6099 12470 6277 12470 6277 12471 6099 12471 6098 12471 6232 12472 6277 12472 6098 12472 6232 12473 6123 12473 6277 12473 6277 12474 6123 12474 6096 12474 6094 12475 6277 12475 6096 12475 6094 12476 6122 12476 6277 12476 6277 12477 6122 12477 6233 12477 6233 12478 6122 12478 6091 12478 6121 12479 6233 12479 6091 12479 6121 12480 6090 12480 6233 12480 6233 12481 6090 12481 6234 12481 6089 12482 6233 12482 6234 12482 6089 12483 6235 12483 6233 12483 6233 12484 6235 12484 6087 12484 6236 12485 6233 12485 6087 12485 6236 12486 6249 12486 6233 12486 6236 12487 6237 12487 6249 12487 6249 12488 6237 12488 6238 12488 6036 12489 6249 12489 6238 12489 6036 12490 6239 12490 6249 12490 6249 12491 6239 12491 6066 12491 6240 12492 6249 12492 6066 12492 6240 12493 6063 12493 6249 12493 6249 12494 6063 12494 6241 12494 6242 12495 6249 12495 6241 12495 6242 12496 6061 12496 6249 12496 6249 12497 6061 12497 6033 12497 6211 12498 6033 12498 6243 12498 6032 12499 6211 12499 6243 12499 6032 12500 6244 12500 6211 12500 6211 12501 6244 12501 6246 12501 6245 12502 6211 12502 6246 12502 6245 12503 6031 12503 6211 12503 6211 12504 6031 12504 6030 12504 6054 12505 6211 12505 6030 12505 6054 12506 6029 12506 6211 12506 6211 12507 6029 12507 6028 12507 6247 12508 6211 12508 6028 12508 6247 12509 6027 12509 6211 12509 6211 12510 6027 12510 6248 12510 6211 12511 6249 12511 6033 12511 6233 12512 6249 12512 5788 12512 5788 12513 6249 12513 5776 12513 6250 12514 5776 12514 6283 12514 6250 12515 5788 12515 5776 12515 6250 12516 6251 12516 5788 12516 5788 12517 6251 12517 6252 12517 6147 12518 5788 12518 6252 
12518 6147 12519 6253 12519 5788 12519 5788 12520 6253 12520 6173 12520 6146 12521 5788 12521 6173 12521 6146 12522 6145 12522 5788 12522 5788 12523 6145 12523 6143 12523 5787 12524 6143 12524 6142 12524 6254 12525 5787 12525 6142 12525 6254 12526 6139 12526 5787 12526 5787 12527 6139 12527 6170 12527 6169 12528 5787 12528 6170 12528 6169 12529 6255 12529 5787 12529 5787 12530 6255 12530 6199 12530 6198 12531 5787 12531 6199 12531 6198 12532 6168 12532 5787 12532 5787 12533 6168 12533 6167 12533 6166 12534 5787 12534 6167 12534 6166 12535 6197 12535 5787 12535 5787 12536 6197 12536 6278 12536 6276 12537 6278 12537 6195 12537 6163 12538 6276 12538 6195 12538 6163 12539 6256 12539 6276 12539 6276 12540 6256 12540 6192 12540 6257 12541 6276 12541 6192 12541 6257 12542 6258 12542 6276 12542 6276 12543 6258 12543 6161 12543 6259 12544 6276 12544 6161 12544 6259 12545 6160 12545 6276 12545 6276 12546 6160 12546 6158 12546 6260 12547 6276 12547 6158 12547 6260 12548 6157 12548 6276 12548 6276 12549 6157 12549 6186 12549 6261 12550 6276 12550 6186 12550 6261 12551 6185 12551 6276 12551 6276 12552 6185 12552 6156 12552 6262 12553 6276 12553 6156 12553 6262 12554 6183 12554 6276 12554 6276 12555 6183 12555 6154 12555 6182 12556 6276 12556 6154 12556 6182 12557 6152 12557 6276 12557 6276 12558 6152 12558 6002 12558 6268 12559 6002 12559 6263 12559 6264 12560 6268 12560 6263 12560 6264 12561 5977 12561 6268 12561 6268 12562 5977 12562 5976 12562 6265 12563 6268 12563 5976 12563 6265 12564 5999 12564 6268 12564 6268 12565 5999 12565 6266 12565 5974 12566 6268 12566 6266 12566 5974 12567 5973 12567 6268 12567 6268 12568 5973 12568 6267 12568 5971 12569 6268 12569 6267 12569 5971 12570 5996 12570 6268 12570 6268 12571 5996 12571 5994 12571 6269 12572 6268 12572 5994 12572 6269 12573 6270 12573 6268 12573 6268 12574 6270 12574 6271 12574 5991 12575 6268 12575 6271 12575 5991 12576 6272 12576 6268 12576 6268 12577 6272 12577 6273 12577 6274 12578 6268 12578 6273 12578 6274 12579 6210 12579 6268 12579 5787 12580 5788 12580 6143 12580 6275 12581 6277 12581 6276 12581 6276 12582 6277 12582 5787 12582 6278 12583 6276 12583 5787 12583 6200 12584 5985 12584 5776 12584 5776 12585 5985 12585 6279 12585 6280 12586 5776 12586 6279 12586 6280 12587 5983 12587 5776 12587 5776 12588 5983 12588 5982 12588 6281 12589 5776 12589 5982 12589 6281 12590 6282 12590 5776 12590 5776 12591 6282 12591 6009 12591 5981 12592 5776 12592 6009 12592 5981 12593 6283 12593 5776 12593 5981 12594 6176 12594 6283 12594 5981 12595 6284 12595 6176 12595 6176 12596 6284 12596 6285 12596 6285 12597 6284 12597 6290 12597 6286 12598 6290 12598 6287 12598 6149 12599 6287 12599 6288 12599 6151 12600 6288 12600 6289 12600 6291 12601 6289 12601 6006 12601 6292 12602 6006 12602 6005 12602 6293 12603 6005 12603 6004 12603 6152 12604 6004 12604 6002 12604 6152 12605 6293 12605 6004 12605 6285 12606 6290 12606 6286 12606 6286 12607 6287 12607 6149 12607 6149 12608 6288 12608 6151 12608 6151 12609 6289 12609 6291 12609 6291 12610 6006 12610 6292 12610 6292 12611 6005 12611 6293 12611 6276 12612 6002 12612 6268 12612 6220 12613 6072 12613 6222 12613 6222 12614 6072 12614 6080 12614 6080 12615 6072 12615 6070 12615 6298 12616 6070 12616 6069 12616 6081 12617 6069 12617 6068 12617 6082 12618 6068 12618 6294 12618 6084 12619 6294 12619 6039 12619 6086 12620 6039 12620 6295 12620 6296 12621 6295 12621 6297 12621 6237 12622 6297 12622 6238 12622 6237 12623 6296 12623 6297 12623 6080 12624 6070 12624 6298 12624 6298 12625 6069 12625 6081 12625 6081 12626 6068 
12626 6082 12626 6082 12627 6294 12627 6084 12627 6084 12628 6039 12628 6086 12628 6086 12629 6295 12629 6296 12629 6299 12630 6300 12630 6410 12630 6299 12631 6301 12631 6300 12631 6299 12632 6453 12632 6301 12632 6301 12633 6453 12633 6304 12633 6303 12634 6304 12634 6308 12634 6302 12635 6308 12635 6307 12635 6302 12636 6303 12636 6308 12636 6302 12637 6734 12637 6303 12637 6303 12638 6734 12638 6411 12638 6301 12639 6411 12639 6300 12639 6301 12640 6303 12640 6411 12640 6301 12641 6304 12641 6303 12641 6453 12642 6454 12642 6304 12642 6304 12643 6454 12643 6305 12643 6308 12644 6305 12644 6309 12644 6307 12645 6309 12645 6306 12645 6307 12646 6308 12646 6309 12646 6454 12647 6455 12647 6305 12647 6305 12648 6455 12648 6413 12648 6309 12649 6413 12649 6310 12649 6306 12650 6310 12650 6735 12650 6306 12651 6309 12651 6310 12651 6455 12652 6313 12652 6413 12652 6413 12653 6313 12653 6311 12653 6310 12654 6311 12654 6312 12654 6735 12655 6312 12655 6316 12655 6735 12656 6310 12656 6312 12656 6313 12657 6456 12657 6311 12657 6311 12658 6456 12658 6314 12658 6312 12659 6314 12659 6315 12659 6316 12660 6315 12660 6736 12660 6316 12661 6312 12661 6315 12661 6456 12662 6457 12662 6314 12662 6314 12663 6457 12663 6414 12663 6315 12664 6414 12664 6320 12664 6736 12665 6320 12665 6317 12665 6736 12666 6315 12666 6320 12666 6457 12667 6318 12667 6414 12667 6414 12668 6318 12668 6319 12668 6320 12669 6319 12669 6321 12669 6317 12670 6321 12670 6322 12670 6317 12671 6320 12671 6321 12671 6318 12672 6459 12672 6319 12672 6319 12673 6459 12673 6415 12673 6321 12674 6415 12674 6416 12674 6322 12675 6416 12675 6323 12675 6322 12676 6321 12676 6416 12676 6459 12677 6325 12677 6415 12677 6415 12678 6325 12678 6417 12678 6416 12679 6417 12679 6327 12679 6323 12680 6327 12680 6324 12680 6323 12681 6416 12681 6327 12681 6325 12682 6452 12682 6417 12682 6417 12683 6452 12683 6326 12683 6327 12684 6326 12684 6418 12684 6324 12685 6418 12685 6737 12685 6324 12686 6327 12686 6418 12686 6452 12687 6328 12687 6326 12687 6326 12688 6328 12688 6419 12688 6418 12689 6419 12689 6331 12689 6737 12690 6331 12690 6329 12690 6737 12691 6418 12691 6331 12691 6328 12692 6451 12692 6419 12692 6419 12693 6451 12693 6330 12693 6331 12694 6330 12694 6420 12694 6329 12695 6420 12695 6738 12695 6329 12696 6331 12696 6420 12696 6451 12697 6450 12697 6330 12697 6330 12698 6450 12698 6332 12698 6420 12699 6332 12699 6333 12699 6738 12700 6333 12700 6739 12700 6738 12701 6420 12701 6333 12701 6450 12702 6458 12702 6332 12702 6332 12703 6458 12703 6334 12703 6333 12704 6334 12704 6337 12704 6739 12705 6337 12705 6336 12705 6739 12706 6333 12706 6337 12706 6458 12707 6338 12707 6334 12707 6334 12708 6338 12708 6421 12708 6337 12709 6421 12709 6335 12709 6336 12710 6335 12710 6740 12710 6336 12711 6337 12711 6335 12711 6338 12712 6449 12712 6421 12712 6421 12713 6449 12713 6422 12713 6335 12714 6422 12714 6340 12714 6740 12715 6340 12715 6741 12715 6740 12716 6335 12716 6340 12716 6449 12717 6448 12717 6422 12717 6422 12718 6448 12718 6423 12718 6340 12719 6423 12719 6339 12719 6741 12720 6339 12720 6343 12720 6741 12721 6340 12721 6339 12721 6448 12722 6341 12722 6423 12722 6423 12723 6341 12723 6424 12723 6339 12724 6424 12724 6342 12724 6343 12725 6342 12725 6742 12725 6343 12726 6339 12726 6342 12726 6341 12727 6345 12727 6424 12727 6424 12728 6345 12728 6346 12728 6342 12729 6346 12729 6344 12729 6742 12730 6344 12730 6348 12730 6742 12731 6342 12731 6344 12731 6345 12732 6349 12732 6346 12732 6346 12733 6349 12733 6347 12733 6344 
12734 6347 12734 6351 12734 6348 12735 6351 12735 6353 12735 6348 12736 6344 12736 6351 12736 6349 12737 6354 12737 6347 12737 6347 12738 6354 12738 6350 12738 6351 12739 6350 12739 6352 12739 6353 12740 6352 12740 6355 12740 6353 12741 6351 12741 6352 12741 6354 12742 6436 12742 6350 12742 6350 12743 6436 12743 6425 12743 6352 12744 6425 12744 6356 12744 6355 12745 6356 12745 6359 12745 6355 12746 6352 12746 6356 12746 6436 12747 6357 12747 6425 12747 6425 12748 6357 12748 6358 12748 6356 12749 6358 12749 6360 12749 6359 12750 6360 12750 6361 12750 6359 12751 6356 12751 6360 12751 6357 12752 6442 12752 6358 12752 6358 12753 6442 12753 6362 12753 6360 12754 6362 12754 6363 12754 6361 12755 6363 12755 6743 12755 6361 12756 6360 12756 6363 12756 6442 12757 6443 12757 6362 12757 6362 12758 6443 12758 6364 12758 6363 12759 6364 12759 6426 12759 6743 12760 6426 12760 6367 12760 6743 12761 6363 12761 6426 12761 6443 12762 6365 12762 6364 12762 6364 12763 6365 12763 6366 12763 6426 12764 6366 12764 6368 12764 6367 12765 6368 12765 6745 12765 6367 12766 6426 12766 6368 12766 6365 12767 6439 12767 6366 12767 6366 12768 6439 12768 6427 12768 6368 12769 6427 12769 6428 12769 6745 12770 6428 12770 6746 12770 6745 12771 6368 12771 6428 12771 6439 12772 6445 12772 6427 12772 6427 12773 6445 12773 6429 12773 6428 12774 6429 12774 6369 12774 6746 12775 6369 12775 6747 12775 6746 12776 6428 12776 6369 12776 6445 12777 6370 12777 6429 12777 6429 12778 6370 12778 6430 12778 6369 12779 6430 12779 6372 12779 6747 12780 6372 12780 6748 12780 6747 12781 6369 12781 6372 12781 6370 12782 6447 12782 6430 12782 6430 12783 6447 12783 6371 12783 6372 12784 6371 12784 6373 12784 6748 12785 6373 12785 6374 12785 6748 12786 6372 12786 6373 12786 6447 12787 6440 12787 6371 12787 6371 12788 6440 12788 6375 12788 6373 12789 6375 12789 6431 12789 6374 12790 6431 12790 6749 12790 6374 12791 6373 12791 6431 12791 6440 12792 6377 12792 6375 12792 6375 12793 6377 12793 6379 12793 6431 12794 6379 12794 6376 12794 6749 12795 6376 12795 6380 12795 6749 12796 6431 12796 6376 12796 6377 12797 6378 12797 6379 12797 6379 12798 6378 12798 6382 12798 6376 12799 6382 12799 6384 12799 6380 12800 6384 12800 6381 12800 6380 12801 6376 12801 6384 12801 6378 12802 6441 12802 6382 12802 6382 12803 6441 12803 6383 12803 6384 12804 6383 12804 6386 12804 6381 12805 6386 12805 6751 12805 6381 12806 6384 12806 6386 12806 6441 12807 6387 12807 6383 12807 6383 12808 6387 12808 6432 12808 6386 12809 6432 12809 6385 12809 6751 12810 6385 12810 6388 12810 6751 12811 6386 12811 6385 12811 6387 12812 6391 12812 6432 12812 6432 12813 6391 12813 6433 12813 6385 12814 6433 12814 6390 12814 6388 12815 6390 12815 6389 12815 6388 12816 6385 12816 6390 12816 6391 12817 6446 12817 6433 12817 6433 12818 6446 12818 6392 12818 6390 12819 6392 12819 6393 12819 6389 12820 6393 12820 6394 12820 6389 12821 6390 12821 6393 12821 6446 12822 6395 12822 6392 12822 6392 12823 6395 12823 6434 12823 6393 12824 6434 12824 6396 12824 6394 12825 6396 12825 6730 12825 6394 12826 6393 12826 6396 12826 6395 12827 6444 12827 6434 12827 6434 12828 6444 12828 6397 12828 6396 12829 6397 12829 6398 12829 6730 12830 6398 12830 6399 12830 6730 12831 6396 12831 6398 12831 6444 12832 6400 12832 6397 12832 6397 12833 6400 12833 6435 12833 6398 12834 6435 12834 6401 12834 6399 12835 6401 12835 6403 12835 6399 12836 6398 12836 6401 12836 6400 12837 6438 12837 6435 12837 6435 12838 6438 12838 6402 12838 6401 12839 6402 12839 6404 12839 6403 12840 6404 12840 6731 12840 6403 12841 6401 12841 6404 
12841 6438 12842 6437 12842 6402 12842 6402 12843 6437 12843 6408 12843 6404 12844 6408 12844 6406 12844 6731 12845 6406 12845 6405 12845 6731 12846 6404 12846 6406 12846 6437 12847 6407 12847 6408 12847 6408 12848 6407 12848 6412 12848 6406 12849 6412 12849 6409 12849 6405 12850 6409 12850 6733 12850 6405 12851 6406 12851 6409 12851 6407 12852 6410 12852 6412 12852 6412 12853 6410 12853 6300 12853 6409 12854 6300 12854 6411 12854 6733 12855 6411 12855 6734 12855 6733 12856 6409 12856 6411 12856 6412 12857 6406 12857 6408 12857 6409 12858 6412 12858 6300 12858 6308 12859 6304 12859 6305 12859 6309 12860 6305 12860 6413 12860 6310 12861 6413 12861 6311 12861 6312 12862 6311 12862 6314 12862 6315 12863 6314 12863 6414 12863 6320 12864 6414 12864 6319 12864 6321 12865 6319 12865 6415 12865 6416 12866 6415 12866 6417 12866 6327 12867 6417 12867 6326 12867 6418 12868 6326 12868 6419 12868 6331 12869 6419 12869 6330 12869 6420 12870 6330 12870 6332 12870 6333 12871 6332 12871 6334 12871 6337 12872 6334 12872 6421 12872 6335 12873 6421 12873 6422 12873 6340 12874 6422 12874 6423 12874 6339 12875 6423 12875 6424 12875 6342 12876 6424 12876 6346 12876 6344 12877 6346 12877 6347 12877 6351 12878 6347 12878 6350 12878 6352 12879 6350 12879 6425 12879 6356 12880 6425 12880 6358 12880 6360 12881 6358 12881 6362 12881 6363 12882 6362 12882 6364 12882 6426 12883 6364 12883 6366 12883 6368 12884 6366 12884 6427 12884 6428 12885 6427 12885 6429 12885 6369 12886 6429 12886 6430 12886 6372 12887 6430 12887 6371 12887 6373 12888 6371 12888 6375 12888 6431 12889 6375 12889 6379 12889 6376 12890 6379 12890 6382 12890 6384 12891 6382 12891 6383 12891 6386 12892 6383 12892 6432 12892 6385 12893 6432 12893 6433 12893 6390 12894 6433 12894 6392 12894 6393 12895 6392 12895 6434 12895 6396 12896 6434 12896 6397 12896 6398 12897 6397 12897 6435 12897 6401 12898 6435 12898 6402 12898 6404 12899 6402 12899 6408 12899 6407 12900 6436 12900 6410 12900 6407 12901 6357 12901 6436 12901 6407 12902 6437 12902 6357 12902 6357 12903 6437 12903 6442 12903 6442 12904 6437 12904 6438 12904 6443 12905 6438 12905 6400 12905 6365 12906 6400 12906 6444 12906 6439 12907 6444 12907 6395 12907 6445 12908 6395 12908 6446 12908 6370 12909 6446 12909 6391 12909 6447 12910 6391 12910 6387 12910 6440 12911 6387 12911 6441 12911 6377 12912 6441 12912 6378 12912 6377 12913 6440 12913 6441 12913 6442 12914 6438 12914 6443 12914 6443 12915 6400 12915 6365 12915 6365 12916 6444 12916 6439 12916 6439 12917 6395 12917 6445 12917 6445 12918 6446 12918 6370 12918 6370 12919 6391 12919 6447 12919 6447 12920 6387 12920 6440 12920 6436 12921 6354 12921 6410 12921 6410 12922 6354 12922 6299 12922 6299 12923 6354 12923 6349 12923 6453 12924 6349 12924 6345 12924 6454 12925 6345 12925 6341 12925 6455 12926 6341 12926 6448 12926 6313 12927 6448 12927 6449 12927 6456 12928 6449 12928 6338 12928 6457 12929 6338 12929 6458 12929 6318 12930 6458 12930 6450 12930 6459 12931 6450 12931 6451 12931 6325 12932 6451 12932 6328 12932 6452 12933 6325 12933 6328 12933 6299 12934 6349 12934 6453 12934 6453 12935 6345 12935 6454 12935 6454 12936 6341 12936 6455 12936 6455 12937 6448 12937 6313 12937 6313 12938 6449 12938 6456 12938 6456 12939 6338 12939 6457 12939 6457 12940 6458 12940 6318 12940 6318 12941 6450 12941 6459 12941 6459 12942 6451 12942 6325 12942 6693 12943 5902 12943 6692 12943 6693 12944 5903 12944 5902 12944 6693 12945 6548 12945 5903 12945 5903 12946 6548 12946 6460 12946 6460 12947 6548 12947 6547 12947 5904 12948 6547 12948 6596 12948 5905 12949 6596 
12949 5906 12949 5905 12950 5904 12950 6596 12950 6460 12951 6547 12951 5904 12951 6596 12952 6595 12952 5906 12952 5906 12953 6595 12953 6461 12953 6461 12954 6595 12954 6594 12954 6466 12955 6594 12955 6467 12955 6468 12956 6467 12956 6462 12956 5907 12957 6462 12957 6463 12957 6465 12958 6463 12958 6464 12958 6465 12959 5907 12959 6463 12959 6461 12960 6594 12960 6466 12960 6466 12961 6467 12961 6468 12961 6468 12962 6462 12962 5907 12962 6463 12963 6608 12963 6464 12963 6464 12964 6608 12964 5908 12964 5908 12965 6608 12965 6609 12965 6469 12966 6609 12966 6470 12966 5909 12967 6470 12967 6612 12967 6471 12968 6612 12968 6472 12968 6471 12969 5909 12969 6612 12969 5908 12970 6609 12970 6469 12970 6469 12971 6470 12971 5909 12971 6612 12972 6473 12972 6472 12972 6472 12973 6473 12973 6475 12973 6475 12974 6473 12974 6614 12974 6476 12975 6614 12975 6474 12975 5911 12976 6474 12976 6477 12976 5912 12977 6477 12977 5913 12977 5912 12978 5911 12978 6477 12978 6475 12979 6614 12979 6476 12979 6476 12980 6474 12980 5911 12980 6477 12981 6478 12981 5913 12981 5913 12982 6478 12982 5914 12982 5914 12983 6478 12983 6588 12983 6479 12984 6588 12984 6622 12984 6480 12985 6622 12985 6586 12985 6481 12986 6586 12986 5915 12986 6481 12987 6480 12987 6586 12987 5914 12988 6588 12988 6479 12988 6479 12989 6622 12989 6480 12989 6586 12990 6482 12990 5915 12990 5915 12991 6482 12991 6483 12991 6483 12992 6482 12992 6585 12992 6486 12993 6585 12993 6487 12993 6488 12994 6487 12994 6484 12994 5916 12995 6484 12995 6489 12995 6485 12996 6489 12996 6490 12996 6485 12997 5916 12997 6489 12997 6483 12998 6585 12998 6486 12998 6486 12999 6487 12999 6488 12999 6488 13000 6484 13000 5916 13000 6489 13001 6491 13001 6490 13001 6490 13002 6491 13002 5917 13002 5917 13003 6491 13003 6583 13003 5918 13004 6583 13004 6492 13004 5920 13005 6492 13005 6580 13005 6493 13006 6580 13006 5921 13006 6493 13007 5920 13007 6580 13007 5917 13008 6583 13008 5918 13008 5918 13009 6492 13009 5920 13009 6580 13010 6494 13010 5921 13010 5921 13011 6494 13011 6496 13011 6496 13012 6494 13012 6636 13012 5922 13013 6636 13013 6579 13013 6497 13014 6579 13014 6498 13014 6495 13015 6498 13015 6499 13015 6495 13016 6497 13016 6498 13016 6496 13017 6636 13017 5922 13017 5922 13018 6579 13018 6497 13018 6498 13019 6501 13019 6499 13019 6499 13020 6501 13020 6500 13020 6500 13021 6501 13021 6502 13021 5923 13022 6502 13022 6503 13022 6505 13023 6503 13023 6575 13023 5924 13024 6575 13024 6574 13024 6504 13025 6574 13025 6506 13025 6504 13026 5924 13026 6574 13026 6500 13027 6502 13027 5923 13027 5923 13028 6503 13028 6505 13028 6505 13029 6575 13029 5924 13029 6574 13030 6573 13030 6506 13030 6506 13031 6573 13031 5926 13031 5926 13032 6573 13032 6571 13032 5927 13033 6571 13033 6507 13033 5928 13034 6507 13034 6569 13034 5929 13035 6569 13035 6508 13035 5929 13036 5928 13036 6569 13036 5926 13037 6571 13037 5927 13037 5927 13038 6507 13038 5928 13038 6569 13039 6568 13039 6508 13039 6508 13040 6568 13040 5930 13040 5930 13041 6568 13041 6567 13041 5931 13042 6567 13042 6566 13042 5932 13043 6566 13043 6565 13043 5933 13044 6565 13044 5934 13044 5933 13045 5932 13045 6565 13045 5930 13046 6567 13046 5931 13046 5931 13047 6566 13047 5932 13047 6565 13048 6653 13048 5934 13048 5934 13049 6653 13049 6512 13049 6512 13050 6653 13050 6513 13050 5935 13051 6513 13051 6509 13051 5936 13052 6509 13052 6510 13052 5937 13053 6510 13053 6514 13053 5938 13054 6514 13054 6511 13054 5938 13055 5937 13055 6514 13055 6512 13056 6513 13056 5935 13056 5935 
13057 6509 13057 5936 13057 5936 13058 6510 13058 5937 13058 6514 13059 6515 13059 6511 13059 6511 13060 6515 13060 6516 13060 6516 13061 6515 13061 6518 13061 6519 13062 6518 13062 6562 13062 6520 13063 6562 13063 6560 13063 6517 13064 6560 13064 6521 13064 6517 13065 6520 13065 6560 13065 6516 13066 6518 13066 6519 13066 6519 13067 6562 13067 6520 13067 6560 13068 6522 13068 6521 13068 6521 13069 6522 13069 6526 13069 6526 13070 6522 13070 6559 13070 5939 13071 6559 13071 6671 13071 6525 13072 6671 13072 6524 13072 6523 13073 6524 13073 6527 13073 6523 13074 6525 13074 6524 13074 6526 13075 6559 13075 5939 13075 5939 13076 6671 13076 6525 13076 6524 13077 6528 13077 6527 13077 6527 13078 6528 13078 5940 13078 5940 13079 6528 13079 6677 13079 5941 13080 6677 13080 6530 13080 6531 13081 6530 13081 6532 13081 6529 13082 6532 13082 5942 13082 6529 13083 6531 13083 6532 13083 5940 13084 6677 13084 5941 13084 5941 13085 6530 13085 6531 13085 6532 13086 6533 13086 5942 13086 5942 13087 6533 13087 6535 13087 6535 13088 6533 13088 6536 13088 6534 13089 6536 13089 6682 13089 5943 13090 6682 13090 6556 13090 6537 13091 6556 13091 6554 13091 5945 13092 6554 13092 6538 13092 5945 13093 6537 13093 6554 13093 6535 13094 6536 13094 6534 13094 6534 13095 6682 13095 5943 13095 5943 13096 6556 13096 6537 13096 6554 13097 6539 13097 6538 13097 6538 13098 6539 13098 5946 13098 5946 13099 6539 13099 6552 13099 5901 13100 6552 13100 6690 13100 6541 13101 6690 13101 6692 13101 6540 13102 6692 13102 5902 13102 6540 13103 6541 13103 6692 13103 5946 13104 6552 13104 5901 13104 5901 13105 6690 13105 6541 13105 6543 13106 6549 13106 6542 13106 6543 13107 6545 13107 6549 13107 6543 13108 6544 13108 6545 13108 6545 13109 6544 13109 6546 13109 6547 13110 6546 13110 6596 13110 6547 13111 6545 13111 6546 13111 6547 13112 6548 13112 6545 13112 6545 13113 6548 13113 6549 13113 6549 13114 6548 13114 6693 13114 6550 13115 6693 13115 6692 13115 6551 13116 6692 13116 6690 13116 6689 13117 6690 13117 6552 13117 6553 13118 6552 13118 6539 13118 6686 13119 6539 13119 6554 13119 6555 13120 6554 13120 6556 13120 6685 13121 6556 13121 6682 13121 6683 13122 6682 13122 6536 13122 6681 13123 6536 13123 6533 13123 6679 13124 6533 13124 6532 13124 6557 13125 6532 13125 6530 13125 6676 13126 6530 13126 6677 13126 6675 13127 6677 13127 6528 13127 6672 13128 6528 13128 6524 13128 6673 13129 6524 13129 6671 13129 6558 13130 6671 13130 6559 13130 6668 13131 6559 13131 6522 13131 6667 13132 6522 13132 6560 13132 6561 13133 6560 13133 6562 13133 6563 13134 6562 13134 6518 13134 6663 13135 6518 13135 6515 13135 6662 13136 6515 13136 6514 13136 6660 13137 6514 13137 6510 13137 6659 13138 6510 13138 6509 13138 6656 13139 6509 13139 6513 13139 6564 13140 6513 13140 6653 13140 6651 13141 6653 13141 6565 13141 6652 13142 6565 13142 6566 13142 6650 13143 6566 13143 6567 13143 6648 13144 6567 13144 6568 13144 6647 13145 6568 13145 6569 13145 6645 13146 6569 13146 6507 13146 6644 13147 6507 13147 6571 13147 6570 13148 6571 13148 6573 13148 6572 13149 6573 13149 6574 13149 6641 13150 6574 13150 6575 13150 6639 13151 6575 13151 6503 13151 6640 13152 6503 13152 6502 13152 6637 13153 6502 13153 6501 13153 6576 13154 6501 13154 6498 13154 6577 13155 6498 13155 6579 13155 6578 13156 6579 13156 6636 13156 6635 13157 6636 13157 6494 13157 6633 13158 6494 13158 6580 13158 6581 13159 6580 13159 6492 13159 6582 13160 6492 13160 6583 13160 6631 13161 6583 13161 6491 13161 6630 13162 6491 13162 6489 13162 6584 13163 6489 13163 6484 13163 6628 13164 6484 13164 6487 
13164 6627 13165 6487 13165 6585 13165 6626 13166 6585 13166 6482 13166 6624 13167 6482 13167 6586 13167 6623 13168 6586 13168 6622 13168 6587 13169 6622 13169 6588 13169 6619 13170 6588 13170 6478 13170 6617 13171 6478 13171 6477 13171 6589 13172 6477 13172 6474 13172 6616 13173 6474 13173 6614 13173 6615 13174 6614 13174 6473 13174 6590 13175 6473 13175 6612 13175 6611 13176 6612 13176 6470 13176 6591 13177 6470 13177 6609 13177 6592 13178 6609 13178 6608 13178 6607 13179 6608 13179 6463 13179 6606 13180 6463 13180 6462 13180 6604 13181 6462 13181 6467 13181 6593 13182 6467 13182 6594 13182 6601 13183 6594 13183 6595 13183 6598 13184 6595 13184 6596 13184 6546 13185 6598 13185 6596 13185 6546 13186 6597 13186 6598 13186 6546 13187 6544 13187 6597 13187 6597 13188 6599 13188 6598 13188 6598 13189 6599 13189 6601 13189 6595 13190 6598 13190 6601 13190 6599 13191 6600 13191 6601 13191 6601 13192 6600 13192 6593 13192 6594 13193 6601 13193 6593 13193 6600 13194 6602 13194 6593 13194 6593 13195 6602 13195 6604 13195 6467 13196 6593 13196 6604 13196 6602 13197 6603 13197 6604 13197 6604 13198 6603 13198 6606 13198 6462 13199 6604 13199 6606 13199 6603 13200 6605 13200 6606 13200 6606 13201 6605 13201 6607 13201 6463 13202 6606 13202 6607 13202 6605 13203 6714 13203 6607 13203 6607 13204 6714 13204 6592 13204 6608 13205 6607 13205 6592 13205 6714 13206 6721 13206 6592 13206 6592 13207 6721 13207 6591 13207 6609 13208 6592 13208 6591 13208 6721 13209 6610 13209 6591 13209 6591 13210 6610 13210 6611 13210 6470 13211 6591 13211 6611 13211 6610 13212 6613 13212 6611 13212 6611 13213 6613 13213 6590 13213 6612 13214 6611 13214 6590 13214 6613 13215 6723 13215 6590 13215 6590 13216 6723 13216 6615 13216 6473 13217 6590 13217 6615 13217 6723 13218 6724 13218 6615 13218 6615 13219 6724 13219 6616 13219 6614 13220 6615 13220 6616 13220 6724 13221 6725 13221 6616 13221 6616 13222 6725 13222 6589 13222 6474 13223 6616 13223 6589 13223 6725 13224 6718 13224 6589 13224 6589 13225 6718 13225 6617 13225 6477 13226 6589 13226 6617 13226 6718 13227 6618 13227 6617 13227 6617 13228 6618 13228 6619 13228 6478 13229 6617 13229 6619 13229 6618 13230 6620 13230 6619 13230 6619 13231 6620 13231 6587 13231 6588 13232 6619 13232 6587 13232 6620 13233 6621 13233 6587 13233 6587 13234 6621 13234 6623 13234 6622 13235 6587 13235 6623 13235 6621 13236 6726 13236 6623 13236 6623 13237 6726 13237 6624 13237 6586 13238 6623 13238 6624 13238 6726 13239 6719 13239 6624 13239 6624 13240 6719 13240 6626 13240 6482 13241 6624 13241 6626 13241 6719 13242 6625 13242 6626 13242 6626 13243 6625 13243 6627 13243 6585 13244 6626 13244 6627 13244 6625 13245 6717 13245 6627 13245 6627 13246 6717 13246 6628 13246 6487 13247 6627 13247 6628 13247 6717 13248 6629 13248 6628 13248 6628 13249 6629 13249 6584 13249 6484 13250 6628 13250 6584 13250 6629 13251 6716 13251 6584 13251 6584 13252 6716 13252 6630 13252 6489 13253 6584 13253 6630 13253 6716 13254 6722 13254 6630 13254 6630 13255 6722 13255 6631 13255 6491 13256 6630 13256 6631 13256 6722 13257 6715 13257 6631 13257 6631 13258 6715 13258 6582 13258 6583 13259 6631 13259 6582 13259 6715 13260 6713 13260 6582 13260 6582 13261 6713 13261 6581 13261 6492 13262 6582 13262 6581 13262 6713 13263 6632 13263 6581 13263 6581 13264 6632 13264 6633 13264 6580 13265 6581 13265 6633 13265 6632 13266 6634 13266 6633 13266 6633 13267 6634 13267 6635 13267 6494 13268 6633 13268 6635 13268 6634 13269 6712 13269 6635 13269 6635 13270 6712 13270 6578 13270 6636 13271 6635 13271 6578 13271 6712 13272 6711 
13272 6578 13272 6578 13273 6711 13273 6577 13273 6579 13274 6578 13274 6577 13274 6711 13275 6710 13275 6577 13275 6577 13276 6710 13276 6576 13276 6498 13277 6577 13277 6576 13277 6710 13278 6709 13278 6576 13278 6576 13279 6709 13279 6637 13279 6501 13280 6576 13280 6637 13280 6709 13281 6720 13281 6637 13281 6637 13282 6720 13282 6640 13282 6502 13283 6637 13283 6640 13283 6720 13284 6638 13284 6640 13284 6640 13285 6638 13285 6639 13285 6503 13286 6640 13286 6639 13286 6638 13287 6694 13287 6639 13287 6639 13288 6694 13288 6641 13288 6575 13289 6639 13289 6641 13289 6694 13290 6696 13290 6641 13290 6641 13291 6696 13291 6572 13291 6574 13292 6641 13292 6572 13292 6696 13293 6642 13293 6572 13293 6572 13294 6642 13294 6570 13294 6573 13295 6572 13295 6570 13295 6642 13296 6703 13296 6570 13296 6570 13297 6703 13297 6644 13297 6571 13298 6570 13298 6644 13298 6703 13299 6643 13299 6644 13299 6644 13300 6643 13300 6645 13300 6507 13301 6644 13301 6645 13301 6643 13302 6646 13302 6645 13302 6645 13303 6646 13303 6647 13303 6569 13304 6645 13304 6647 13304 6646 13305 6704 13305 6647 13305 6647 13306 6704 13306 6648 13306 6568 13307 6647 13307 6648 13307 6704 13308 6705 13308 6648 13308 6648 13309 6705 13309 6650 13309 6567 13310 6648 13310 6650 13310 6705 13311 6649 13311 6650 13311 6650 13312 6649 13312 6652 13312 6566 13313 6650 13313 6652 13313 6649 13314 6699 13314 6652 13314 6652 13315 6699 13315 6651 13315 6565 13316 6652 13316 6651 13316 6699 13317 6654 13317 6651 13317 6651 13318 6654 13318 6564 13318 6653 13319 6651 13319 6564 13319 6654 13320 6655 13320 6564 13320 6564 13321 6655 13321 6656 13321 6513 13322 6564 13322 6656 13322 6655 13323 6657 13323 6656 13323 6656 13324 6657 13324 6659 13324 6509 13325 6656 13325 6659 13325 6657 13326 6658 13326 6659 13326 6659 13327 6658 13327 6660 13327 6510 13328 6659 13328 6660 13328 6658 13329 6708 13329 6660 13329 6660 13330 6708 13330 6662 13330 6514 13331 6660 13331 6662 13331 6708 13332 6661 13332 6662 13332 6662 13333 6661 13333 6663 13333 6515 13334 6662 13334 6663 13334 6661 13335 6664 13335 6663 13335 6663 13336 6664 13336 6563 13336 6518 13337 6663 13337 6563 13337 6664 13338 6665 13338 6563 13338 6563 13339 6665 13339 6561 13339 6562 13340 6563 13340 6561 13340 6665 13341 6666 13341 6561 13341 6561 13342 6666 13342 6667 13342 6560 13343 6561 13343 6667 13343 6666 13344 6669 13344 6667 13344 6667 13345 6669 13345 6668 13345 6522 13346 6667 13346 6668 13346 6669 13347 6702 13347 6668 13347 6668 13348 6702 13348 6558 13348 6559 13349 6668 13349 6558 13349 6702 13350 6670 13350 6558 13350 6558 13351 6670 13351 6673 13351 6671 13352 6558 13352 6673 13352 6670 13353 6707 13353 6673 13353 6673 13354 6707 13354 6672 13354 6524 13355 6673 13355 6672 13355 6707 13356 6701 13356 6672 13356 6672 13357 6701 13357 6675 13357 6528 13358 6672 13358 6675 13358 6701 13359 6674 13359 6675 13359 6675 13360 6674 13360 6676 13360 6677 13361 6675 13361 6676 13361 6674 13362 6678 13362 6676 13362 6676 13363 6678 13363 6557 13363 6530 13364 6676 13364 6557 13364 6678 13365 6700 13365 6557 13365 6557 13366 6700 13366 6679 13366 6532 13367 6557 13367 6679 13367 6700 13368 6706 13368 6679 13368 6679 13369 6706 13369 6681 13369 6533 13370 6679 13370 6681 13370 6706 13371 6680 13371 6681 13371 6681 13372 6680 13372 6683 13372 6536 13373 6681 13373 6683 13373 6680 13374 6684 13374 6683 13374 6683 13375 6684 13375 6685 13375 6682 13376 6683 13376 6685 13376 6684 13377 6698 13377 6685 13377 6685 13378 6698 13378 6555 13378 6556 13379 6685 13379 6555 13379 6698 
- [previous COLLADA mesh geometry data removed: polygon index lists and vertex arrays of the old model]
+ [new COLLADA mesh geometry data added: Blender 2.80.75 export (created/modified 2019-10-18T18:38:43, Z_UP axis), consisting of vertex position arrays, normal arrays, texture-coordinate arrays, and polygon index lists for the model's meshes]
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + + + + + + + + + + + + +

10 4 5 4

+
+ + + + +

2 0 0 1 0 1 3 0 2 0 1 3 5 1 4 1 1 5 4 2 6 6 2 7 8 2 8 9 3 9 2 3 10 3 3 11 39 4 12 15 4 13 5 4 14 10 5 15 8 5 16 11 5 17 14 6 18 6 6 19 7 6 20 14 7 21 0 7 22 16 7 23 17 8 24 5 8 25 15 8 26 17 9 27 3 9 28 1 9 29 12 10 30 9 10 31 3 10 32 18 11 33 8 11 34 14 11 35 17 12 36 10 12 37 12 12 38 19 13 39 9 13 40 13 13 41 2 14 42 16 14 43 0 14 44 24 15 45 21 15 46 23 15 47 22 16 48 20 16 49 21 16 50 20 17 51 10 17 52 26 17 53 12 18 54 22 18 55 13 18 56 23 19 57 25 19 58 24 19 59 13 20 60 22 20 61 24 20 62 28 21 63 24 21 64 25 21 65 13 22 66 24 22 67 28 22 68 29 23 69 26 23 70 10 23 71 27 24 72 30 24 73 34 24 74 32 25 75 28 25 76 25 25 77 28 26 78 19 26 79 13 26 80 11 27 81 29 27 82 10 27 83 36 28 84 27 28 85 41 28 86 37 29 87 25 29 88 35 29 89 43 30 90 33 30 91 29 30 92 45 31 93 32 31 94 42 31 95 42 32 96 32 32 97 37 32 98 19 33 99 46 33 100 18 33 101 6 34 102 5 34 103 7 34 104 29 35 105 11 35 106 43 35 107 34 36 108 48 36 109 44 36 110 41 37 111 45 37 112 36 37 113 47 38 114 18 38 115 46 38 116 47 39 117 43 39 118 11 39 119 43 40 120 47 40 121 40 40 122 58 16 123 68 16 124 70 16 125 53 41 126 32 41 127 41 41 128 38 42 129 49 42 130 46 42 131 21 43 132 30 43 133 23 43 134 40 12 135 47 12 136 50 12 137 34 44 138 51 44 139 52 44 140 58 45 141 70 45 142 56 45 143 46 46 144 49 46 145 55 46 146 21 47 147 26 47 148 33 47 149 56 48 150 40 48 151 50 48 152 52 49 153 51 49 154 57 49 155 49 50 156 65 50 157 60 50 158 62 51 159 49 51 160 60 51 161 50 52 162 57 52 163 58 52 164 58 53 165 56 53 166 50 53 167 27 16 168 35 16 169 25 16 170 69 54 171 50 54 172 55 54 173 56 55 174 63 55 175 48 55 176 55 56 177 67 56 178 69 56 179 44 36 180 63 36 181 59 36 182 42 45 183 37 45 184 54 45 185 69 57 186 65 57 187 53 57 188 65 58 189 54 58 190 60 58 191 66 36 192 42 36 193 54 36 194 31 16 195 61 16 196 35 16 197 70 59 198 67 59 199 62 59 200 61 60 201 37 60 202 35 60 203 59 61 204 64 61 205 67 61 206 91 62 207 90 62 208 92 62 209 87 63 210 90 63 211 89 63 212 85 64 213 88 64 214 87 64 215 154 65 216 152 65 217 151 65 218 156 66 219 154 66 220 155 66 221 156 67 222 158 67 223 157 67 224 93 68 225 96 68 226 95 68 227 100 69 228 98 69 229 97 69 230 95 70 231 97 70 232 98 70 233 103 16 234 101 16 235 104 16 236 99 71 237 101 71 238 102 71 239 108 72 240 106 72 241 107 72 242 105 73 243 104 73 244 106 73 245 109 74 246 112 74 247 111 74 248 110 75 249 108 75 250 107 75 251 137 76 252 136 76 253 138 76 254 140 77 255 138 77 256 139 77 257 85 78 258 84 78 259 86 78 260 135 79 261 133 79 262 136 79 263 79 80 264 82 80 265 81 80 266 81 81 267 84 81 268 83 81 269 78 82 270 75 82 271 76 82 272 74 83 273 72 83 274 73 83 275 80 84 276 77 84 277 78 84 278 75 85 279 74 85 280 76 85 281 194 86 282 191 86 283 192 86 284 190 87 285 192 87 286 191 87 287 188 88 288 190 88 289 187 88 290 187 89 291 186 89 292 188 89 293 186 90 294 183 90 295 184 90 296 179 91 297 181 91 298 182 91 299 182 92 300 184 92 301 183 92 302 180 93 303 177 93 304 178 93 305 178 94 306 175 94 307 176 94 308 73 95 309 193 95 310 194 95 311 187 96 312 252 96 313 185 96 314 179 97 315 248 97 316 177 97 317 98 98 318 207 98 319 95 98 320 77 99 321 199 99 322 198 99 323 165 100 324 243 100 325 242 100 326 117 101 327 217 101 328 115 101 329 117 102 330 219 102 331 218 102 332 114 103 333 217 103 334 216 103 335 129 104 336 225 104 337 224 104 338 81 105 339 199 105 340 79 105 341 85 106 342 201 106 343 83 106 344 190 107 345 255 107 346 254 107 347 72 108 348 256 108 349 193 108 350 123 109 351 222 109 352 221 109 353 121 110 354 219 110 355 120 110 356 103 111 357 210 
111 358 102 111 359 191 112 360 256 112 361 255 112 362 137 113 363 227 113 364 135 113 365 149 114 366 233 114 367 147 114 368 105 115 369 213 115 370 212 115 371 157 116 372 237 116 373 156 116 374 128 117 375 224 117 376 223 117 377 170 118 378 243 118 379 167 118 380 146 119 381 233 119 382 232 119 383 105 120 384 211 120 385 103 120 386 98 121 387 209 121 388 208 121 389 83 122 390 200 122 391 81 122 392 111 123 393 216 123 394 215 123 395 153 124 396 237 124 397 236 124 398 140 125 399 228 125 400 137 125 401 72 126 402 195 126 403 196 126 404 143 127 405 230 127 406 141 127 407 153 128 408 235 128 409 152 128 410 177 129 411 247 129 412 175 129 413 89 130 414 205 130 415 204 130 416 165 131 417 241 131 418 163 131 419 134 132 420 225 132 421 131 132 422 121 133 423 221 133 424 220 133 425 87 134 426 204 134 427 203 134 428 108 135 429 214 135 430 213 135 431 175 136 432 246 136 433 173 136 434 179 137 435 250 137 436 249 137 437 182 138 438 251 138 439 250 138 440 93 139 441 207 139 442 206 139 443 93 140 444 205 140 445 91 140 446 162 141 447 241 141 448 240 141 449 162 142 450 239 142 451 159 142 452 75 143 453 195 143 454 71 143 455 77 144 456 197 144 457 75 144 458 170 145 459 245 145 460 244 145 461 190 146 462 253 146 463 187 146 464 146 147 465 231 147 466 143 147 467 134 148 468 227 148 469 226 148 470 102 149 471 209 149 472 99 149 473 111 150 474 214 150 475 109 150 476 149 151 477 235 151 478 234 151 479 173 152 480 245 152 481 171 152 482 140 153 483 230 153 484 229 153 485 185 154 486 251 154 487 183 154 488 128 155 489 222 155 490 125 155 491 172 156 492 139 156 493 107 156 494 87 157 495 202 157 496 85 157 497 159 158 498 238 158 499 157 158 500 45 156 501 66 156 502 36 156 503 64 159 504 36 159 505 66 159 506 63 160 507 64 160 508 59 160 509 68 156 510 57 156 511 51 156 512 59 161 513 51 161 514 44 161 515 30 162 516 40 162 517 34 162 518 140 163 519 142 163 520 141 163 521 143 164 522 142 164 523 144 164 524 144 165 525 146 165 526 143 165 527 146 166 528 148 166 529 147 166 530 150 167 531 147 167 532 148 167 533 152 168 534 150 168 535 151 168 536 134 169 537 132 169 538 133 169 539 131 170 540 130 170 541 132 170 542 130 171 543 128 171 544 127 171 545 125 172 546 127 172 547 128 172 548 125 173 549 124 173 550 126 173 551 123 174 552 122 174 553 124 174 554 122 175 555 120 175 556 119 175 557 118 176 558 120 176 559 117 176 560 117 177 561 116 177 562 118 177 563 114 178 564 116 178 565 115 178 566 112 179 567 114 179 568 111 179 569 93 180 570 92 180 571 94 180 572 159 181 573 158 181 574 160 181 575 160 182 576 162 182 577 159 182 578 164 183 579 162 183 580 161 183 581 165 36 582 164 36 583 166 36 584 167 184 585 166 184 586 168 184 587 170 185 588 168 185 589 169 185 590 172 186 591 170 186 592 169 186 593 173 187 594 172 187 595 174 187 596 175 188 597 174 188 598 176 188 599 16 189 600 226 189 601 14 189 602 19 156 603 18 156 604 196 156 605 16 156 606 210 156 607 211 156 608 14 190 609 242 190 610 243 190 611 27 191 612 34 191 613 69 191 614 49 192 615 28 192 616 32 192 617 47 193 618 55 193 619 50 193 620 61 194 621 63 194 622 62 194 623 2 195 624 0 195 625 1 195 626 0 196 627 7 196 628 5 196 629 39 197 630 10 197 631 15 197 632 10 198 633 39 198 634 8 198 635 14 199 636 8 199 637 6 199 638 14 200 639 7 200 640 0 200 641 17 201 642 1 201 643 5 201 644 17 36 645 12 36 646 3 36 647 12 202 648 13 202 649 9 202 650 18 203 651 11 203 652 8 203 653 17 12 654 15 12 655 10 12 656 19 204 657 16 204 658 9 204 659 2 205 660 9 205 661 16 205 662 24 206 663 22 206 
664 21 206 665 22 207 666 12 207 667 20 207 668 20 208 669 12 208 670 10 208 671 23 209 672 27 209 673 25 209 674 29 36 675 33 36 676 26 36 677 27 210 678 23 210 679 30 210 680 28 211 681 38 211 682 19 211 683 36 212 684 31 212 685 27 212 686 37 213 687 32 213 688 25 213 689 43 214 690 30 214 691 33 214 692 45 215 693 41 215 694 32 215 695 19 216 696 38 216 697 46 216 698 6 217 699 39 217 700 5 217 701 34 36 702 40 36 703 48 36 704 47 218 705 11 218 706 18 218 707 58 16 708 57 16 709 68 16 710 53 219 711 49 219 712 32 219 713 21 220 714 33 220 715 30 220 716 34 221 717 44 221 718 51 221 719 21 47 720 20 47 721 26 47 722 56 222 723 48 222 724 40 222 725 49 223 726 53 223 727 65 223 728 62 224 729 55 224 730 49 224 731 50 225 732 52 225 733 57 225 734 27 16 735 31 16 736 35 16 737 69 226 738 52 226 739 50 226 740 56 227 741 70 227 742 63 227 743 55 228 744 62 228 745 67 228 746 44 36 747 48 36 748 63 36 749 69 229 750 67 229 751 65 229 752 65 230 753 66 230 754 54 230 755 66 36 756 45 36 757 42 36 758 31 16 759 64 16 760 61 16 761 70 231 762 68 231 763 67 231 764 61 232 765 54 232 766 37 232 767 66 233 768 65 233 769 64 233 770 65 234 771 67 234 772 64 234 773 67 235 774 68 235 775 59 235 776 91 236 777 89 236 778 90 236 779 87 237 780 88 237 781 90 237 782 85 238 783 86 238 784 88 238 785 154 65 786 153 65 787 152 65 788 156 239 789 153 239 790 154 239 791 156 240 792 155 240 793 158 240 794 93 241 795 94 241 796 96 241 797 100 242 798 99 242 799 98 242 800 95 243 801 96 243 802 97 243 803 103 16 804 102 16 805 101 16 806 99 244 807 100 244 808 101 244 809 108 245 810 105 245 811 106 245 812 105 246 813 103 246 814 104 246 815 109 247 816 110 247 817 112 247 818 110 248 819 109 248 820 108 248 821 137 249 822 135 249 823 136 249 824 140 250 825 137 250 826 138 250 827 85 251 828 83 251 829 84 251 830 135 79 831 134 79 832 133 79 833 79 252 834 80 252 835 82 252 836 81 253 837 82 253 838 84 253 839 78 254 840 77 254 841 75 254 842 74 83 843 71 83 844 72 83 845 80 255 846 79 255 847 77 255 848 75 256 849 71 256 850 74 256 851 194 257 852 193 257 853 191 257 854 190 258 855 189 258 856 192 258 857 188 259 858 189 259 859 190 259 860 187 260 861 185 260 862 186 260 863 186 261 864 185 261 865 183 261 866 179 262 867 180 262 868 181 262 869 182 263 870 181 263 871 184 263 872 180 264 873 179 264 874 177 264 875 178 265 876 177 265 877 175 265 878 73 95 879 72 95 880 193 95 881 187 266 882 253 266 883 252 266 884 179 267 885 249 267 886 248 267 887 98 268 888 208 268 889 207 268 890 77 269 891 79 269 892 199 269 893 165 270 894 167 270 895 243 270 896 117 271 897 218 271 898 217 271 899 117 272 900 120 272 901 219 272 902 114 273 903 115 273 904 217 273 905 129 274 906 131 274 907 225 274 908 81 275 909 200 275 910 199 275 911 85 276 912 202 276 913 201 276 914 190 277 915 191 277 916 255 277 917 72 278 918 196 278 919 256 278 920 123 279 921 125 279 922 222 279 923 121 280 924 220 280 925 219 280 926 103 281 927 211 281 928 210 281 929 191 282 930 193 282 931 256 282 932 137 283 933 228 283 934 227 283 935 149 284 936 234 284 937 233 284 938 105 285 939 108 285 940 213 285 941 157 286 942 238 286 943 237 286 944 128 287 945 129 287 946 224 287 947 170 288 948 244 288 949 243 288 950 146 289 951 147 289 952 233 289 953 105 290 954 212 290 955 211 290 956 98 291 957 99 291 958 209 291 959 83 292 960 201 292 961 200 292 962 111 293 963 114 293 964 216 293 965 153 294 966 156 294 967 237 294 968 140 295 969 229 295 970 228 295 971 72 296 972 71 296 973 195 296 974 143 297 975 231 297 976 230 297 977 
153 298 978 236 298 979 235 298 980 177 299 981 248 299 982 247 299 983 89 300 984 91 300 985 205 300 986 165 301 987 242 301 988 241 301 989 134 302 990 226 302 991 225 302 992 121 303 993 123 303 994 221 303 995 87 304 996 89 304 997 204 304 998 108 305 999 109 305 1000 214 305 1001 175 306 1002 247 306 1003 246 306 1004 179 307 1005 182 307 1006 250 307 1007 182 308 1008 183 308 1009 251 308 1010 93 309 1011 95 309 1012 207 309 1013 93 310 1014 206 310 1015 205 310 1016 162 311 1017 163 311 1018 241 311 1019 162 312 1020 240 312 1021 239 312 1022 75 313 1023 197 313 1024 195 313 1025 77 314 1026 198 314 1027 197 314 1028 170 315 1029 171 315 1030 245 315 1031 190 316 1032 254 316 1033 253 316 1034 146 317 1035 232 317 1036 231 317 1037 134 318 1038 135 318 1039 227 318 1040 102 319 1041 210 319 1042 209 319 1043 111 320 1044 215 320 1045 214 320 1046 149 321 1047 152 321 1048 235 321 1049 173 322 1050 246 322 1051 245 322 1052 140 323 1053 141 323 1054 230 323 1055 185 324 1056 252 324 1057 251 324 1058 128 325 1059 223 325 1060 222 325 1061 73 326 1062 194 326 1063 192 326 1064 192 327 1065 189 327 1066 188 327 1067 188 156 1068 186 156 1069 184 156 1070 184 156 1071 181 156 1072 180 156 1073 180 328 1074 178 328 1075 176 328 1076 176 156 1077 174 156 1078 172 156 1079 172 156 1080 169 156 1081 168 156 1082 168 156 1083 166 156 1084 164 156 1085 164 156 1086 161 156 1087 160 156 1088 160 329 1089 158 329 1090 155 329 1091 155 330 1092 154 330 1093 148 330 1094 154 331 1095 151 331 1096 148 331 1097 151 156 1098 150 156 1099 148 156 1100 148 332 1101 145 332 1102 144 332 1103 144 156 1104 142 156 1105 148 156 1106 142 156 1107 139 156 1108 148 156 1109 139 333 1110 138 333 1111 132 333 1112 138 156 1113 136 156 1114 132 156 1115 136 156 1116 133 156 1117 132 156 1118 132 334 1119 130 334 1120 127 334 1121 127 156 1122 126 156 1123 132 156 1124 126 156 1125 124 156 1126 132 156 1127 124 332 1128 122 332 1129 119 332 1130 119 156 1131 118 156 1132 116 156 1133 116 156 1134 113 156 1135 107 156 1136 113 335 1137 112 335 1138 107 335 1139 112 329 1140 110 329 1141 107 329 1142 107 156 1143 106 156 1144 100 156 1145 106 156 1146 104 156 1147 100 156 1148 104 156 1149 101 156 1150 100 156 1151 100 156 1152 97 156 1153 96 156 1154 96 156 1155 94 156 1156 92 156 1157 92 336 1158 90 336 1159 88 336 1160 88 156 1161 86 156 1162 92 156 1163 86 156 1164 84 156 1165 92 156 1166 84 156 1167 82 156 1168 80 156 1169 80 327 1170 78 327 1171 76 327 1172 76 337 1173 74 337 1174 73 337 1175 73 156 1176 192 156 1177 76 156 1178 192 156 1179 188 156 1180 76 156 1181 188 338 1182 184 338 1183 172 338 1184 184 156 1185 180 156 1186 172 156 1187 180 339 1188 176 339 1189 172 339 1190 172 156 1191 168 156 1192 164 156 1193 164 156 1194 160 156 1195 172 156 1196 160 340 1197 155 340 1198 172 340 1199 124 156 1200 119 156 1201 107 156 1202 119 156 1203 116 156 1204 107 156 1205 100 156 1206 96 156 1207 107 156 1208 96 156 1209 92 156 1210 107 156 1211 84 341 1212 80 341 1213 76 341 1214 155 342 1215 148 342 1216 172 342 1217 148 156 1218 139 156 1219 172 156 1220 139 343 1221 132 343 1222 107 343 1223 132 344 1224 124 344 1225 107 344 1226 92 345 1227 84 345 1228 107 345 1229 84 156 1230 76 156 1231 107 156 1232 76 346 1233 188 346 1234 172 346 1235 76 347 1236 172 347 1237 107 347 1238 87 348 1239 203 348 1240 202 348 1241 159 349 1242 239 349 1243 238 349 1244 64 350 1245 31 350 1246 36 350 1247 63 351 1248 61 351 1249 64 351 1250 59 352 1251 68 352 1252 51 352 1253 30 353 1254 43 353 1255 40 353 1256 140 354 
1257 139 354 1258 142 354 1259 143 355 1260 141 355 1261 142 355 1262 144 356 1263 145 356 1264 146 356 1265 146 357 1266 145 357 1267 148 357 1268 150 358 1269 149 358 1270 147 358 1271 152 359 1272 149 359 1273 150 359 1274 134 169 1275 131 169 1276 132 169 1277 131 360 1278 129 360 1279 130 360 1280 130 361 1281 129 361 1282 128 361 1283 125 362 1284 126 362 1285 127 362 1286 125 363 1287 123 363 1288 124 363 1289 123 364 1290 121 364 1291 122 364 1292 122 365 1293 121 365 1294 120 365 1295 118 366 1296 119 366 1297 120 366 1298 117 367 1299 115 367 1300 116 367 1301 114 178 1302 113 178 1303 116 178 1304 112 368 1305 113 368 1306 114 368 1307 93 369 1308 91 369 1309 92 369 1310 159 370 1311 157 370 1312 158 370 1313 160 371 1314 161 371 1315 162 371 1316 164 372 1317 163 372 1318 162 372 1319 165 36 1320 163 36 1321 164 36 1322 167 373 1323 165 373 1324 166 373 1325 170 374 1326 167 374 1327 168 374 1328 172 375 1329 171 375 1330 170 375 1331 173 376 1332 171 376 1333 172 376 1334 175 377 1335 173 377 1336 174 377 1337 19 378 1338 196 378 1339 195 378 1340 19 379 1341 195 379 1342 197 379 1343 225 156 1344 226 156 1345 16 156 1346 224 380 1347 225 380 1348 16 380 1349 19 381 1350 197 381 1351 198 381 1352 19 382 1353 198 382 1354 199 382 1355 223 383 1356 224 383 1357 16 383 1358 222 384 1359 223 384 1360 16 384 1361 19 385 1362 199 385 1363 200 385 1364 19 386 1365 200 386 1366 201 386 1367 221 156 1368 222 156 1369 16 156 1370 220 156 1371 221 156 1372 16 156 1373 19 387 1374 201 387 1375 202 387 1376 19 388 1377 202 388 1378 203 388 1379 219 156 1380 220 156 1381 16 156 1382 218 156 1383 219 156 1384 16 156 1385 19 156 1386 203 156 1387 204 156 1388 19 389 1389 204 389 1390 205 389 1391 217 156 1392 218 156 1393 16 156 1394 216 390 1395 217 390 1396 16 390 1397 16 156 1398 19 156 1399 206 156 1400 19 391 1401 205 391 1402 206 391 1403 215 156 1404 216 156 1405 16 156 1406 214 392 1407 215 392 1408 16 392 1409 16 393 1410 206 393 1411 207 393 1412 16 394 1413 207 394 1414 208 394 1415 213 395 1416 214 395 1417 16 395 1418 212 396 1419 213 396 1420 16 396 1421 16 397 1422 208 397 1423 209 397 1424 16 398 1425 209 398 1426 210 398 1427 211 156 1428 212 156 1429 16 156 1430 14 399 1431 226 399 1432 227 399 1433 14 400 1434 227 400 1435 228 400 1436 256 156 1437 196 156 1438 18 156 1439 255 401 1440 256 401 1441 18 401 1442 14 402 1443 228 402 1444 229 402 1445 14 403 1446 229 403 1447 230 403 1448 254 156 1449 255 156 1450 18 156 1451 253 156 1452 254 156 1453 18 156 1454 14 404 1455 230 404 1456 231 404 1457 14 405 1458 231 405 1459 232 405 1460 252 156 1461 253 156 1462 18 156 1463 251 156 1464 252 156 1465 18 156 1466 14 406 1467 232 406 1468 233 406 1469 14 407 1470 233 407 1471 234 407 1472 250 156 1473 251 156 1474 18 156 1475 249 156 1476 250 156 1477 18 156 1478 14 156 1479 234 156 1480 235 156 1481 14 408 1482 235 408 1483 236 408 1484 248 156 1485 249 156 1486 18 156 1487 247 409 1488 248 409 1489 18 409 1490 14 410 1491 236 410 1492 237 410 1493 14 411 1494 237 411 1495 238 411 1496 247 156 1497 18 156 1498 246 156 1499 18 412 1500 14 412 1501 246 412 1502 14 413 1503 238 413 1504 239 413 1505 14 414 1506 239 414 1507 240 414 1508 245 415 1509 246 415 1510 14 415 1511 244 416 1512 245 416 1513 14 416 1514 14 417 1515 240 417 1516 241 417 1517 14 156 1518 241 156 1519 242 156 1520 243 418 1521 244 418 1522 14 418 1523 34 419 1524 52 419 1525 69 419 1526 69 420 1527 53 420 1528 27 420 1529 53 421 1530 41 421 1531 27 421 1532 49 422 1533 38 422 1534 28 422 1535 47 423 1536 46 
423 1537 55 423 1538 63 424 1539 70 424 1540 62 424 1541 62 425 1542 60 425 1543 61 425 1544 60 426 1545 54 426 1546 61 426 1547

+
+
+
+
+ + + + 0.001 0 0 -0.019826 0 0.001 0 -0.02155 0 0 0.001 0 0 0 0 1 + + + + 0.00175885 0 0 4.27e-4 0 -7.2866e-11 0.00166698 -0.02045 0 -0.00166698 -7.2866e-11 0.01 0 0 0 1 + + + + + + + +
\ No newline at end of file diff --git a/models/rg_robot/meshes/ActiveCardanHinge_Servo_HolderV1.dae b/models/rg_robot/meshes/ActiveCardanHinge_Servo_HolderV1.dae new file mode 100644 index 0000000000..bb5147bdfd --- /dev/null +++ b/models/rg_robot/meshes/ActiveCardanHinge_Servo_HolderV1.dae @@ -0,0 +1,63 @@ + + + + + VCGLab + VCGLib | MeshLab + + Y_UP + do sep. 17 13:41:27 2015 + do sep. 17 13:41:27 2015 + + + + + + + + + -0.00462891 0.013625 0.00102316 -0.00468326 0.015125 0.00121587 -0.00477896 0.013625 0.00149116 -0.00487462 0.013625 0.00171763 -0.0049836 0.013625 0.001938 -0.0050839 0.015125 0.00211571 -0.00538637 0.013625 0.00255479 -0.00536183 0.015125 0.00252336 -0.00551858 0.015125 0.002714 -0.00554432 0.013625 0.00274318 -0.00568645 0.015125 0.00289492 -0.00648564 0.013625 0.00352687 -0.00689103 0.015125 0.00374286 -0.00714708 0.013625 0.00385115 -0.00831674 0.015125 0.00411347 -0.00858875 0.013625 0.00412484 -0.00883454 0.013625 0.00411968 -0.00905618 0.015125 0.0041024 -0.00907958 0.013625 0.00409988 -0.00930075 0.015125 0.00406928 -0.0102462 0.015125 0.00379305 -0.010702 0.013625 0.00356392 -0.0108974 0.015125 0.00344262 -0.0110993 0.015125 0.00330055 -0.0112922 0.015125 0.00314667 -0.0114756 0.015125 0.00298153 -0.0116488 0.015125 0.00280571 -0.0119622 0.015125 0.00242462 -0.0124419 0.015125 0.00156431 -0.0126025 0.013625 0.00109324 -0.0126013 0.015125 0.0010974 -0.0127039 0.015125 0.000614801 -0.0127482 0.015125 0.000123403 -0.00462708 0.0139446 0.00101601 -0.00460362 0.0141276 0.000918764 -0.00459151 0.0141777 0.000864038 -0.00458464 0.0142015 0.000831338 -0.00452596 0.0143304 0.00046208 -0.00451188 0.0143478 0.000312794 -0.00450738 0.015125 0.000246694 -0.00450321 0.0143564 0.00016276 -0.00450002 0.0143591 1.25336e-05 -0.00452334 0.014334 -0.000438175 -0.00453301 0.0143201 -0.000520838 -0.00456629 0.015125 -0.000736545 -0.00452951 0.015125 -0.000492499 -0.00455933 0.0142704 -0.000697079 -0.00458285 0.0142072 -0.000822609 -0.00457503 0.0142307 -0.000783179 -0.00459694 0.0141567 -0.000889016 -0.00461349 0.0140744 -0.00096096 -0.00462705 0.0139453 -0.00101588 -0.00462844 0.0139104 -0.00102132 -0.0046176 0.015125 -0.000977955 -0.00468326 0.015125 -0.00121586 -0.00462891 0.013625 -0.00102316 -0.00469695 0.013625 -0.0012594 -0.00476303 0.015125 -0.00144942 -0.00487462 0.013625 -0.00171763 -0.00510552 0.013625 -0.00215148 -0.00523993 0.013625 -0.00235732 -0.00737912 0.013625 -0.00393235 -0.00807129 0.015125 -0.00408767 -0.00785565 0.013625 -0.00405262 -0.00858875 0.013625 -0.00412484 -0.00956396 0.013625 -0.00401671 -0.00980158 0.013625 -0.00395364 -0.0102462 0.015125 -0.00379305 -0.0104702 0.015125 -0.00368931 -0.0106875 0.015125 -0.00357235 -0.0108974 0.015125 -0.00344261 -0.010702 0.013625 -0.00356392 -0.0109107 0.013625 -0.00343385 -0.0111112 0.013625 -0.0032916 -0.0113028 0.013625 -0.00313764 -0.011657 0.013625 -0.0027969 -0.0118112 0.015125 -0.00261985 -0.0121012 0.015125 -0.00222069 -0.0122318 0.013625 -0.00200169 -0.0125303 0.013625 -0.00132824 -0.0127043 0.013625 -0.000612423 -0.00456629 0.015125 0.000736549 -0.0045 0.015125 1.9834e-09 -0.00536183 0.015125 -0.00252336 -0.00568645 0.015125 -0.00289491 -0.00586484 0.015125 -0.00306547 -0.00589244 0.013625 -0.0030901 -0.00627934 0.013625 -0.00339315 -0.00667029 0.015125 -0.00363245 -0.00669954 0.013625 -0.00364804 -0.00689103 0.015125 -0.00374285 -0.00782782 0.015125 -0.00404724 -0.00834309 0.013625 -0.00411535 -0.00856329 0.015125 -0.00412454 -0.0095429 0.015125 -0.00402158 -0.0102634 0.013625 -0.00378565 -0.0116488 
0.015125 -0.00280571 -0.0122278 0.015125 -0.00200883 -0.0123446 0.013625 -0.00178327 -0.0126013 0.015125 -0.0010974 -0.0126025 0.013625 -0.00109323 -0.0126599 0.015125 -0.000857634 -0.0127039 0.015125 -0.000614798 -0.0126606 0.013625 0.000854348 -0.0126599 0.015125 0.000857637 -0.0124442 0.013625 0.00155853 -0.0123446 0.013625 0.00178328 -0.0122318 0.013625 0.00200169 -0.0121061 0.013625 0.002213 -0.0121012 0.015125 0.0022207 -0.0118112 0.015125 0.00261985 -0.011485 0.013625 0.00297255 -0.0113028 0.013625 0.00313765 -0.0111112 0.013625 0.0032916 -0.0104702 0.015125 0.00368931 -0.00980158 0.013625 0.00395365 -0.00956396 0.013625 0.00401671 -0.00932301 0.013625 0.00406552 -0.00834309 0.013625 0.00411536 -0.00809843 0.013625 0.00409126 -0.00785565 0.013625 0.00405262 -0.00761559 0.013625 0.00399959 -0.00782782 0.015125 0.00404724 -0.00737912 0.013625 0.00393236 -0.00627934 0.013625 0.00339316 -0.0060531 0.015125 0.00322506 -0.0046176 0.015125 0.000977959 -0.00452951 0.015125 0.000492503 -0.00476303 0.015125 0.00144942 -0.00485662 0.015125 0.00167779 -0.00693989 0.015125 0.000472146 -0.00712977 0.015125 0.000909274 -0.00521677 0.015125 0.0023237 -0.00496371 0.015125 0.00190015 -0.00721864 0.015125 0.00104148 -0.00731918 0.015125 0.00116505 -0.00755178 0.015125 0.00138228 -0.00586484 0.015125 0.00306547 -0.00625058 0.015125 0.00337309 -0.00768193 0.015125 0.00147415 -0.00645655 0.015125 0.00350906 -0.00667029 0.015125 0.00363246 -0.00781988 0.015125 0.0015538 -0.00711797 0.015125 0.00383986 -0.00796452 0.015125 0.00162058 -0.0073503 0.015125 0.00392311 -0.0075872 0.015125 0.00399232 -0.00826895 0.015125 0.0017134 -0.00807129 0.015125 0.00408767 -0.00856329 0.015125 0.00412454 -0.00881007 0.015125 0.00412085 -0.0095429 0.015125 0.00402158 -0.00978176 0.015125 0.00395949 -0.0100165 0.015125 0.00388322 -0.0106875 0.015125 0.00357236 -0.0095 0.015125 0.00151555 -0.00963419 0.015125 0.0014297 -0.00976002 0.015125 0.00133201 -0.00987644 0.015125 0.00122327 -0.00998249 0.015125 0.00110441 -0.0122278 0.015125 0.00200883 -0.0123415 0.015125 0.00178977 -0.0125286 0.015125 0.00133324 -0.0101601 0.015125 0.000840277 -0.0102301 0.015125 0.000697204 -0.0103298 0.015125 0.00039496 -0.0103587 0.015125 0.000238294 -0.0127334 0.015125 0.000369764 -0.0127482 0.015125 -0.000123399 -0.0103732 0.015125 -7.96484e-05 -0.0127334 0.015125 -0.00036976 -0.0103587 0.015125 -0.00023829 -0.0125286 0.015125 -0.00133324 -0.0103298 0.015125 -0.000394956 -0.0124419 0.015125 -0.0015643 -0.0123415 0.015125 -0.00178977 -0.0100773 0.015125 -0.000976383 -0.0119622 0.015125 -0.00242461 -0.00998249 0.015125 -0.0011044 -0.0114756 0.015125 -0.00298153 -0.0112922 0.015125 -0.00314667 -0.0110993 0.015125 -0.00330055 -0.00963419 0.015125 -0.0014297 -0.00935856 0.015125 -0.00158883 -0.00921104 0.015125 -0.00164895 -0.0100165 0.015125 -0.00388322 -0.00978176 0.015125 -0.00395948 -0.00930075 0.015125 -0.00406927 -0.00890269 0.015125 -0.00172783 -0.00905618 0.015125 -0.0041024 -0.00881007 0.015125 -0.00412084 -0.00831674 0.015125 -0.00411346 -0.00842624 0.015125 -0.00173867 -0.0075872 0.015125 -0.00399232 -0.0073503 0.015125 -0.00392311 -0.00711797 0.015125 -0.00383985 -0.00645655 0.015125 -0.00350905 -0.0060531 0.015125 -0.00322505 -0.00625058 0.015125 -0.00337309 -0.00755178 0.015125 -0.00138228 -0.00743053 0.015125 -0.00127896 -0.00551858 0.015125 -0.00271399 -0.00521677 0.015125 -0.00232369 -0.00721864 0.015125 -0.00104147 -0.0050839 0.015125 -0.00211571 -0.00496371 0.015125 -0.00190015 -0.00485662 0.015125 -0.00167779 -0.00693989 0.015125 
-0.000472142 -0.00690394 0.015125 -0.000316951 -0.00450738 0.015125 -0.00024669 -0.00469695 0.013625 0.0012594 -0.00397035 0.013625 0.00180348 -0.00363902 0.013625 0.00232165 -0.00352604 0.013625 0.00206171 -0.00445009 0.013625 0.00131588 -0.00390438 0.013625 0.00282236 -0.00405604 0.013625 0.00306181 -0.00510552 0.013625 0.00215148 -0.00458207 0.013625 0.00372891 -0.00589244 0.013625 0.0030901 -0.00608137 0.013625 0.0032474 -0.00669954 0.013625 0.00364805 -0.00692028 0.013625 0.00375627 -0.00498722 0.013625 0.00412512 -0.0056659 0.013625 0.00463614 -0.00590867 0.013625 0.00478242 -0.00641518 0.013625 0.00503654 -0.00721718 0.013625 0.00531677 -0.00749295 0.013625 0.00538224 -0.00777173 0.013625 0.00543341 -0.00833533 0.013625 0.00549237 -0.00861866 0.013625 0.0055 -0.00918462 0.013625 0.00547146 -0.00523993 0.013625 0.00235732 -0.00571321 0.013625 0.00292183 -0.00890201 0.013625 0.00549302 -0.0105606 0.013625 0.00514816 -0.010486 0.013625 0.00368132 -0.00974465 0.013625 0.00538483 -0.0102928 0.013625 0.00524104 -0.010035 0.013625 0.00387653 -0.0102634 0.013625 0.00378565 -0.01108 0.013625 0.00492168 -0.0109107 0.013625 0.00343386 -0.0118087 0.013625 0.00448488 -0.0124614 0.013625 0.00394109 -0.011657 0.013625 0.0027969 -0.0118182 0.013625 0.00261131 -0.0131869 0.013625 0.00307232 -0.0134788 0.013625 0.00258662 -0.0133391 0.013625 0.00283323 -0.0128465 0.013625 0.00352542 -0.0119681 0.013625 0.00241645 -0.0136056 0.013625 0.00233313 -0.0125303 0.013625 0.00132824 -0.0139777 0.013625 0.00126419 -0.0138192 0.013625 0.00180826 -0.0139055 0.013625 0.00153827 -0.0140358 0.013625 0.000986761 -0.0127043 0.013625 0.000612426 -0.0127335 0.013625 0.000368328 -0.0127482 0.013625 0.000122923 -0.0140794 0.013625 0.000706708 -0.0127482 0.013625 -0.000122919 -0.0141086 0.013625 -0.000424774 -0.0127335 0.013625 -0.000368325 -0.0140358 0.013625 -0.000986758 -0.0139055 0.013625 -0.00153827 -0.0139777 0.013625 -0.00126419 -0.0126606 0.013625 -0.000854345 -0.0138192 0.013625 -0.00180826 -0.0136056 0.013625 -0.00233313 -0.0133391 0.013625 -0.00283323 -0.0124442 0.013625 -0.00155852 -0.0119681 0.013625 -0.00241644 -0.0118182 0.013625 -0.00261131 -0.0126593 0.013625 -0.00373821 -0.0122533 0.013625 -0.00413349 -0.0120355 0.013625 -0.00431491 -0.0121061 0.013625 -0.002213 -0.011485 0.013625 -0.00297255 -0.010486 0.013625 -0.00368132 -0.00932301 0.013625 -0.00406551 -0.00907958 0.013625 -0.00409987 -0.00918462 0.013625 -0.00547145 -0.00946575 0.013625 -0.00543536 -0.01108 0.013625 -0.00492167 -0.010035 0.013625 -0.00387653 -0.0100206 0.013625 -0.00532 -0.00890201 0.013625 -0.00549302 -0.00883454 0.013625 -0.00411967 -0.00833533 0.013625 -0.00549236 -0.00777173 0.013625 -0.00543341 -0.00809843 0.013625 -0.00409125 -0.00749295 0.013625 -0.00538223 -0.00761559 0.013625 -0.00399959 -0.00667758 0.013625 -0.00514369 -0.00714708 0.013625 -0.00385115 -0.00641518 0.013625 -0.00503653 -0.00648564 0.013625 -0.00352686 -0.00694515 0.013625 -0.00523718 -0.00692028 0.013625 -0.00375627 -0.00608137 0.013625 -0.00324739 -0.00554432 0.013625 -0.00274318 -0.00439534 0.013625 -0.00351568 -0.00538637 0.013625 -0.00255478 -0.00421984 0.013625 -0.00329311 -0.00405604 0.013625 -0.0030618 -0.00571321 0.013625 -0.00292183 -0.00477954 0.013625 -0.00393223 -0.00390438 0.013625 -0.00282236 -0.0049836 0.013625 -0.00193799 -0.00397035 0.013625 -0.00180348 -0.00352604 0.013625 -0.0020617 -0.00368057 0.013625 -0.00198702 -0.00410391 0.013625 -0.00169568 -0.00454508 0.013625 -0.00117293 -0.00477896 0.013625 -0.00149116 -0.00445009 0.013625 
-0.00131588 -0.00422886 0.013625 0.00157802 -0.000875 0.014375 1.88505e-09 -0.000817376 0.014371 0.000145978 -0.000864188 0.014371 0.000434003 -0.000707291 0.0143215 0.000472675 -0.000523635 0.014155 0.000517942 -0.000483477 0.014081 0.00052784 -0.00044213 0.0139174 0.000538031 -0.000406148 0.0139174 0.000362069 -0.000382358 0.013875 0.000181816 -0.000541477 0.014221 0.000339355 -0.000656319 0.0143215 0.000159131 -0.000834991 0.014371 0.000291008 -0.000939895 0.014375 0.000472146 -0.00109712 0.014081 0.0015907 -0.0012122 0.014001 0.00173037 -0.00135444 0.013875 0.00185693 -0.00120314 0.0139174 0.00174147 -0.00106762 0.0139174 0.00162141 -0.000894802 0.014155 0.00130016 -0.000861737 0.014081 0.00132501 -0.000795793 0.014155 0.00115672 -0.000840822 0.014221 0.00112825 -0.00074151 0.014277 0.000802482 -0.000783695 0.0143531 0.000453843 -0.000826184 0.0143531 0.000600535 -0.000989811 0.014375 0.000623425 -0.000956609 0.014371 0.000710837 -0.000945811 0.0143531 0.000881308 -0.000894629 0.014277 0.00109422 -0.000937394 0.014221 0.00126816 -0.00123007 0.014081 0.00170848 -0.00134788 0.0139174 0.00185023 -0.00101922 0.014371 0.000842782 -0.00102217 0.0143531 0.00101357 -0.00109256 0.014277 0.00135762 -0.00150807 0.014001 0.00193459 -0.00150864 0.013875 0.00195352 -0.00112977 0.014375 0.000909274 -0.00114662 0.0143215 0.00130974 -0.00120677 0.014277 0.00147653 -0.00137207 0.014081 0.00181519 -0.00167015 0.013875 0.00203734 -0.00120552 0.0143531 0.00125756 -0.0012568 0.0143215 0.00142445 -0.00133018 0.014277 0.00158586 -0.00139556 0.014155 0.00178115 -0.0018379 0.013875 0.00210784 -0.00182778 0.0139174 0.0021021 -0.00121865 0.014375 0.00104148 -0.00154287 0.014155 0.0018743 -0.00126757 0.014371 0.00120258 -0.00136874 0.014371 0.00130791 -0.00146199 0.014277 0.00168491 -0.0016972 0.014155 0.0019553 -0.00185754 0.014155 0.00202362 -0.0021753 0.0139174 0.00220277 -0.00168193 0.014375 0.00147415 -0.0020554 0.014277 0.00196649 -0.00235914 0.014081 0.00218953 -0.00289599 0.0139174 0.00223181 -0.00273092 0.013875 0.00224751 -0.00254896 0.013875 0.00224872 -0.00253447 0.0139174 0.00224638 -0.00202287 0.014155 0.00207881 -0.00189901 0.014277 0.00191428 -0.00177829 0.0143215 0.00178441 -0.00154772 0.0143531 0.00156072 -0.00159481 0.014371 0.00149249 -0.00147805 0.014371 0.00140476 -0.00171825 0.014371 0.00157055 -0.00184756 0.014371 0.00163842 -0.00181202 0.0143531 0.00171332 -0.00237055 0.014221 0.00209559 -0.00236413 0.014155 0.00214848 -0.00289426 0.014001 0.00221758 -0.0030747 0.0139174 0.00220277 -0.00181989 0.014375 0.0015538 -0.00198192 0.014371 0.00169566 -0.00222993 0.0143215 0.00193519 -0.00254 0.014221 0.00210927 -0.00209739 0.0143531 0.00182154 -0.00212045 0.014371 0.00174191 -0.00224567 0.0143531 0.00185809 -0.00271 0.014221 0.00210927 -0.00325049 0.0139174 0.00215944 -0.00211462 0.014375 0.00167392 -0.00238693 0.0143215 0.0019607 -0.00323864 0.014081 0.00211853 -0.00341714 0.014001 0.0020887 -0.00342222 0.0139174 0.0021021 -0.00287178 0.014277 0.00203239 -0.00287945 0.014221 0.00209559 -0.00305791 0.014155 0.00212052 -0.00304725 0.014221 0.00206832 -0.00358264 0.014001 0.00201819 -0.00360684 0.013875 0.00202448 -0.00358879 0.0139174 0.00203114 -0.00242623 0.014375 0.00173868 -0.00284359 0.014371 0.00180029 -0.0035528 0.014155 0.0019553 -0.00406456 0.013875 0.00172921 -0.00418238 0.0139174 0.00162141 -0.00404686 0.0139174 0.00174147 -0.00374193 0.014001 0.00193459 -0.00357053 0.014081 0.00199267 -0.0031946 0.014277 0.00196649 -0.00285359 0.0143531 0.00188259 -0.00286307 0.0143215 0.0019607 
-0.00269802 0.014371 0.00181204 -0.00274442 0.014375 0.00174592 -0.00350267 0.014277 0.00184965 -0.00368049 0.014221 0.00182816 -0.0043078 0.0139174 0.00149083 -0.00315261 0.0143531 0.00182154 -0.00415288 0.014081 0.0015907 -0.00429707 0.014001 0.00148133 -0.00442231 0.0139174 0.00135059 -0.00432431 0.013875 0.00147474 -0.00321104 0.014375 0.00164896 -0.00326808 0.014371 0.00169566 -0.00329748 0.0143531 0.00177318 -0.00343798 0.0143531 0.00171332 -0.00399377 0.014155 0.00167644 -0.00427593 0.014081 0.00146259 -0.00452515 0.0139174 0.00120159 -0.00335856 0.014375 0.00158884 -0.00361255 0.0143215 0.00171049 -0.00391982 0.014277 0.00158586 -0.00441085 0.014001 0.00134198 -0.00451304 0.014001 0.00119393 -0.00453968 0.013875 0.00118175 -0.00340244 0.014371 0.00163842 -0.00387414 0.0143215 0.00152993 -0.00408733 0.014221 0.00152244 -0.00424497 0.014155 0.00143517 -0.00438826 0.014081 0.00132501 -0.00461568 0.0139174 0.00104479 -0.0035 0.014375 0.00151555 -0.00365519 0.014371 0.00149249 -0.00382438 0.0143531 0.00146897 -0.0039932 0.0143215 0.00142445 -0.00415744 0.014277 0.00135762 -0.00460299 0.014001 0.00103813 -0.00462182 0.0140114 0.000995091 -0.00376002 0.014375 0.00133201 -0.00387644 0.014375 0.00122327 -0.00454135 0.014155 0.00100578 -0.00445421 0.014155 0.00115672 -0.00440918 0.014221 0.00112825 -0.00431261 0.014221 0.00126816 -0.00420398 0.0143215 0.00118653 -0.00414107 0.0143531 0.00113926 -0.00422783 0.0143531 0.00101357 -0.00443781 0.014277 0.000951437 -0.0045773 0.0142242 0.000794817 -0.00456706 0.014221 0.000827435 -0.0040748 0.014371 0.00108945 -0.00415776 0.014371 0.00096926 -0.00456194 0.0142643 0.000712138 -0.00450849 0.014277 0.000802482 -0.00436966 0.0143531 0.000743332 -0.00454782 0.0142944 0.000626288 -0.00444205 0.0143215 0.000774176 -0.00423078 0.014371 0.000842782 -0.00453572 0.0143158 0.000541698 -0.00449846 0.0143215 0.000625454 -0.00423012 0.014375 0.000697204 -0.00428687 0.014375 0.000548354 -0.0044663 0.0143531 0.000453843 -0.00449685 0.0143531 0.000304208 -0.0043587 0.014375 0.000238294 -0.00441502 0.014371 0.000290909 -0.00443851 0.014371 1.88452e-09 -0.00443263 0.014371 0.000145929 -0.00438581 0.014371 0.000434003 -0.00437319 0.014375 7.96523e-05 -0.00437319 0.014375 -7.96485e-05 -0.00449685 0.0143531 -0.000304204 -0.00451005 0.0143497 -0.000287795 -0.0043587 0.014375 -0.00023829 -0.00441502 0.014371 -0.000290906 -0.00454511 0.0142995 -0.00060839 -0.00449846 0.0143215 -0.00062545 -0.00450849 0.014277 -0.000802478 -0.00440918 0.014221 -0.00112824 -0.00427593 0.014081 -0.00146259 -0.00404686 0.0139174 -0.00174146 -0.00418238 0.0139174 -0.0016214 -0.00429707 0.014001 -0.00148133 -0.00456706 0.014221 -0.000827431 -0.00444205 0.0143215 -0.000774172 -0.00438581 0.014371 -0.000433999 -0.00436966 0.0143531 -0.000743328 -0.00437387 0.0143215 -0.000917874 -0.00390212 0.0139174 -0.00185023 -0.00392013 0.013875 -0.00183988 -0.00434518 0.014371 -0.000574278 -0.00430419 0.0143531 -0.000881304 -0.00429434 0.0143215 -0.00105562 -0.00420509 0.014221 -0.00139984 -0.00412423 0.014155 -0.00156086 -0.00389398 0.014001 -0.00183843 -0.0037491 0.0139174 -0.001947 -0.0040773 0.014375 -0.000976383 -0.0040748 0.014371 -0.00108945 -0.00382417 0.014221 -0.0017373 -0.00370713 0.014155 -0.0018743 -0.00358264 0.014001 -0.00201819 -0.00342222 0.0139174 -0.0021021 -0.00325049 0.0139174 -0.00215943 -0.00326791 0.013875 -0.00215619 -0.00344004 0.013875 -0.00209719 -0.00399377 0.014155 -0.00167644 -0.00410338 0.0143215 -0.00130973 -0.00415776 0.014371 -0.000969256 -0.00414107 0.0143531 -0.00113925 
-0.00398249 0.014375 -0.0011044 -0.00398243 0.014371 -0.00120258 -0.00391982 0.014277 -0.00158586 -0.00368049 0.014221 -0.00182816 -0.0032465 0.014001 -0.00214567 -0.00374699 0.0143215 -0.00162548 -0.0035528 0.014155 -0.0019553 -0.0030747 0.0139174 -0.00220276 -0.00273092 0.013875 -0.0022475 -0.00388126 0.014371 -0.00130791 -0.00377194 0.014371 -0.00140475 -0.00370228 0.0143531 -0.00156071 -0.00361255 0.0143215 -0.00171049 -0.00352996 0.014221 -0.00190716 -0.00289599 0.0139174 -0.0022318 -0.00271553 0.0139174 -0.00224637 -0.00254896 0.013875 -0.00224871 -0.00337356 0.014221 -0.0019738 -0.00335099 0.014277 -0.00191427 -0.00271495 0.014001 -0.00223205 -0.00253447 0.0139174 -0.00224637 -0.00363419 0.014375 -0.0014297 -0.00353175 0.014371 -0.00157054 -0.00347171 0.0143215 -0.0017844 -0.00321231 0.014221 -0.00202763 -0.00288587 0.014155 -0.00214847 -0.00253505 0.014001 -0.00223205 -0.00343798 0.0143531 -0.00171331 -0.00340244 0.014371 -0.00163841 -0.00329748 0.0143531 -0.00177317 -0.0031946 0.014277 -0.00196648 -0.00218772 0.013875 -0.0022071 -0.00312955 0.014371 -0.00174191 -0.00305866 0.014375 -0.00169541 -0.00298775 0.014371 -0.00177686 -0.00285359 0.0143531 -0.00188258 -0.00270744 0.014277 -0.00204565 -0.00254256 0.014277 -0.00204565 -0.00166121 0.0139174 -0.00203113 -0.00167015 0.013875 -0.00203734 -0.00199951 0.0139174 -0.00215943 -0.00253785 0.014155 -0.0021625 -0.00271 0.014221 -0.00210926 -0.00300433 0.0143531 -0.00185808 -0.00315261 0.0143531 -0.00182154 -0.00290269 0.014375 -0.00172783 -0.00237822 0.014277 -0.00203239 -0.00220275 0.014221 -0.00206831 -0.00166736 0.014001 -0.00201819 -0.0015009 0.0139174 -0.001947 -0.00269802 0.014371 -0.00181204 -0.00238693 0.0143215 -0.0019607 -0.00202287 0.014155 -0.0020788 -0.00184288 0.014081 -0.00206228 -0.00150807 0.014001 -0.00193458 -0.00258516 0.014375 -0.00174954 -0.00255198 0.014371 -0.00181204 -0.00203769 0.014221 -0.00202763 -0.00185754 0.014155 -0.00202361 -0.00152219 0.014081 -0.00191012 -0.00134788 0.0139174 -0.00185023 -0.00120854 0.013875 -0.00174818 -0.00120314 0.0139174 -0.00174146 -0.00222993 0.0143215 -0.00193518 -0.00224567 0.0143531 -0.00185808 -0.00172004 0.014221 -0.00190716 -0.00154287 0.014155 -0.0018743 -0.00137207 0.014081 -0.00181518 -0.00106762 0.0139174 -0.0016214 -0.00240641 0.014371 -0.00180028 -0.00226225 0.014371 -0.00177686 -0.00192462 0.0143215 -0.00184675 -0.00139556 0.014155 -0.00178115 -0.00212045 0.014371 -0.00174191 -0.00209739 0.0143531 -0.00182154 -0.00156951 0.014221 -0.00182816 -0.00125623 0.014155 -0.00167644 -0.00211462 0.014375 -0.00167392 -0.00184756 0.014371 -0.00163841 -0.00167679 0.0143531 -0.00164234 -0.00150301 0.0143215 -0.00162548 -0.00120677 0.014277 -0.00147653 -0.000736958 0.014001 -0.00119392 -0.000634319 0.0139174 -0.00104479 -0.00064701 0.014001 -0.00103813 -0.000556704 0.0139174 -0.000881217 -0.000556552 0.013875 -0.000885448 -0.000724845 0.0139174 -0.00120158 -0.00112577 0.014155 -0.00156086 -0.00133018 0.014277 -0.00158586 -0.00181202 0.0143531 -0.00171331 -0.00198192 0.014371 -0.00169566 -0.00159481 0.014371 -0.00149249 -0.00109256 0.014277 -0.00135762 -0.000937394 0.014221 -0.00126815 -0.000894802 0.014155 -0.00130016 -0.00155178 0.014375 -0.00138228 -0.00131131 0.0143531 -0.00136769 -0.000840822 0.014221 -0.00112824 -0.000795793 0.014155 -0.00115672 -0.000708648 0.014155 -0.00100578 -0.000569889 0.014001 -0.000875599 -0.000440932 0.013875 -0.000540691 -0.000492501 0.0139174 -0.000711931 -0.00147805 0.014371 -0.00140475 -0.00136874 0.014371 -0.00130791 -0.000755822 0.014221 
-0.000981019 -0.000404384 0.013875 -0.000362437 -0.00104602 0.0143215 -0.00118653 -0.00063393 0.014155 -0.000848314 -0.000532894 0.014081 -0.000698445 -0.000483477 0.014081 -0.000527836 -0.000420266 0.014001 -0.000359594 -0.000382358 0.013875 -0.000181812 -0.000406148 0.0139174 -0.000362065 -0.00110893 0.0143531 -0.00113925 -0.000955664 0.0143215 -0.00105562 -0.000812191 0.014277 -0.000951434 -0.000876134 0.0143215 -0.000917874 -0.000398429 0.014001 -0.000180385 -0.00038415 0.0139174 0.000181629 -0.00121865 0.014375 -0.00104147 -0.00112977 0.014375 -0.00090927 -0.00101922 0.014371 -0.000842778 -0.000956609 0.014371 -0.000710833 -0.0004678 0.014155 -0.000174605 -0.0004678 0.014155 0.000174609 -0.000419386 0.014081 1.84649e-09 -0.000426582 0.014081 0.000178027 -0.000398429 0.014001 0.000180389 -0.000391133 0.014001 1.83601e-09 -0.000426582 0.014081 -0.000178023 -0.000448124 0.014081 -0.000354886 -0.000520897 0.014221 -0.000170229 -0.000675539 0.0143215 -0.000317222 -0.000514022 0.014221 1.86486e-09 -0.000460745 0.014155 1.8562e-09 -0.00090482 0.014371 -0.000574278 -0.000864188 0.014371 -0.000433999 -0.000753186 0.0143531 -0.000304445 -0.000656319 0.0143215 -0.000159127 -0.000584345 0.014277 0.000165024 -0.000577683 0.014277 1.8722e-09 -0.000649898 0.0143215 1.87803e-09 -0.000734748 0.0143531 0.000152721 -0.000817376 0.014371 -0.000145975 -0.000811491 0.014371 1.88452e-09 -0.000728589 0.0143531 1.88218e-09 -0.00462891 0.013875 -0.00102316 -0.00448916 0.014081 -0.00117882 -0.00451304 0.014001 -0.00119392 -0.00441085 0.014001 -0.00134198 -0.00452515 0.0139174 -0.00120158 -0.0046217 0.0140126 -0.000994576 -0.00460308 0.0141301 -0.000916388 -0.00457797 0.014081 -0.001025 -0.00454135 0.014155 -0.00100578 -0.00459018 0.0141825 -0.000857822 -0.00461568 0.0139174 -0.00104479 -0.00460299 0.014001 -0.00103813 -0.00443781 0.014277 -0.000951434 -0.00443263 0.014371 -0.000145925 -0.0045023 0.0143572 -0.00013771 -0.00461375 0.0140728 0.000962019 -0.00339245 0.014155 0.00202362 -0.00340712 0.014081 0.00206229 -0.00322713 0.014155 0.00207881 -0.00254864 0.0143531 0.00189488 -0.00240641 0.014371 0.00180029 -0.00255198 0.014371 0.00181204 -0.00217817 0.014001 0.00218872 -0.00187644 0.014221 0.0019738 -0.00143053 0.014375 0.00127897 -0.00107191 0.013875 0.00162801 -0.000942199 0.0139174 0.00149083 -0.000708648 0.014155 0.00100578 -0.000736958 0.014001 0.00119393 -0.000622661 0.014221 0.000668481 -0.000829945 0.013875 0.00135657 -0.000572125 0.014155 0.000685352 -0.000637175 0.014277 0.000489957 -0.000724845 0.0139174 0.00120159 -0.000575363 0.014221 0.000505192 -0.000556552 0.013875 0.000885452 -0.000456045 0.014001 0.000534601 -0.000492501 0.0139174 0.000711935 -0.000376801 0.0139174 1.82505e-09 -0.000634866 0.013875 -0.0010497 -0.000726195 0.013875 -0.00120708 -0.000827694 0.0139174 -0.00135059 -0.00109712 0.014081 -0.00159069 -0.00142583 0.014221 -0.0017373 -0.00163745 0.0143215 -0.00171049 -0.00160134 0.014277 -0.00177303 -0.00177829 0.0143215 -0.0017844 -0.00226895 0.014375 -0.0017134 -0.0018379 0.013875 -0.00210783 -0.0020108 0.013875 -0.00216454 -0.00271215 0.014155 -0.0021625 -0.00287945 0.014221 -0.00209558 -0.00335856 0.014375 -0.00158883 -0.00326808 0.014371 -0.00169566 -0.00426171 0.014277 -0.00122991 -0.00420398 0.0143215 -0.00118653 -0.00423078 0.014371 -0.000842778 -0.00429339 0.014371 -0.000710833 -0.0043078 0.0139174 -0.00149083 -0.00432431 0.013875 -0.00147473 -0.00442231 0.0139174 -0.00135059 -0.000584345 0.014277 -0.000165021 -0.00442382 0.0143531 0.000600535 -0.00434518 0.014371 0.000574282 
-0.00357321 0.0143531 -0.00164234 -0.0011752 0.014371 -0.00108945 -0.00109224 0.014371 -0.000969256 -0.000488919 0.014155 -0.000348074 -0.000753186 0.0143531 0.000304449 -0.000675539 0.0143215 0.000317225 -0.000520897 0.014221 0.000170233 -0.000604286 0.014277 0.000328973 -0.000448124 0.014081 0.00035489 -0.000488919 0.014155 0.000348078 -0.000420266 0.014001 0.000359598 -0.000751544 0.0143215 0.000625454 -0.000880341 0.0143531 0.000743332 -0.00090482 0.014371 0.000574282 -0.000683045 0.014277 0.000648322 -0.00063393 0.014155 0.000848318 -0.000532894 0.014081 0.000698449 -0.000506096 0.014001 0.000707396 -0.000812191 0.014277 0.000951437 -0.000807947 0.0143215 0.000774176 -0.000682943 0.014221 0.000827435 -0.000672026 0.014081 0.001025 -0.00059588 0.014081 0.000864529 -0.00064701 0.014001 0.00103813 -0.000569889 0.014001 0.000875603 -0.000556704 0.0139174 0.000881221 -0.000955664 0.0143215 0.00105563 -0.000876134 0.0143215 0.000917878 -0.000755822 0.014221 0.000981022 -0.000634319 0.0139174 0.00104479 -0.0011752 0.014371 0.00108945 -0.00110893 0.0143531 0.00113926 -0.00109224 0.014371 0.00096926 -0.00104602 0.0143215 0.00118653 -0.000760837 0.014081 0.00117883 -0.000988288 0.014277 0.00122991 -0.00100503 0.014155 0.00143517 -0.000974074 0.014081 0.00146259 -0.000839151 0.014001 0.00134198 -0.000827694 0.0139174 0.00135059 -0.00131131 0.0143531 0.0013677 -0.00128992 0.014221 0.00163518 -0.00104491 0.014221 0.00139984 -0.00116267 0.014221 0.00152244 -0.00112577 0.014155 0.00156087 -0.00125623 0.014155 0.00167644 -0.000952927 0.014001 0.00148133 -0.00107755 0.014001 0.00161107 -0.00142562 0.0143531 0.00146897 -0.00137586 0.0143215 0.00152993 -0.00142583 0.014221 0.0017373 -0.00150301 0.0143215 0.00162548 -0.00156951 0.014221 0.00182816 -0.00160134 0.014277 0.00177303 -0.0015009 0.0139174 0.001947 -0.00135602 0.014001 0.00183844 -0.00167679 0.0143531 0.00164234 -0.00163745 0.0143215 0.00171049 -0.00172004 0.014221 0.00190717 -0.00152219 0.014081 0.00191012 -0.00167947 0.014081 0.00199267 -0.00166121 0.0139174 0.00203114 -0.00166736 0.014001 0.00201819 -0.00174733 0.014277 0.00184965 -0.00195252 0.0143531 0.00177318 -0.00183286 0.014001 0.0020887 -0.00207549 0.0143215 0.00189712 -0.00192462 0.0143215 0.00184675 -0.00184288 0.014081 0.00206229 -0.00201136 0.014081 0.00211853 -0.00199951 0.0139174 0.00215944 -0.0020035 0.014001 0.00214567 -0.00226225 0.014371 0.00177686 -0.00221548 0.014277 0.00200594 -0.00219209 0.014155 0.00212052 -0.00203769 0.014221 0.00202763 -0.00220275 0.014221 0.00206832 -0.00218382 0.014081 0.00216104 -0.00237822 0.014277 0.00203239 -0.00235574 0.014001 0.00221758 -0.00270136 0.0143531 0.00189488 -0.00239641 0.0143531 0.00188259 -0.00254547 0.0143215 0.0019735 -0.00270453 0.0143215 0.0019735 -0.00254256 0.014277 0.00204566 -0.00270744 0.014277 0.00204566 -0.00253785 0.014155 0.0021625 -0.00253619 0.014081 0.00220383 -0.00271381 0.014081 0.00220383 -0.00253505 0.014001 0.00223206 -0.00271495 0.014001 0.00223206 -0.00235401 0.0139174 0.00223181 -0.00271553 0.0139174 0.00224638 -0.00289086 0.014081 0.00218953 -0.00271215 0.014155 0.0021625 -0.00288587 0.014155 0.00214848 -0.00298775 0.014371 0.00177686 -0.00300433 0.0143531 0.00185809 -0.00307183 0.014001 0.00218872 -0.00312955 0.014371 0.00174191 -0.00302007 0.0143215 0.00193519 -0.00321231 0.014221 0.00202763 -0.00303452 0.014277 0.00200594 -0.00306618 0.014081 0.00216104 -0.00335099 0.014277 0.00191428 -0.00332538 0.0143215 0.00184675 -0.00317451 0.0143215 0.00189712 -0.0032465 0.014001 0.00214567 -0.00347171 0.0143215 
0.00178441 -0.00337356 0.014221 0.0019738 -0.00357321 0.0143531 0.00164234 -0.00364866 0.014277 0.00177303 -0.00370713 0.014155 0.0018743 -0.00352996 0.014221 0.00190717 -0.00353175 0.014371 0.00157055 -0.00374699 0.0143215 0.00162548 -0.00370228 0.0143531 0.00156072 -0.00378801 0.014277 0.00168491 -0.00382417 0.014221 0.0017373 -0.00389398 0.014001 0.00183844 -0.00372781 0.014081 0.00191012 -0.00385444 0.014155 0.00178115 -0.0037491 0.0139174 0.001947 -0.0040378 0.014001 0.00173037 -0.00387793 0.014081 0.00181519 -0.00390212 0.0139174 0.00185023 -0.00363419 0.014375 0.0014297 -0.00426171 0.014277 0.00122991 -0.00404323 0.014277 0.00147653 -0.00412423 0.014155 0.00156087 -0.00396008 0.014221 0.00163518 -0.00417245 0.014001 0.00161107 -0.00401993 0.014081 0.00170848 -0.00377194 0.014371 0.00140476 -0.00393869 0.0143531 0.0013677 -0.00388126 0.014371 0.00130791 -0.00410338 0.0143215 0.00130974 -0.00404448 0.0143531 0.00125756 -0.00398243 0.014371 0.00120258 -0.00420509 0.014221 0.00139984 -0.00449418 0.014221 0.000981022 -0.00435537 0.014277 0.00109422 -0.0043552 0.014155 0.00130016 -0.00437387 0.0143215 0.000917878 -0.00429434 0.0143215 0.00105563 -0.00448916 0.014081 0.00117883 -0.00457797 0.014081 0.001025 -0.00429339 0.014371 0.000710837 -0.00430419 0.0143531 0.000881308 -0.0044663 0.0143531 -0.000453839 -0.00442382 0.0143531 -0.000600531 -0.00435537 0.014277 -0.00109422 -0.00449418 0.014221 -0.000981019 -0.00422783 0.0143531 -0.00101357 -0.00431261 0.014221 -0.00126815 -0.0043552 0.014155 -0.00130016 -0.00445421 0.014155 -0.00115672 -0.00424497 0.014155 -0.00143516 -0.00438826 0.014081 -0.001325 -0.00415744 0.014277 -0.00135762 -0.0039932 0.0143215 -0.00142445 -0.00404323 0.014277 -0.00147653 -0.00404448 0.0143531 -0.00125755 -0.00415288 0.014081 -0.00159069 -0.00417245 0.014001 -0.00161107 -0.00382438 0.0143531 -0.00146897 -0.00393869 0.0143531 -0.00136769 -0.00408733 0.014221 -0.00152244 -0.00396008 0.014221 -0.00163517 -0.00387414 0.0143215 -0.00152992 -0.0040378 0.014001 -0.00173036 -0.00365519 0.014371 -0.00149249 -0.00378801 0.014277 -0.00168491 -0.00387793 0.014081 -0.00181518 -0.00401993 0.014081 -0.00170848 -0.00364866 0.014277 -0.00177303 -0.00372781 0.014081 -0.00191012 -0.00385444 0.014155 -0.00178115 -0.00332538 0.0143215 -0.00184675 -0.00350267 0.014277 -0.00184965 -0.00339245 0.014155 -0.00202361 -0.00357053 0.014081 -0.00199266 -0.00340712 0.014081 -0.00206228 -0.00374193 0.014001 -0.00193458 -0.00358879 0.0139174 -0.00203113 -0.00323864 0.014081 -0.00211853 -0.00322713 0.014155 -0.0020788 -0.00341714 0.014001 -0.0020887 -0.00317451 0.0143215 -0.00189712 -0.00303452 0.014277 -0.00200594 -0.00306618 0.014081 -0.00216104 -0.00305791 0.014155 -0.00212052 -0.00307183 0.014001 -0.00218872 -0.00302007 0.0143215 -0.00193518 -0.00304725 0.014221 -0.00206831 -0.00270136 0.0143531 -0.00189487 -0.00284359 0.014371 -0.00180028 -0.00287178 0.014277 -0.00203239 -0.00286307 0.0143215 -0.0019607 -0.00289086 0.014081 -0.00218953 -0.00289426 0.014001 -0.00221758 -0.00254864 0.0143531 -0.00189487 -0.00254547 0.0143215 -0.0019735 -0.00254 0.014221 -0.00210926 -0.00270453 0.0143215 -0.0019735 -0.00271381 0.014081 -0.00220382 -0.00253619 0.014081 -0.00220382 -0.00237055 0.014221 -0.00209558 -0.00235401 0.0139174 -0.0022318 -0.00235574 0.014001 -0.00221758 -0.00239641 0.0143531 -0.00188258 -0.00221548 0.014277 -0.00200594 -0.00235914 0.014081 -0.00218953 -0.00236413 0.014155 -0.00214847 -0.00219209 0.014155 -0.00212052 -0.00195252 0.0143531 -0.00177317 -0.0020554 0.014277 -0.00196648 
-0.00189901 0.014277 -0.00191427 -0.00207549 0.0143215 -0.00189712 -0.00187644 0.014221 -0.0019738 -0.00218382 0.014081 -0.00216104 -0.00183286 0.014001 -0.0020887 -0.00201136 0.014081 -0.00211853 -0.0021753 0.0139174 -0.00220276 -0.00217817 0.014001 -0.00218872 -0.00182778 0.0139174 -0.0021021 -0.0020035 0.014001 -0.00214567 -0.00174733 0.014277 -0.00184965 -0.0016972 0.014155 -0.0019553 -0.00167947 0.014081 -0.00199266 -0.00154772 0.0143531 -0.00156071 -0.00171825 0.014371 -0.00157054 -0.00142562 0.0143531 -0.00146897 -0.00146199 0.014277 -0.00168491 -0.00137586 0.0143215 -0.00152992 -0.0012122 0.014001 -0.00173036 -0.00135602 0.014001 -0.00183843 -0.00128992 0.014221 -0.00163517 -0.00123007 0.014081 -0.00170848 -0.00126757 0.014371 -0.00120258 -0.00114662 0.0143215 -0.00130973 -0.0012568 0.0143215 -0.00142445 -0.00116267 0.014221 -0.00152244 -0.000942199 0.0139174 -0.00149083 -0.00107755 0.014001 -0.00161107 -0.00120552 0.0143531 -0.00125755 -0.000974074 0.014081 -0.00146259 -0.00100503 0.014155 -0.00143516 -0.00104491 0.014221 -0.00139984 -0.000952927 0.014001 -0.00148133 -0.000894629 0.014277 -0.00109422 -0.00102217 0.0143531 -0.00101357 -0.000988288 0.014277 -0.00122991 -0.000839151 0.014001 -0.00134198 -0.000861737 0.014081 -0.001325 -0.000760837 0.014081 -0.00117882 -0.000945811 0.0143531 -0.000881304 -0.000880341 0.0143531 -0.000743328 -0.000807947 0.0143215 -0.000774172 -0.000751544 0.0143215 -0.00062545 -0.000682943 0.014221 -0.000827431 -0.00059588 0.014081 -0.000864525 -0.000672026 0.014081 -0.001025 -0.000826184 0.0143531 -0.000600531 -0.000683045 0.014277 -0.000648318 -0.00074151 0.014277 -0.000802478 -0.000622661 0.014221 -0.000668477 -0.000572125 0.014155 -0.000685349 -0.000783695 0.0143531 -0.000453839 -0.000707291 0.0143215 -0.000472671 -0.000637175 0.014277 -0.000489953 -0.000575363 0.014221 -0.000505188 -0.000523635 0.014155 -0.000517938 -0.00044213 0.0139174 -0.000538027 -0.000506096 0.014001 -0.000707392 -0.000834991 0.014371 -0.000291004 -0.000541477 0.014221 -0.000339352 -0.000456045 0.014001 -0.000534597 -0.000734748 0.0143531 -0.000152718 -0.000604286 0.014277 -0.000328969 -0.00038415 0.0139174 -0.000181625 -0.00688225 0.015125 -0.000159134 -0.00693989 0.01575 -0.000472142 -0.00693989 0.017875 -0.000472142 -0.00690394 0.01575 -0.000316951 -0.00690394 0.017875 -0.000316951 -0.00688225 0.01575 -0.000159134 -0.00693989 0.01575 0.000472146 -0.00698981 0.01575 0.000623425 -0.00705328 0.017875 0.000769538 -0.00721864 0.017875 0.00104148 -0.00731918 0.017875 0.00116505 -0.00743053 0.01575 0.00127897 -0.00755178 0.01575 0.00138228 -0.00768193 0.017875 0.00147415 -0.00781988 0.017875 0.0015538 -0.00811462 0.01575 0.00167392 -0.00842624 0.01575 0.00173868 -0.00858516 0.017875 0.00174955 -0.00874442 0.01575 0.00174592 -0.00890269 0.01575 0.00172783 -0.00905866 0.017875 0.00169542 -0.00963419 0.017875 0.0014297 -0.0101601 0.01575 0.000840277 -0.0103732 0.01575 7.96525e-05 -0.0103732 0.01575 -7.96483e-05 -0.0103732 0.017875 7.96527e-05 -0.0103298 0.01575 -0.000394956 -0.0103298 0.017875 -0.000394956 -0.0102301 0.01575 -0.0006972 -0.00998249 0.017875 -0.0011044 -0.00963419 0.01575 -0.0014297 -0.00976002 0.017875 -0.001332 -0.0095 0.017875 -0.00151554 -0.00921104 0.017875 -0.00164895 -0.00905866 0.017875 -0.00169541 -0.00890269 0.01575 -0.00172783 -0.00890269 0.017875 -0.00172782 -0.00874442 0.01575 -0.00174592 -0.00874442 0.017875 -0.00174592 -0.00858516 0.017875 -0.00174954 -0.00826895 0.01575 -0.0017134 -0.00826895 0.017875 -0.00171339 -0.00796452 0.01575 -0.00162057 
-0.00796452 0.017875 -0.00162057 -0.00768193 0.01575 -0.00147415 -0.00731918 0.017875 -0.00116504 -0.00712977 0.01575 -0.00090927 -0.00698981 0.015125 -0.000623421 -0.00698981 0.01575 -0.000623421 -0.00705328 0.01575 -0.000769534 -0.00705328 0.015125 -0.000769534 -0.00712977 0.015125 -0.00090927 -0.00721864 0.01575 -0.00104147 -0.00731918 0.015125 -0.00116504 -0.00731918 0.01575 -0.00116504 -0.00743053 0.01575 -0.00127896 -0.00755178 0.01575 -0.00138228 -0.00768193 0.015125 -0.00147415 -0.00781988 0.01575 -0.0015538 -0.00781988 0.015125 -0.0015538 -0.00796452 0.015125 -0.00162057 -0.00811462 0.01575 -0.00167392 -0.00811462 0.015125 -0.00167392 -0.00826895 0.015125 -0.0017134 -0.00842624 0.01575 -0.00173867 -0.00842624 0.017875 -0.00173867 -0.00858516 0.01575 -0.00174954 -0.00858516 0.015125 -0.00174954 -0.00874442 0.015125 -0.00174592 -0.00905866 0.015125 -0.00169541 -0.00921104 0.01575 -0.00164895 -0.00905866 0.01575 -0.00169541 -0.00935856 0.01575 -0.00158883 -0.0095 0.01575 -0.00151554 -0.0095 0.015125 -0.00151554 -0.00976002 0.01575 -0.001332 -0.00976002 0.015125 -0.001332 -0.00987644 0.01575 -0.00122327 -0.00987644 0.015125 -0.00122327 -0.0100773 0.017875 -0.000976382 -0.00998249 0.01575 -0.0011044 -0.0101601 0.017875 -0.000840273 -0.0100773 0.01575 -0.000976383 -0.0101601 0.01575 -0.000840273 -0.0102301 0.017875 -0.000697199 -0.0101601 0.015125 -0.000840273 -0.0102301 0.015125 -0.0006972 -0.0102869 0.015125 -0.00054835 -0.0102869 0.01575 -0.00054835 -0.0103587 0.01575 -0.00023829 -0.0103732 0.015125 7.96524e-05 -0.0103587 0.01575 0.000238294 -0.0103298 0.017875 0.00039496 -0.0102869 0.01575 0.000548354 -0.0103298 0.01575 0.00039496 -0.0102869 0.015125 0.000548354 -0.0102301 0.017875 0.000697204 -0.0102301 0.01575 0.000697204 -0.0101601 0.017875 0.000840277 -0.0100773 0.01575 0.000976387 -0.0100773 0.015125 0.000976387 -0.00998249 0.01575 0.00110441 -0.00987644 0.017875 0.00122327 -0.00987644 0.01575 0.00122327 -0.00976002 0.01575 0.00133201 -0.0095 0.017875 0.00151555 -0.00963419 0.01575 0.0014297 -0.0095 0.01575 0.00151555 -0.00935856 0.017875 0.00158884 -0.00935856 0.01575 0.00158884 -0.00935856 0.015125 0.00158884 -0.00921104 0.01575 0.00164896 -0.00921104 0.015125 0.00164896 -0.00905866 0.01575 0.00169542 -0.00905866 0.015125 0.00169542 -0.00890269 0.017875 0.00172783 -0.00890269 0.015125 0.00172783 -0.00874442 0.015125 0.00174592 -0.00858516 0.015125 0.00174955 -0.00858516 0.01575 0.00174955 -0.00842624 0.015125 0.00173868 -0.00826895 0.01575 0.0017134 -0.00796452 0.01575 0.00162058 -0.00811462 0.015125 0.00167392 -0.00781988 0.01575 0.0015538 -0.00768193 0.01575 0.00147415 -0.00743053 0.015125 0.00127897 -0.00731918 0.01575 0.00116505 -0.00721864 0.01575 0.00104148 -0.00705328 0.01575 0.000769538 -0.00712977 0.01575 0.000909274 -0.00705328 0.015125 0.000769538 -0.00698981 0.015125 0.000623425 -0.00690394 0.01575 0.000316955 -0.00690394 0.015125 0.000316955 -0.00688225 0.01575 0.000159138 -0.006875 0.01575 2.06536e-09 -0.00688225 0.015125 0.000159138 -0.006875 0.015125 1.9834e-09 -0.00688225 0.017875 -0.000159133 -0.0103587 0.017875 -0.000238289 -0.00705328 0.017875 -0.000769533 -0.00712977 0.017875 -0.000909269 -0.00743053 0.017875 -0.00127896 -0.00963419 0.017875 -0.0014297 -0.0102869 0.017875 -0.000548349 -0.00698981 0.017875 -0.000623421 -0.00721864 0.017875 -0.00104147 -0.00987644 0.017875 -0.00122327 -0.00755178 0.017875 -0.00138228 -0.00768193 0.017875 -0.00147415 -0.00781988 0.017875 -0.0015538 -0.00935856 0.017875 -0.00158883 -0.00811462 0.017875 -0.00167392 -0.0103732 
0.017875 -7.96481e-05 -0.006875 0.017875 2.34402e-09 -0.0103587 0.017875 0.000238294 -0.00712977 0.017875 0.000909274 -0.00743053 0.017875 0.00127897 -0.00976002 0.017875 0.00133201 -0.00755178 0.017875 0.00138229 -0.00826895 0.017875 0.0017134 -0.00842624 0.017875 0.00173868 -0.00874442 0.017875 0.00174592 -0.00688225 0.017875 0.000159138 -0.00690394 0.017875 0.000316956 -0.00693989 0.017875 0.000472146 -0.0102869 0.017875 0.000548354 -0.00698981 0.017875 0.000623425 -0.0100773 0.017875 0.000976387 -0.00998249 0.017875 0.00110441 -0.00796452 0.017875 0.00162058 -0.00921104 0.017875 0.00164896 -0.00811462 0.017875 0.00167392 -0.00454508 0.013625 0.00117294 -0.00462891 0.013875 0.00102316 -0.00434448 0.013625 0.00145117 -0.00419959 0.013875 0.00160723 -0.00410391 0.013625 0.00169568 -0.00382897 0.013625 0.00190078 -0.00348089 0.0136226 0.00208086 -0.00339117 0.0136034 0.00211554 -0.0033474 0.0135869 0.00213088 -0.00326791 0.013875 0.0021562 -0.00330519 0.0135659 0.00214473 -0.00326489 0.0135407 0.00215709 -0.00322685 0.0135115 0.00216801 -0.0031915 0.0134785 0.00217752 -0.00313067 0.013403 0.00219244 -0.00309157 0.013875 0.0022011 -0.00310574 0.0133609 0.00219804 -0.00306844 0.0132704 0.00220587 -0.00308489 0.0133165 0.0022025 -0.00304687 0.009875 0.0022101 -0.00218772 0.013875 0.0022071 -0.00215321 0.009875 0.00219998 -0.00164396 0.009875 0.00202486 -0.0011909 0.009875 0.00173374 -0.00105698 0.009875 0.00161363 -0.000945434 0.013875 0.00149719 -0.000726195 0.013875 0.00120708 -0.000440932 0.013875 0.000540695 -0.000375 0.013875 1.81949e-09 -0.00040372 0.009875 -0.000358346 -0.000439446 0.009875 -0.000534655 -0.000552496 0.009875 -0.000875913 -0.000718452 0.009875 -0.00119481 -0.000933081 0.009875 -0.00148321 -0.00107191 0.013875 -0.001628 -0.0011909 0.009875 -0.00173374 -0.00133399 0.009875 -0.00184277 -0.00150864 0.013875 -0.00195352 -0.0023675 0.013875 -0.00223521 -0.00250962 0.009875 -0.00224704 -0.0026895 0.009875 -0.00224907 -0.00286897 0.009875 -0.00223673 -0.00304929 0.0131741 -0.00220963 -0.00305649 0.0132226 -0.00220824 -0.00309157 0.013875 -0.00220109 -0.00348074 0.0136226 -0.00208091 -0.00382897 0.013625 -0.00190078 -0.00443792 0.013875 -0.0013326 -0.00434448 0.013625 -0.00145117 -0.00453968 0.013875 -0.00118174 -0.00443792 0.013875 0.0013326 -0.00392013 0.013875 0.00183988 -0.00376722 0.013875 0.00193852 -0.00368057 0.013625 0.00198703 -0.00344004 0.013875 0.00209719 -0.0030493 0.0131742 0.00220963 -0.00291218 0.013875 0.0022316 -0.0026895 0.009875 0.00224908 -0.0023675 0.013875 0.00223522 -0.00233047 0.009875 0.00223064 -0.0020108 0.013875 0.00216455 -0.00148533 0.009875 0.00194002 -0.00120854 0.013875 0.00174818 -0.000933081 0.009875 0.00148321 -0.000718452 0.009875 0.00119481 -0.000634866 0.013875 0.0010497 -0.000629095 0.009875 0.00103868 -0.000552496 0.009875 0.000875916 -0.000491766 0.013875 0.000715413 -0.000489144 0.009875 0.000707548 -0.000404384 0.013875 0.000362441 -0.000491766 0.013875 -0.000715409 -0.000829945 0.013875 -0.00135657 -0.000945434 0.013875 -0.00149718 -0.00105698 0.009875 -0.00161363 -0.00135444 0.013875 -0.00185692 -0.00148533 0.009875 -0.00194001 -0.00164396 0.009875 -0.00202486 -0.00233047 0.009875 -0.00223064 -0.00291218 0.013875 -0.0022316 -0.00319149 0.0134785 -0.00217752 -0.00334739 0.0135869 -0.00213088 -0.00360684 0.013875 -0.00202447 -0.00376722 0.013875 -0.00193851 -0.00406456 0.013875 -0.00172921 -0.00419959 0.013875 -0.00160722 -0.00422886 0.013625 -0.00157802 -0.000903942 0.014375 0.000316955 -0.00105328 0.014375 0.000769538 -0.00168998 
0.014375 0.000354607 -0.00173954 0.014375 0.000464725 -0.00131918 0.014375 0.00116505 -0.00155178 0.014375 0.00138228 -0.00196188 0.014375 0.000748513 -0.00205694 0.014375 0.000822986 -0.00216028 0.014375 0.000885458 -0.00196452 0.014375 0.00162058 -0.0022704 0.014375 0.000935018 -0.00238568 0.014375 0.000970944 -0.00258516 0.014375 0.00174955 -0.00226895 0.014375 0.0017134 -0.00250446 0.014375 0.000992711 -0.002625 0.014375 0.001 -0.00305866 0.014375 0.00169542 -0.00286432 0.014375 0.000970944 -0.00290269 0.014375 0.00172783 -0.0029796 0.014375 0.000935018 -0.00319306 0.014375 0.000822986 -0.00308972 0.014375 0.000885458 -0.00344798 0.014375 0.000568067 -0.0040773 0.014375 0.000976387 -0.00416007 0.014375 0.000840277 -0.00337351 0.014375 0.000663125 -0.00398249 0.014375 0.00110441 -0.00432985 0.014375 0.00039496 -0.00359594 0.014375 0.000239317 -0.00361771 0.014375 0.000120539 -0.00432985 0.014375 -0.000394956 -0.00428687 0.014375 -0.00054835 -0.00423012 0.014375 -0.0006972 -0.00416007 0.014375 -0.000840273 -0.00351046 0.014375 -0.000464721 -0.00344798 0.014375 -0.000568063 -0.00337351 0.014375 -0.000663121 -0.00387644 0.014375 -0.00122327 -0.00328812 0.014375 -0.000748509 -0.0035 0.014375 -0.00151554 -0.00376002 0.014375 -0.001332 -0.00319306 0.014375 -0.000822982 -0.00321104 0.014375 -0.00164896 -0.00274442 0.014375 -0.00174592 -0.00242623 0.014375 -0.00173867 -0.00216028 0.014375 -0.000885454 -0.00181989 0.014375 -0.0015538 -0.00196452 0.014375 -0.00162057 -0.00168193 0.014375 -0.00147415 -0.00143053 0.014375 -0.00127896 -0.00187649 0.014375 -0.000663121 -0.00131918 0.014375 -0.00116504 -0.00196188 0.014375 -0.000748509 -0.00180202 0.014375 -0.000568063 -0.00105328 0.014375 -0.000769534 -0.00173954 0.014375 -0.000464721 -0.000989811 0.014375 -0.000623421 -0.000939895 0.014375 -0.000472142 -0.000903942 0.014375 -0.000316951 -0.00165406 0.014375 -0.000239314 -0.000882251 0.014375 -0.000159134 -0.00163229 0.014375 -0.000120535 -0.000882251 0.014375 0.000159138 -0.0031298 0.013331 0.00229612 -0.00327586 0.013527 0.00223509 -0.00343582 0.0136154 0.00209883 -0.00360395 0.013621 0.00239619 -0.0038197 0.013527 0.00324314 -0.00462615 0.0131674 0.00447075 -0.00432649 0.013125 0.00418603 -0.00440994 0.0131674 0.00426752 -0.00420406 0.0131674 0.00405385 -0.00421462 0.013251 0.00404416 -0.00402002 0.013251 0.0038211 -0.00385929 0.013331 0.00357175 -0.00376693 0.013471 0.00327875 -0.00372277 0.013405 0.00330856 -0.00352913 0.0136031 0.0024319 -0.0034151 0.0136031 0.00217691 -0.00309051 0.0131674 0.00231254 -0.00305653 0.0132228 0.00220823 -0.00334249 0.0135715 0.00220725 -0.00315935 0.0134423 0.00218563 -0.00321712 0.013471 0.00225963 -0.0034916 0.013621 0.00214495 -0.00376525 0.013625 0.00257543 -0.0037943 0.0136031 0.0029234 -0.00407359 0.013405 0.00377665 -0.00485213 0.0131674 0.00466303 -0.0037286 0.013621 0.00264157 -0.00404375 0.0135715 0.0034335 -0.00416358 0.013527 0.00370198 -0.00443987 0.013331 0.00423722 -0.00446893 0.013405 0.00420779 -0.0046357 0.013251 0.00446006 -0.00508735 0.0131674 0.00484391 -0.00386522 0.013621 0.00288048 -0.00487892 0.013331 0.00462993 -0.00509581 0.013251 0.00483234 -0.00533123 0.0131674 0.00501293 -0.00421984 0.013625 0.00329312 -0.00410672 0.0136031 0.00338631 -0.00421915 0.0135715 0.00365586 -0.00471763 0.013471 0.00436847 -0.00547948 0.013125 0.00510938 -0.00417306 0.013621 0.00333659 -0.00427971 0.0136031 0.00360561 -0.00455111 0.013527 0.0041246 -0.00513686 0.013405 0.00477612 -0.00535462 0.013331 0.00497734 -0.00559044 0.013251 0.00515734 -0.00573543 
0.013125 0.00525837 -0.00599842 0.013125 0.00539454 -0.00558317 0.0131674 0.00516969 -0.00584255 0.0131674 0.00531379 -0.00452444 0.013621 0.00376006 -0.00537733 0.013405 0.00494278 -0.00560477 0.013331 0.00513299 -0.0058492 0.013251 0.0053011 -0.00610874 0.0131674 0.00544489 -0.0062678 0.013125 0.00551757 -0.00471541 0.013621 0.00395825 -0.00477954 0.013625 0.00393224 -0.00491594 0.013621 0.00414675 -0.00512555 0.013621 0.0043251 -0.00544154 0.013527 0.00484505 -0.00568504 0.013527 0.00499655 -0.00588149 0.013405 0.00523943 -0.00639702 0.0133309 0.00552318 -0.00665893 0.0131674 0.00566683 -0.00666363 0.013251 0.00565329 -0.00694158 0.0131674 0.00575713 -0.00638109 0.0131674 0.00556267 -0.00611476 0.013251 0.00543188 -0.00612661 0.0133309 0.00540624 -0.00586231 0.013331 0.00527607 -0.00520583 0.013527 0.00468169 -0.00520457 0.013625 0.00430705 -0.00552441 0.0136031 0.00471893 -0.00616631 0.013471 0.00532033 -0.0064125 0.013405 0.00548482 -0.00739601 0.013125 0.00587278 -0.00543099 0.013625 0.00447754 -0.00534373 0.013621 0.00449287 -0.00556993 0.013621 0.00464965 -0.00576157 0.0136031 0.00486649 -0.00643243 0.013471 0.00543541 -0.00695354 0.0133309 0.00571626 -0.00694561 0.013251 0.00574337 -0.00768729 0.013125 0.00592627 -0.00722836 0.0131674 0.00583333 -0.00751855 0.0131674 0.00589527 -0.00600574 0.0136031 0.00500215 -0.00596924 0.0135715 0.00507186 -0.00619301 0.013527 0.00526254 -0.00668645 0.013405 0.00558753 -0.00696514 0.013405 0.00567656 -0.00580361 0.013621 0.00479504 -0.0060442 0.013621 0.0049287 -0.00622331 0.0135715 0.00519699 -0.00781144 0.0131674 0.00594277 -0.00827599 0.013125 0.00598984 -0.00615865 0.013625 0.00491601 -0.00648326 0.0135715 0.0053094 -0.00781722 0.0133309 0.00590058 -0.00810757 0.013251 0.00596145 -0.00754386 0.013471 0.0057604 -0.00782283 0.013405 0.0058596 -0.00840248 0.0131674 0.00599407 -0.00667758 0.013625 0.00514369 -0.00654371 0.013621 0.00515954 -0.00704031 0.0136031 0.00541948 -0.00729194 0.0135715 0.00556774 -0.0075556 0.013527 0.00569783 -0.00869901 0.013251 0.00598341 -0.00869919 0.0131674 0.00599774 -0.00886809 0.013125 0.00599508 -0.00680141 0.013621 0.00525616 -0.00694515 0.013625 0.00523719 -0.00756892 0.0135715 0.00562686 -0.00783869 0.013527 0.00574375 -0.00811359 0.013405 0.0058921 -0.00758344 0.0136031 0.00554952 -0.00784848 0.0135715 0.0056722 -0.00869866 0.0133309 0.00595516 -0.00899482 0.013251 0.00597243 -0.00899307 0.0133309 0.00594423 -0.00928973 0.013251 0.00594683 -0.00945782 0.013125 0.00594192 -0.0095853 0.0131674 0.00592083 -0.00732957 0.013621 0.00541059 -0.0081237 0.013527 0.0057756 -0.00869749 0.013471 0.00586053 -0.00987693 0.0131674 0.0058661 -0.0078704 0.013621 0.0055121 -0.00813675 0.0136031 0.00562526 -0.00841262 0.0135715 0.00572116 -0.00957848 0.0133309 0.0058788 -0.0101655 0.0131674 0.005797 -0.0100394 0.013125 0.0058309 -0.00814392 0.013621 0.00554267 -0.00869581 0.0135715 0.00572467 -0.00898723 0.013471 0.00584978 -0.00986805 0.0133309 0.00582445 -0.00805277 0.013625 0.00547015 -0.00841553 0.0136031 0.00564253 -0.00869484 0.0136031 0.00564598 -0.00897883 0.0135715 0.00571416 -0.009269 0.013527 0.00576144 -0.0104459 0.013251 0.00570008 -0.00841861 0.013621 0.00555968 -0.00897396 0.0136031 0.00563562 -0.0104373 0.0133309 0.00567316 -0.0108843 0.013125 0.00555838 -0.0107306 0.0131674 0.00561647 -0.00925224 0.0136031 0.00561147 -0.00955314 0.013527 0.00572254 -0.0101439 0.013405 0.00571588 -0.0110001 0.013251 0.00549231 -0.0112752 0.0131674 0.00538099 -0.00924303 0.013621 0.00552908 -0.0101139 0.013527 0.00560286 
-0.0104248 0.013405 0.00563377 -0.0104085 0.013471 0.00558302 -0.0107157 0.0133309 0.0055766 -0.0116797 0.013125 0.00516418 -0.0114212 0.013125 0.00530862 -0.011538 0.0131674 0.00524334 -0.00946575 0.013625 0.00543536 -0.0100954 0.0135715 0.00553307 -0.0106825 0.013471 0.00548798 -0.0109725 0.013405 0.00542842 -0.0115311 0.013251 0.00523082 -0.0100752 0.0136031 0.00545702 -0.0106601 0.013527 0.00542837 -0.0117862 0.013251 0.0050807 -0.0119308 0.013125 0.00500716 -0.0120418 0.0131674 0.00492993 -0.0097862 0.013621 0.00544098 -0.0109261 0.013527 0.00532109 -0.0109513 0.013471 0.00537952 -0.0112145 0.013471 0.00525789 -0.0114973 0.013405 0.00516996 -0.0115173 0.013331 0.00520612 -0.0100206 0.013625 0.00532 -0.0100539 0.013621 0.0053769 -0.0106348 0.0135715 0.00536075 -0.0120175 0.013331 0.00489493 -0.0122814 0.0131674 0.00475493 -0.010578 0.013621 0.00520944 -0.0114405 0.013527 0.00506774 -0.0122554 0.013331 0.00472117 -0.0125028 0.013251 0.00455737 -0.0127332 0.0131674 0.00437047 -0.0111197 0.0136031 0.0050654 -0.0114054 0.0135715 0.00500462 -0.0116876 0.013527 0.00492231 -0.0122302 0.013405 0.00468838 -0.0124845 0.013331 0.00453585 -0.0127234 0.013251 0.00436003 -0.0129443 0.0131674 0.00416195 -0.0113303 0.013625 0.00478867 -0.0113269 0.013621 0.00486336 -0.0115641 0.013621 0.00472379 -0.0121149 0.0135715 0.00453844 -0.0121589 0.013527 0.00459568 -0.0124231 0.013471 0.00446378 -0.0123819 0.013527 0.00441529 -0.0133231 0.013251 0.00370603 -0.0134314 0.013125 0.00359148 -0.0135123 0.0131674 0.00347747 -0.0133343 0.0131674 0.00371491 -0.012704 0.013331 0.00433944 -0.0119273 0.013527 0.00476483 -0.0116495 0.0135715 0.00486099 -0.0113672 0.0136031 0.00493583 -0.0116079 0.0136031 0.00479418 -0.0120669 0.0136031 0.00447606 -0.0123351 0.0135715 0.00436029 -0.0125956 0.013527 0.0042241 -0.0128455 0.013471 0.00406674 -0.0133009 0.013331 0.00368853 -0.0136783 0.0131674 0.00323152 -0.0115734 0.013625 0.00464294 -0.0117941 0.013621 0.00457266 -0.0122841 0.0136031 0.00430036 -0.0135006 0.013251 0.00346916 -0.013762 0.013125 0.00310024 -0.0120355 0.013625 0.00431492 -0.0120164 0.013621 0.00441034 -0.0125462 0.0135715 0.00417148 -0.0124923 0.0136031 0.00411415 -0.0127477 0.0135715 0.00397246 -0.0134439 0.013405 0.0034288 -0.0136424 0.013331 0.00320858 -0.0139087 0.013125 0.00284299 -0.0139728 0.0131674 0.00271651 -0.0122304 0.013621 0.00423722 -0.0122533 0.013625 0.00413349 -0.0141006 0.0131674 0.00244872 -0.0140425 0.013125 0.00257881 -0.0124355 0.013621 0.00405374 -0.0136076 0.013405 0.00318629 -0.0139348 0.013331 0.00269723 -0.0126313 0.013621 0.00386034 -0.0130581 0.0136031 0.00349703 -0.0132898 0.0135715 0.00331914 -0.013509 0.013527 0.00312329 -0.0137128 0.013471 0.00290954 -0.0142703 0.013125 0.00203226 -0.014215 0.0131674 0.00217494 -0.0126593 0.013625 0.00373822 -0.0132257 0.0136031 0.00327352 -0.0134482 0.0135715 0.00308439 -0.0138505 0.013471 0.00265437 -0.014024 0.013405 0.00241445 -0.0140617 0.013331 0.00243134 -0.0144432 0.013125 0.00146592 -0.0130226 0.013625 0.00330326 -0.0129931 0.013621 0.00344569 -0.0133819 0.0136031 0.00304199 -0.0137937 0.013527 0.00262554 -0.0143021 0.013251 0.0018913 -0.0144025 0.0131674 0.00161208 -0.0135265 0.0136031 0.00280302 -0.0139753 0.013471 0.0023927 -0.0142361 0.013405 0.0018693 -0.0142753 0.013331 0.00188237 -0.0144752 0.0131674 0.00132439 -0.0133121 0.013621 0.00299733 -0.0139172 0.013527 0.00236671 -0.0141855 0.013471 0.00185246 -0.0144612 0.013251 0.00132123 -0.0145335 0.0131674 0.00103346 -0.0134546 0.013621 0.00276187 -0.0136592 0.0136031 0.00255719 
-0.0143217 0.013405 0.00158952 -0.0145194 0.013251 0.00103099 -0.0135852 0.013621 0.00251965 -0.0137795 0.0136031 0.00230511 -0.0142703 0.013471 0.0015752 -0.0143413 0.013471 0.00129409 -0.0145924 0.013251 0.000443662 -0.0146067 0.0131674 0.000444725 -0.013982 0.0136031 0.00178464 -0.014209 0.013527 0.00155809 -0.0142792 0.013527 0.00128004 -0.014625 0.013125 1.72114e-09 -0.0137192 0.013625 0.00207345 -0.0139033 0.013621 0.00175844 -0.0140637 0.0136031 0.00151754 -0.0141395 0.0135715 0.00153869 -0.0142088 0.0135715 0.00126409 -0.0144941 0.013405 0.000729641 -0.0144412 0.013471 0.000723069 -0.014523 0.013405 0.000438501 -0.0145642 0.013331 0.000441567 -0.014607 0.013251 0.000148009 -0.0143356 0.013527 0.000998848 -0.0145788 0.013331 0.000147311 -0.0145788 0.013331 -0.000147307 -0.0146067 0.0131674 -0.000444721 -0.0145774 0.0131674 -0.000739994 -0.0141321 0.0136031 0.00124672 -0.0142645 0.0135715 0.000986406 -0.014187 0.0136031 0.000972848 -0.0144842 0.013471 0.00014497 -0.0145642 0.013331 -0.000441564 -0.0145632 0.013251 -0.000738226 -0.0145335 0.0131674 -0.00103346 -0.0141053 0.013621 0.000958565 -0.0143343 0.0135715 0.000424477 -0.0145351 0.013331 -0.00073474 -0.0145194 0.013251 -0.00103099 -0.0144752 0.0131674 -0.00132439 -0.0145084 0.013125 -0.00117704 -0.014146 0.013621 0.00068637 -0.0142559 0.0136031 0.000418642 -0.0144205 0.013527 0.000143395 -0.0143484 0.0135715 0.000141609 -0.0144941 0.013405 -0.000729638 -0.0144612 0.013251 -0.00132122 -0.0144025 0.0131674 -0.00161208 -0.0144432 0.013125 -0.00146592 -0.0141086 0.013625 0.000424777 -0.0144412 0.013471 -0.000723065 -0.0144336 0.013331 -0.00131499 -0.0143637 0.013125 -0.00175122 -0.0143157 0.0131674 -0.00189583 -0.0142703 0.013125 -0.00203226 -0.0141232 0.013625 0.000141719 -0.0142697 0.0136031 0.000139663 -0.0143983 0.013471 -0.00100981 -0.0143615 0.013331 -0.00160063 -0.0143887 0.013251 -0.00160823 -0.014215 0.0131674 -0.00217493 -0.0141868 0.013621 -0.000137609 -0.0142016 0.013251 -0.00216974 -0.0141006 0.0131674 -0.00244872 -0.0141232 0.013625 -0.000141716 -0.0142559 0.0136031 -0.000418639 -0.0143356 0.013527 -0.000998845 -0.0140794 0.013625 -0.000706704 -0.0140512 0.013621 -0.00122841 -0.0140637 0.0136031 -0.00151753 -0.0141251 0.013527 -0.00183233 -0.0140278 0.013527 -0.00210209 -0.013898 0.013405 -0.00267849 -0.0135123 0.0131674 -0.00347746 -0.0136027 0.013125 -0.00334994 -0.0140617 0.013331 -0.00243133 -0.0141368 0.013405 -0.0021445 -0.0141053 0.013621 -0.000958561 -0.0141321 0.0136031 -0.00124671 -0.0139838 0.013621 -0.00149525 -0.0136424 0.013331 -0.00320857 -0.0134314 0.013125 -0.00359147 -0.013982 0.0136031 -0.00178464 -0.0139605 0.0135715 -0.00207591 -0.0138871 0.0136031 -0.00204738 -0.0138505 0.013471 -0.00265437 -0.0137128 0.013471 -0.00290954 -0.0136076 0.013405 -0.00318629 -0.0133231 0.013251 -0.00370603 -0.0131449 0.0131674 -0.00394325 -0.0139033 0.013621 -0.00175844 -0.0131341 0.013251 -0.00393383 -0.0138099 0.013621 -0.00201732 -0.0137192 0.013625 -0.00207344 -0.0135948 0.0135715 -0.00284208 -0.012934 0.013251 -0.00415201 -0.0129443 0.0131674 -0.00416195 -0.0137038 0.013621 -0.00227126 -0.0135852 0.013621 -0.00251964 -0.0135265 0.0136031 -0.00280302 -0.0134482 0.0135715 -0.00308438 -0.0134005 0.013471 -0.00339791 -0.0132266 0.013471 -0.00362992 -0.0130816 0.013405 -0.00388807 -0.0127332 0.0131674 -0.00437047 -0.0134788 0.013625 -0.00258661 -0.0133486 0.013527 -0.003361 -0.0132898 0.0135715 -0.00331913 -0.0128839 0.013405 -0.0041037 -0.0125121 0.0131674 -0.00456828 -0.0124083 0.013125 -0.00465693 -0.0122814 
0.0131674 -0.00475492 -0.0129935 0.013527 -0.00381119 -0.0128455 0.013471 -0.00406674 -0.012704 0.013331 -0.00433944 -0.0120418 0.0131674 -0.00492993 -0.0119308 0.013125 -0.00500715 -0.0121739 0.013125 -0.00483793 -0.0131869 0.013625 -0.00307232 -0.0131581 0.013621 -0.00322545 -0.0130581 0.0136031 -0.00349703 -0.0127997 0.013527 -0.00402257 -0.0126757 0.013405 -0.0043093 -0.0120336 0.013251 -0.00491815 -0.0117938 0.0131674 -0.00509287 -0.0129391 0.0135715 -0.00376372 -0.0122302 0.013405 -0.00468838 -0.0120175 0.013331 -0.00489493 -0.011538 0.0131674 -0.00524334 -0.0130226 0.013625 -0.00330325 -0.0129931 0.013621 -0.00344568 -0.0128465 0.013625 -0.00352541 -0.0119939 0.013405 -0.00486093 -0.0112752 0.0131674 -0.00538099 -0.012691 0.0136031 -0.00391786 -0.0119636 0.013471 -0.00481714 -0.0117494 0.013405 -0.00502159 -0.0110058 0.0131674 -0.00550546 -0.0119273 0.013527 -0.00476482 -0.0112563 0.013331 -0.00534278 -0.0106073 0.013125 -0.00566308 -0.0108843 0.013125 -0.00555837 -0.0124614 0.013625 -0.00394108 -0.0122841 0.0136031 -0.00430036 -0.0122304 0.013621 -0.00423722 -0.0118862 0.0135715 -0.00470547 -0.0117213 0.013471 -0.00497636 -0.0116876 0.013527 -0.0049223 -0.0114973 0.013405 -0.00516996 -0.0109889 0.013331 -0.00546638 -0.0110001 0.013251 -0.00549231 -0.0104503 0.0131674 -0.00571372 -0.0112145 0.013471 -0.00525789 -0.0112381 0.013405 -0.00530568 -0.0107157 0.013331 -0.00557659 -0.0101655 0.0131674 -0.005797 -0.0103254 0.013125 -0.005754 -0.0118087 0.013625 -0.00448488 -0.0117941 0.013621 -0.00457265 -0.0116495 0.0135715 -0.00486099 -0.0114054 0.0135715 -0.00500461 -0.00987693 0.0131674 -0.00586609 -0.00975 0.013125 -0.00589359 -0.0100394 0.013125 -0.0058309 -0.0115734 0.013625 -0.00464294 -0.0106825 0.013471 -0.00548798 -0.0095853 0.0131674 -0.00592083 -0.00945782 0.013125 -0.00594192 -0.0115641 0.013621 -0.00472378 -0.0113303 0.013625 -0.00478866 -0.0111197 0.0136031 -0.0050654 -0.0104085 0.013471 -0.00558301 -0.0104248 0.013405 -0.00563376 -0.0101546 0.013331 -0.00575585 -0.00986805 0.013331 -0.00582445 -0.0113269 0.013621 -0.00486336 -0.0106348 0.0135715 -0.00536075 -0.0103892 0.013527 -0.00552237 -0.0101439 0.013405 -0.00571587 -0.0101303 0.013471 -0.00566438 -0.00957848 0.013331 -0.00587879 -0.00928973 0.013251 -0.00594683 -0.0108232 0.013625 -0.00504161 -0.0103672 0.0135715 -0.00545358 -0.0101139 0.013527 -0.00560286 -0.00928659 0.013331 -0.00591875 -0.00869919 0.0131674 -0.00599774 -0.00857197 0.013125 -0.00599976 -0.0108333 0.013621 -0.00510648 -0.0100752 0.0136031 -0.00545701 -0.0100954 0.0135715 -0.00553307 -0.00981993 0.0135715 -0.00559901 -0.009835 0.013527 -0.00566963 -0.00928199 0.013405 -0.00587765 -0.00899307 0.013331 -0.00594423 -0.00869866 0.013331 -0.00595516 -0.00869901 0.013251 -0.00598341 -0.00810633 0.0131674 -0.00597573 -0.0102928 0.013625 -0.00524104 -0.0097862 0.013621 -0.00544098 -0.00974465 0.013625 -0.00538483 -0.00951571 0.013621 -0.00549174 -0.00925224 0.0136031 -0.00561146 -0.00926098 0.0135715 -0.00568967 -0.00869749 0.013471 -0.00586053 -0.0086967 0.013527 -0.00579687 -0.00840758 0.013471 -0.00585694 -0.00811359 0.013405 -0.0058921 -0.00811001 0.013331 -0.00593331 -0.00710772 0.013125 -0.00580498 -0.00739601 0.013125 -0.00587278 -0.00722836 0.0131674 -0.00583333 -0.00781144 0.0131674 -0.00594277 -0.00840406 0.013331 -0.00595151 -0.00898723 0.013471 -0.00584977 -0.009269 0.013527 -0.00576143 -0.00952898 0.0136031 -0.00557358 -0.00897396 0.0136031 -0.00563562 -0.00781722 0.013331 -0.00590058 -0.0075264 0.013331 -0.00585341 -0.00694158 0.0131674 
-0.00575712 -0.00924303 0.013621 -0.00552907 -0.00896884 0.013621 -0.00555287 -0.00840994 0.013527 -0.00579332 -0.00811819 0.013471 -0.00583902 -0.00694561 0.013251 -0.00574337 -0.00869484 0.0136031 -0.00564598 -0.00812994 0.0135715 -0.00570366 -0.00695354 0.013331 -0.00571625 -0.00666363 0.013251 -0.00565329 -0.00665893 0.0131674 -0.00566683 -0.00638109 0.0131674 -0.00556267 -0.0062678 0.013125 -0.00551757 -0.00869381 0.013621 -0.00556308 -0.00841861 0.013621 -0.00555968 -0.00726031 0.013471 -0.00569988 -0.00610874 0.0131674 -0.00544489 -0.00861866 0.013625 -0.00549999 -0.00756892 0.0135715 -0.00562685 -0.0075556 0.013527 -0.00569783 -0.00668645 0.013405 -0.00558752 -0.00667289 0.013331 -0.0056266 -0.00611476 0.013251 -0.00543188 -0.00599842 0.013125 -0.00539454 -0.00584255 0.0131674 -0.00531379 -0.00805277 0.013625 -0.00547015 -0.0078704 0.013621 -0.00551209 -0.00758344 0.0136031 -0.00554951 -0.00729194 0.0135715 -0.00556774 -0.0058492 0.013251 -0.00530109 -0.00759873 0.013621 -0.00546803 -0.00731027 0.0136031 -0.00549121 -0.00701823 0.0135715 -0.005495 -0.00670391 0.013471 -0.00553719 -0.00672478 0.013527 -0.00547704 -0.00558317 0.0131674 -0.00516968 -0.00547948 0.013125 -0.00510938 -0.00533123 0.0131674 -0.00501293 -0.00643243 0.013471 -0.00543541 -0.00616631 0.013471 -0.00532033 -0.00614396 0.013405 -0.00536869 -0.00586231 0.013331 -0.00527606 -0.00588149 0.013405 -0.00523942 -0.00559044 0.013251 -0.00515733 -0.00560477 0.013331 -0.00513298 -0.0052312 0.013125 -0.00494794 -0.00508735 0.0131674 -0.00484391 -0.00732957 0.013621 -0.00541059 -0.00721718 0.013625 -0.00531677 -0.00677424 0.0136031 -0.00533448 -0.00648326 0.0135715 -0.0053094 -0.00619301 0.013527 -0.00526254 -0.00535462 0.013331 -0.00497734 -0.0053391 0.013251 -0.00500095 -0.00509581 0.013251 -0.00483233 -0.00485213 0.0131674 -0.00466303 -0.00680141 0.013621 -0.00525615 -0.00622331 0.0135715 -0.00519698 -0.00593574 0.013527 -0.00513583 -0.00562574 0.013405 -0.00509734 -0.00537733 0.013405 -0.00494277 -0.00476002 0.013125 -0.00458933 -0.00462615 0.0131674 -0.00447074 -0.00654371 0.013621 -0.00515954 -0.00596924 0.0135715 -0.00507185 -0.00568504 0.013527 -0.00499655 -0.00432649 0.013125 -0.00418602 -0.00600574 0.0136031 -0.00500214 -0.00572166 0.0135715 -0.00493431 -0.00513686 0.013405 -0.00477612 -0.0046357 0.013251 -0.00446006 -0.00465454 0.013331 -0.004439 -0.00420406 0.0131674 -0.00405384 -0.0062911 0.013621 -0.0050503 -0.00615865 0.013625 -0.004916 -0.00544154 0.013527 -0.00484504 -0.00520583 0.013527 -0.00468168 -0.00516828 0.013471 -0.00473309 -0.00493845 0.013471 -0.00455635 -0.00468211 0.013405 -0.00440817 -0.00442002 0.013251 -0.00425732 -0.00421462 0.013251 -0.00404416 -0.00400899 0.0131674 -0.00383025 -0.0060442 0.013621 -0.0049287 -0.00552441 0.0136031 -0.00471893 -0.00402002 0.013251 -0.0038211 -0.00393482 0.013125 -0.00374195 -0.00590867 0.013625 -0.00478242 -0.0056659 0.013625 -0.00463613 -0.00471763 0.013471 -0.00436846 -0.00404176 0.013331 -0.00380306 -0.00365319 0.0131674 -0.00335551 -0.00382522 0.0131674 -0.00359728 -0.00543099 0.013625 -0.00447753 -0.00529483 0.0136031 -0.00455982 -0.00507341 0.0136031 -0.00438955 -0.00430519 0.013471 -0.0039611 -0.00407359 0.013405 -0.00377665 -0.00383669 0.013251 -0.00358869 -0.00512555 0.013621 -0.0043251 -0.00460185 0.0135715 -0.00407321 -0.00385929 0.013331 -0.00357175 -0.00368849 0.013331 -0.00333169 -0.00349333 0.0131674 -0.00310553 -0.00334603 0.0131674 -0.00284796 -0.00343403 0.013125 -0.00300896 -0.00329188 0.013125 -0.00274915 -0.00520457 0.013625 -0.00430704 
-0.00498722 0.013625 -0.00412512 -0.00491594 0.013621 -0.00414675 -0.00465715 0.0136031 -0.00401723 -0.00435212 0.013527 -0.00391808 -0.00389239 0.013405 -0.00354694 -0.00393502 0.013471 -0.00351499 -0.00372277 0.013405 -0.00330855 -0.00321165 0.0131674 -0.00258341 -0.00421915 0.0135715 -0.00365586 -0.00338351 0.013331 -0.00282774 -0.00309051 0.0131674 -0.00231253 -0.00304687 0.013125 -0.00221009 -0.00458207 0.013625 -0.00372891 -0.00446334 0.0136031 -0.00381609 -0.00427971 0.0136031 -0.00360561 -0.00356515 0.013405 -0.00306207 -0.00361073 0.013471 -0.00303449 -0.00325008 0.013331 -0.00256506 -0.00322458 0.013251 -0.00257723 -0.00310373 0.013251 -0.00230701 -0.00452444 0.013621 -0.00376006 -0.00341991 0.013405 -0.0028081 -0.00306839 0.0132702 -0.00220588 -0.00434351 0.013621 -0.00355267 -0.00366519 0.013527 -0.00300153 -0.00328741 0.013405 -0.00254725 -0.00333549 0.013471 -0.0025243 -0.0031298 0.013331 -0.00229612 -0.00308488 0.0133165 -0.0022025 -0.0040135 0.013621 -0.00311234 -0.00334249 0.0135715 -0.00220724 -0.00326474 0.0135406 -0.00215713 -0.00322677 0.0135114 -0.00216803 -0.00315926 0.0134422 -0.00218565 -0.0031057 0.0133608 -0.00219805 -0.00313057 0.0134028 -0.00219246 -0.0037286 0.013621 -0.00264156 -0.00376525 0.013625 -0.00257542 -0.00360395 0.013621 -0.00239619 -0.00330507 0.0135658 -0.00214476 -0.00363902 0.013625 -0.00232164 -0.0034916 0.013621 -0.00214494 -0.0034151 0.0136031 -0.00217691 -0.00339104 0.0136034 -0.00211558 -0.0034356 0.0136153 -0.00209891 -0.00352282 0.013527 -0.00275258 -0.00345812 0.0135715 -0.00246578 -0.00798085 0.013125 -0.00596532 -0.00840302 0.013251 -0.00597975 -0.00869815 0.013405 -0.0059138 -0.0100539 0.013621 -0.00537689 -0.010318 0.013621 -0.00529965 -0.0105606 0.013625 -0.00514816 -0.0139087 0.013125 -0.00284298 -0.0138319 0.0131674 -0.00297766 -0.0139728 0.0131674 -0.00271651 -0.0141753 0.013331 -0.00215949 -0.0142361 0.013405 -0.00186929 -0.0141855 0.013471 -0.00185246 -0.0142703 0.013471 -0.0015752 -0.014187 0.0136031 -0.000972845 -0.014146 0.013621 -0.000686366 -0.0142283 0.0136031 -0.000696594 -0.0141732 0.013621 -0.000412492 -0.0119636 0.013471 0.00481715 -0.0110831 0.013621 0.00499103 -0.0108232 0.013625 0.00504162 -0.0108333 0.013621 0.00510648 -0.00540658 0.013471 0.00489825 -0.00516828 0.013471 0.0047331 -0.00486067 0.0136031 0.00420854 -0.00480821 0.0135715 0.00426719 -0.00439534 0.013625 0.00351568 -0.00352282 0.013527 0.00275258 -0.00345812 0.0135715 0.00246579 -0.00341991 0.013405 0.0028081 -0.00339294 0.013527 0.00249689 -0.0034668 0.013471 0.00278281 -0.00382522 0.0131674 0.00359729 -0.00366507 0.013251 0.0033475 -0.00352976 0.013331 0.00308349 -0.00328741 0.013405 0.00254725 -0.00333549 0.013471 0.00252431 -0.00350559 0.013251 0.00309812 -0.00325008 0.013331 0.00256507 -0.00316797 0.013405 0.00228017 -0.00349333 0.0131674 0.00310554 -0.00335864 0.013251 0.00284115 -0.00322458 0.013251 0.00257724 -0.00334603 0.0131674 0.00284796 -0.00321165 0.0131674 0.00258341 -0.00310373 0.013251 0.00230701 -0.00316272 0.013125 0.00248265 -0.0142697 0.0136031 -0.000139659 -0.0141868 0.013621 0.000137612 -0.00417306 0.013621 -0.00333659 -0.00365563 0.0136031 0.00268093 -0.00366519 0.013527 0.00300153 -0.00358638 0.0135715 0.00271829 -0.00372697 0.0135715 0.00296414 -0.00338351 0.013331 0.00282774 -0.00394478 0.0136031 0.00315872 -0.00361073 0.013471 0.00303449 -0.00387956 0.0135715 0.00320274 -0.00356515 0.013405 0.00306208 -0.00365319 0.0131674 0.00335552 -0.0040135 0.013621 0.00311234 -0.00389239 0.013405 0.00354694 -0.00398596 0.013527 
0.00347681 -0.00368849 0.013331 0.0033317 -0.00393502 0.013471 0.00351499 -0.00411459 0.013471 0.00374263 -0.00383669 0.013251 0.00358869 -0.00434351 0.013621 0.00355267 -0.00440534 0.0135715 0.00386927 -0.00460185 0.0135715 0.00407322 -0.00446334 0.0136031 0.00381609 -0.00465715 0.0136031 0.00401723 -0.00430519 0.013471 0.00396111 -0.00435212 0.013527 0.00391808 -0.00450637 0.013471 0.00416989 -0.00423544 0.013331 0.00402506 -0.00426593 0.013405 0.00399711 -0.00404176 0.013331 0.00380306 -0.00442002 0.013251 0.00425732 -0.00400899 0.0131674 0.00383025 -0.00476007 0.013527 0.00432102 -0.00468211 0.013405 0.00440818 -0.00465454 0.013331 0.00443901 -0.00490493 0.013405 0.00459778 -0.00507341 0.0136031 0.00438955 -0.00529483 0.0136031 0.00455982 -0.00497849 0.013527 0.00450687 -0.00502391 0.0135715 0.00445073 -0.00493845 0.013471 0.00455636 -0.00511247 0.013331 0.00480952 -0.00486115 0.013251 0.00465189 -0.00524842 0.0135715 0.00462337 -0.0054812 0.0135715 0.0047847 -0.00572166 0.0135715 0.00493431 -0.0053391 0.013251 0.00500095 -0.00593574 0.013527 0.00513583 -0.00562574 0.013405 0.00509734 -0.00565276 0.013471 0.00505142 -0.0062911 0.013621 0.0050503 -0.00590621 0.013471 0.00519223 -0.00651269 0.0136031 0.00523643 -0.00625632 0.0136031 0.00512556 -0.00638645 0.013251 0.00554938 -0.00614396 0.013405 0.00536869 -0.00674845 0.0135715 0.00540882 -0.00672478 0.013527 0.00547705 -0.00645624 0.013527 0.00537637 -0.00670391 0.013471 0.00553719 -0.00706358 0.013621 0.00533991 -0.00677424 0.0136031 0.00533448 -0.0069801 0.013471 0.00562542 -0.00667289 0.0133309 0.0056266 -0.00731027 0.0136031 0.00549122 -0.00727513 0.013527 0.00563797 -0.00701823 0.0135715 0.00549501 -0.00699796 0.013527 0.00556432 -0.0072479 0.013405 0.0057517 -0.00723827 0.0133309 0.00579192 -0.00723169 0.013251 0.0058194 -0.00759873 0.013621 0.00546804 -0.00753403 0.013405 0.00581276 -0.00726031 0.013471 0.00569989 -0.0075264 0.0133309 0.00585341 -0.00752119 0.013251 0.00588118 -0.00785916 0.0136031 0.00559424 -0.00783005 0.013471 0.00580682 -0.00781338 0.013251 0.00592857 -0.00811819 0.013471 0.00583903 -0.00811001 0.0133309 0.00593331 -0.00810633 0.0131674 0.00597573 -0.00840994 0.013527 0.00579333 -0.00812994 0.0135715 0.00570366 -0.0084056 0.013405 0.00591019 -0.00840302 0.013251 0.00597975 -0.00869381 0.013621 0.00556309 -0.00869815 0.013405 0.0059138 -0.00840758 0.013471 0.00585695 -0.00840406 0.0133309 0.00595152 -0.00896884 0.013621 0.00555288 -0.0086967 0.013527 0.00579688 -0.00899052 0.013405 0.00590295 -0.00899571 0.0131674 0.00598673 -0.00926098 0.0135715 0.00568967 -0.00898329 0.013527 0.00578624 -0.00928659 0.0133309 0.00591876 -0.00929132 0.0131674 0.00596108 -0.00952898 0.0136031 0.00557358 -0.00954158 0.0135715 0.00565126 -0.00927608 0.013471 0.0058247 -0.00957186 0.013405 0.00583797 -0.00928199 0.013405 0.00587765 -0.00951571 0.013621 0.00549175 -0.00980351 0.0136031 0.00552206 -0.00981993 0.0135715 0.00559901 -0.009835 0.013527 0.00566964 -0.00985941 0.013405 0.005784 -0.00956333 0.013471 0.00578538 -0.00958301 0.013251 0.00590668 -0.0101303 0.013471 0.00566439 -0.00984829 0.013471 0.0057319 -0.00987394 0.013251 0.00585208 -0.010318 0.013621 0.00529965 -0.0103672 0.0135715 0.00545358 -0.0103892 0.013527 0.00552237 -0.0101546 0.0133309 0.00575585 -0.0104503 0.0131674 0.00571373 -0.0101618 0.013251 0.00578315 -0.0103432 0.0136031 0.00537863 -0.0107012 0.013405 0.00553787 -0.0107256 0.013251 0.00560305 -0.0108974 0.0135715 0.0052548 -0.0106071 0.0136031 0.00528707 -0.0111545 0.0135715 0.00513599 -0.0108662 
0.0136031 0.00518258 -0.0112381 0.013405 0.00530568 -0.0112563 0.0133309 0.00534279 -0.0109889 0.0133309 0.00546638 -0.0112688 0.013251 0.00536813 -0.0110058 0.0131674 0.00550547 -0.0111864 0.013527 0.00520078 -0.0117213 0.013471 0.00497636 -0.0114714 0.013471 0.00512339 -0.0117494 0.013405 0.0050216 -0.0117938 0.0131674 0.00509287 -0.0118862 0.0135715 0.00470547 -0.0117713 0.013331 0.00505671 -0.0120336 0.013251 0.00491815 -0.0118414 0.0136031 0.0046408 -0.0121977 0.013471 0.00464615 -0.0119939 0.013405 0.00486094 -0.0122726 0.013251 0.00474356 -0.0124577 0.013405 0.00450436 -0.0125121 0.0131674 0.00456829 -0.0126757 0.013405 0.0043093 -0.0128839 0.013405 0.00410371 -0.0126392 0.013471 0.00427049 -0.0128173 0.013621 0.00365749 -0.0128798 0.0136031 0.00371199 -0.012691 0.0136031 0.00391786 -0.0129935 0.013527 0.00381119 -0.0130415 0.013471 0.00385305 -0.0127997 0.013527 0.00402257 -0.0130816 0.013405 0.00388807 -0.0129137 0.013331 0.00413241 -0.012934 0.013251 0.00415201 -0.0131199 0.0135715 0.00354577 -0.0129391 0.0135715 0.00376372 -0.0131766 0.013527 0.00359049 -0.0132266 0.013471 0.00362992 -0.0131449 0.0131674 0.00394326 -0.0131341 0.013251 0.00393383 -0.0131128 0.013331 0.00391526 -0.0133486 0.013527 0.00336101 -0.0132684 0.013405 0.00366292 -0.0131581 0.013621 0.00322545 -0.0134005 0.013471 0.00339791 -0.0134776 0.013331 0.00345278 -0.0136575 0.013527 0.00287794 -0.013759 0.013405 0.00293599 -0.0135627 0.013471 0.00315759 -0.0138319 0.0131674 0.00297766 -0.0136662 0.013251 0.0032238 -0.0135948 0.0135715 0.00284209 -0.01396 0.013251 0.00271002 -0.0137949 0.013331 0.00295652 -0.0138195 0.013251 0.00297054 -0.0138513 0.0135715 0.00233723 -0.0137293 0.0135715 0.00259283 -0.013898 0.013405 0.0026785 -0.0138871 0.0136031 0.00204738 -0.0137038 0.013621 0.00227126 -0.0139605 0.0135715 0.00207591 -0.0140278 0.013527 0.0021021 -0.0140871 0.013471 0.00212518 -0.0140875 0.013251 0.00244287 -0.0138099 0.013621 0.00201732 -0.0141368 0.013405 0.0021445 -0.0141753 0.013331 0.0021595 -0.0143157 0.0131674 0.00189583 -0.0142016 0.013251 0.00216974 -0.0139838 0.013621 0.00149526 -0.0140566 0.0135715 0.00180951 -0.0141251 0.013527 0.00183234 -0.0143615 0.013331 0.00160064 -0.0143887 0.013251 0.00160823 -0.0140512 0.013621 0.00122841 -0.0144508 0.013405 0.001019 -0.0143933 0.013405 0.00130586 -0.0144916 0.013331 0.00102612 -0.0144336 0.013331 0.00131499 -0.0142283 0.0136031 0.000696597 -0.0143064 0.0135715 0.000706305 -0.0143983 0.013471 0.00100982 -0.0145351 0.013331 0.000734744 -0.0145632 0.013251 0.000738229 -0.0145774 0.0131674 0.000739997 -0.0144698 0.013471 0.000434551 -0.014378 0.013527 0.000715215 -0.0141732 0.013621 0.000412495 -0.0144064 0.013527 0.000429831 -0.0145374 0.013405 0.000146287 -0.0146214 0.0131674 0.000148364 -0.0146214 0.0131674 -0.00014836 -0.0143343 0.0135715 -0.000424473 -0.0143484 0.0135715 -0.000141606 -0.0144064 0.013527 -0.000429827 -0.0144698 0.013471 -0.000434547 -0.0144205 0.013527 -0.000143392 -0.014378 0.013527 -0.000715211 -0.0144842 0.013471 -0.000144966 -0.0145374 0.013405 -0.000146284 -0.014523 0.013405 -0.000438497 -0.0145924 0.013251 -0.000443659 -0.014607 0.013251 -0.000148006 -0.0143064 0.0135715 -0.000706302 -0.0142645 0.0135715 -0.000986403 -0.0144916 0.013331 -0.00102612 -0.0142088 0.0135715 -0.00126409 -0.0144508 0.013405 -0.00101899 -0.0143933 0.013405 -0.00130585 -0.014209 0.013527 -0.00155809 -0.0142792 0.013527 -0.00128003 -0.0143413 0.013471 -0.00129409 -0.0140566 0.0135715 -0.00180951 -0.0141395 0.0135715 -0.00153868 -0.0143217 0.013405 -0.00158952 
-0.0143021 0.013251 -0.0018913 -0.0142753 0.013331 -0.00188237 -0.0137795 0.0136031 -0.0023051 -0.0138513 0.0135715 -0.00233723 -0.0139172 0.013527 -0.00236671 -0.0139753 0.013471 -0.0023927 -0.0140871 0.013471 -0.00212518 -0.0140875 0.013251 -0.00244287 -0.0136592 0.0136031 -0.00255719 -0.0137293 0.0135715 -0.00259283 -0.0139348 0.013331 -0.00269722 -0.014024 0.013405 -0.00241445 -0.01396 0.013251 -0.00271002 -0.0134546 0.013621 -0.00276186 -0.0136575 0.013527 -0.00287793 -0.0137937 0.013527 -0.00262553 -0.013759 0.013405 -0.00293598 -0.0137949 0.013331 -0.00295652 -0.0133819 0.0136031 -0.00304199 -0.013509 0.013527 -0.00312329 -0.0136783 0.0131674 -0.00323151 -0.0136662 0.013251 -0.00322379 -0.0138195 0.013251 -0.00297054 -0.0132257 0.0136031 -0.00327351 -0.0133121 0.013621 -0.00299733 -0.0134439 0.013405 -0.0034288 -0.0135627 0.013471 -0.00315759 -0.0132684 0.013405 -0.00366291 -0.0133009 0.013331 -0.00368853 -0.0134776 0.013331 -0.00345278 -0.0133343 0.0131674 -0.0037149 -0.0135006 0.013251 -0.00346915 -0.0128173 0.013621 -0.00365748 -0.0128798 0.0136031 -0.00371198 -0.0131199 0.0135715 -0.00354576 -0.0131766 0.013527 -0.00359049 -0.0130415 0.013471 -0.00385304 -0.0126313 0.013621 -0.00386033 -0.0131128 0.013331 -0.00391526 -0.0124355 0.013621 -0.00405374 -0.0127477 0.0135715 -0.00397246 -0.0125956 0.013527 -0.0042241 -0.0127234 0.013251 -0.00436002 -0.0129137 0.013331 -0.0041324 -0.0123351 0.0135715 -0.00436029 -0.0124923 0.0136031 -0.00411414 -0.0123819 0.013527 -0.00441529 -0.0125462 0.0135715 -0.00417148 -0.0124231 0.013471 -0.00446378 -0.0124577 0.013405 -0.00450435 -0.0126392 0.013471 -0.00427048 -0.0124845 0.013331 -0.00453585 -0.0125028 0.013251 -0.00455737 -0.0120669 0.0136031 -0.00447605 -0.0121149 0.0135715 -0.00453843 -0.0122554 0.013331 -0.00472117 -0.0120164 0.013621 -0.00441033 -0.0121589 0.013527 -0.00459568 -0.0121977 0.013471 -0.00464615 -0.0122726 0.013251 -0.00474356 -0.0116079 0.0136031 -0.00479417 -0.0118414 0.0136031 -0.00464079 -0.0117862 0.013251 -0.0050807 -0.0113672 0.0136031 -0.00493583 -0.0114405 0.013527 -0.00506774 -0.0115173 0.013331 -0.00520612 -0.0117713 0.013331 -0.00505671 -0.0115311 0.013251 -0.00523081 -0.0110831 0.013621 -0.00499103 -0.0111864 0.013527 -0.00520077 -0.0114714 0.013471 -0.00512339 -0.0112688 0.013251 -0.00536813 -0.0108662 0.0136031 -0.00518257 -0.0108974 0.0135715 -0.0052548 -0.0111545 0.0135715 -0.00513599 -0.0109513 0.013471 -0.00537951 -0.0106071 0.0136031 -0.00528707 -0.0109261 0.013527 -0.00532108 -0.0107012 0.013405 -0.00553786 -0.0109725 0.013405 -0.00542841 -0.0107256 0.013251 -0.00560305 -0.0107306 0.0131674 -0.00561647 -0.010578 0.013621 -0.00520944 -0.0106601 0.013527 -0.00542837 -0.0104373 0.013331 -0.00567316 -0.00980351 0.0136031 -0.00552205 -0.0103432 0.0136031 -0.00537862 -0.0104459 0.013251 -0.00570007 -0.0101618 0.013251 -0.00578315 -0.00956333 0.013471 -0.00578538 -0.00955314 0.013527 -0.00572254 -0.00984829 0.013471 -0.00573189 -0.00985941 0.013405 -0.005784 -0.00957186 0.013405 -0.00583797 -0.00958301 0.013251 -0.00590668 -0.00987394 0.013251 -0.00585208 -0.00954158 0.0135715 -0.00565125 -0.00927608 0.013471 -0.0058247 -0.00929132 0.0131674 -0.00596107 -0.00898329 0.013527 -0.00578623 -0.00897883 0.0135715 -0.00571416 -0.00899482 0.013251 -0.00597243 -0.00899571 0.0131674 -0.00598673 -0.00869581 0.0135715 -0.00572466 -0.00899052 0.013405 -0.00590295 -0.00841553 0.0136031 -0.00564252 -0.00841262 0.0135715 -0.00572116 -0.00840248 0.0131674 -0.00599407 -0.00813675 0.0136031 -0.00562526 -0.0084056 0.013405 
-0.00591018 -0.00814392 0.013621 -0.00554267 -0.00785916 0.0136031 -0.00559423 -0.00784848 0.0135715 -0.00567219 -0.00783869 0.013527 -0.00574374 -0.0081237 0.013527 -0.0057756 -0.00781338 0.013251 -0.00592857 -0.00810757 0.013251 -0.00596145 -0.00754386 0.013471 -0.0057604 -0.00753403 0.013405 -0.00581276 -0.00783005 0.013471 -0.00580682 -0.00782283 0.013405 -0.0058596 -0.00752119 0.013251 -0.00588118 -0.00751855 0.0131674 -0.00589526 -0.00727513 0.013527 -0.00563797 -0.00723827 0.013331 -0.00579192 -0.00723169 0.013251 -0.00581939 -0.00704031 0.0136031 -0.00541948 -0.0069801 0.013471 -0.00562542 -0.00696514 0.013405 -0.00567655 -0.0072479 0.013405 -0.0057517 -0.00706358 0.013621 -0.00533991 -0.00674845 0.0135715 -0.00540882 -0.00699796 0.013527 -0.00556432 -0.00651269 0.0136031 -0.00523642 -0.00645624 0.013527 -0.00537637 -0.0064125 0.013405 -0.00548482 -0.00638645 0.013251 -0.00554937 -0.00612661 0.013331 -0.00540623 -0.00639702 0.013331 -0.00552317 -0.00625632 0.0136031 -0.00512555 -0.00590621 0.013471 -0.00519223 -0.00580361 0.013621 -0.00479504 -0.0054812 0.0135715 -0.00478469 -0.00576157 0.0136031 -0.00486649 -0.00565276 0.013471 -0.00505142 -0.00534373 0.013621 -0.00449287 -0.00556993 0.013621 -0.00464964 -0.00524842 0.0135715 -0.00462336 -0.00540658 0.013471 -0.00489825 -0.00502391 0.0135715 -0.00445072 -0.00497849 0.013527 -0.00450686 -0.00487892 0.013331 -0.00462992 -0.00511247 0.013331 -0.00480952 -0.00486115 0.013251 -0.00465189 -0.00490493 0.013405 -0.00459777 -0.00486067 0.0136031 -0.00420854 -0.00455111 0.013527 -0.00412459 -0.00480821 0.0135715 -0.00426719 -0.00476007 0.013527 -0.00432102 -0.00450637 0.013471 -0.00416989 -0.00446893 0.013405 -0.00420779 -0.00443987 0.013331 -0.00423722 -0.00440994 0.0131674 -0.00426751 -0.00471541 0.013621 -0.00395825 -0.00426593 0.013405 -0.00399711 -0.00423544 0.013331 -0.00402506 -0.00440534 0.0135715 -0.00386927 -0.00416358 0.013527 -0.00370197 -0.00404375 0.0135715 -0.0034335 -0.00398596 0.013527 -0.00347681 -0.00411459 0.013471 -0.00374262 -0.00387956 0.0135715 -0.00320274 -0.00410672 0.0136031 -0.00338631 -0.0038197 0.013527 -0.00324314 -0.00394478 0.0136031 -0.00315872 -0.00376693 0.013471 -0.00327875 -0.00372697 0.0135715 -0.00296414 -0.00366507 0.013251 -0.0033475 -0.00352976 0.013331 -0.00308349 -0.00365563 0.0136031 -0.00268093 -0.0037943 0.0136031 -0.0029234 -0.00386522 0.013621 -0.00288048 -0.0034668 0.013471 -0.0027828 -0.00350559 0.013251 -0.00309811 -0.00352913 0.0136031 -0.00243189 -0.00358638 0.0135715 -0.00271829 -0.00327586 0.013527 -0.00223509 -0.00339294 0.013527 -0.00249689 -0.00321712 0.013471 -0.00225963 -0.00335864 0.013251 -0.00284115 -0.00316797 0.013405 -0.00228017 0.00860795 -0.0109977 -0.00554111 0.00854207 -0.0111485 -0.0055355 0.00836212 -0.0113147 -0.00552018 0.00858124 -0.0110758 -0.00553883 0.008431 -0.0112696 -0.00552605 0.00841889 -0.0112795 -0.0055 0.00835199 -0.0113205 -0.0055 0.00828676 -0.0113479 -0.00551377 0.00827951 -0.0113505 -0.0055 0.00820322 -0.0113688 -0.0055 0.00849154 -0.0112136 -0.0055312 0.00816205 -0.0113682 -0.00557349 0.00817394 -0.0113682 -0.00556617 0.00819318 -0.0113682 -0.00554609 0.008125 -0.0113505 -0.00565451 0.00815523 -0.0113147 -0.00573605 0.00816401 -0.0112696 -0.00580462 0.00821493 -0.0112696 -0.00579365 0.00823214 -0.0113147 -0.00571249 0.00826326 -0.0112696 -0.00577423 0.0084222 -0.0112696 -0.00557739 0.00823272 -0.0112136 -0.00585174 0.00834374 -0.0112136 -0.00579576 0.00837944 -0.0112696 -0.00567197 0.008125 -0.0112286 -0.00585356 0.008125 -0.0111689 -0.00590451 
0.00817173 -0.0112136 -0.00586488 0.00831345 -0.0111485 -0.00587377 0.00839058 -0.0112136 -0.00575454 0.00850642 -0.0111485 -0.00567242 0.00853008 -0.0111485 -0.00560548 0.008125 -0.011102 -0.00594551 0.00825908 -0.0110758 -0.00593782 0.0083739 -0.0111485 -0.00583654 0.00850436 -0.0110758 -0.00575641 0.00856811 -0.0110758 -0.00561538 0.00818316 -0.0110758 -0.00595418 0.00826693 -0.0109977 -0.00596346 0.00856667 -0.0109977 -0.00569965 0.00834321 -0.0109977 -0.0059328 0.00834933 -0.0109163 -0.00594494 0.0084213 -0.0109163 -0.00590063 0.00847493 -0.0109977 -0.00583538 0.00848474 -0.0109163 -0.00584479 0.00859406 -0.0109977 -0.00562214 0.00827951 -0.010875 -0.00597553 0.00835199 -0.010875 -0.00594551 0.00841889 -0.010875 -0.00590451 0.00852951 -0.010875 -0.00579389 0.00860721 -0.0109163 -0.00562556 0.00861884 -0.010875 -0.00557822 0.00827091 -0.0109163 -0.00597645 0.00828211 -0.0113479 -0.00554091 0.008207 -0.0113682 -0.00550698 0.00820464 -0.0113682 -0.00552074 0.00819999 -0.0113682 -0.0055339 0.00825951 -0.0113479 -0.00559091 0.00818441 -0.0113682 -0.00555695 0.00814562 -0.0113479 -0.00566104 0.00813545 -0.0113682 -0.00558163 0.00827294 -0.0113479 -0.00556687 0.00824221 -0.0113479 -0.00561234 0.00822154 -0.0113479 -0.00563053 0.00819468 -0.0113147 -0.00572754 0.00819809 -0.0113479 -0.00564497 0.00861884 -0.0109532 -0.0055 0.0086215 -0.0109163 -0.00554226 0.00818829 -0.0109163 -0.00599426 0.00818656 -0.0109977 -0.00598078 0.00817817 -0.0111485 -0.00591519 0.0081491 -0.0113682 -0.00557869 0.00817254 -0.0113479 -0.00565524 0.00833114 -0.0110758 -0.00590886 0.00824757 -0.0111485 -0.00590024 0.00841322 -0.0109977 -0.0058897 0.00829061 -0.0112136 -0.00582847 0.00839727 -0.0110758 -0.00586814 0.00834672 -0.0112696 -0.0057125 0.00830761 -0.0112696 -0.00574691 0.00829681 -0.0113147 -0.00566466 0.00826651 -0.0113147 -0.00569133 0.00852658 -0.0109977 -0.00577142 0.00845557 -0.0110758 -0.00581683 0.0084718 -0.0111485 -0.0057344 0.0084272 -0.0111485 -0.00578964 0.00853784 -0.0109163 -0.00577903 0.00854224 -0.0110758 -0.00568861 0.00842978 -0.0112136 -0.005706 0.00840484 -0.0112696 -0.0056265 0.00834185 -0.0113147 -0.00559802 0.00832216 -0.0113147 -0.00563326 0.00857906 -0.0109163 -0.00570525 0.00848099 -0.0112136 -0.00559269 0.00846021 -0.0112136 -0.00565152 0.00835529 -0.0113147 -0.00555997 -0.002375 -0.0108856 -0.00599989 0.008125 -0.010875 -0.006 0.002875 -0.0108856 -0.00599989 0.006375 -0.0108856 -0.00599989 0.002875 -0.0111281 -0.00593123 -0.000625 -0.0112573 -0.0058223 -0.002375 -0.0113062 -0.00575306 -0.012875 -0.0113062 -0.00575306 -0.012875 -0.0112573 -0.0058223 -0.012875 -0.0111973 -0.00588227 -0.012875 -0.0109699 -0.00599091 -0.009375 -0.0108856 -0.00599989 -0.005875 -0.0108856 -0.00599989 -0.014125 -0.010875 -0.006 -0.005875 -0.0109699 -0.00599091 -0.004125 -0.0109699 -0.00599091 -0.007625 -0.0110515 -0.0059678 -0.009375 -0.0111281 -0.00593123 -0.007625 -0.0111281 -0.00593123 -0.011125 -0.0111973 -0.00588227 0.008125 -0.0109532 -0.00599385 0.006375 -0.0109699 -0.00599091 0.008125 -0.0110295 -0.00597553 0.001125 -0.0112573 -0.0058223 -0.004125 -0.0113428 -0.00567654 -0.005875 -0.0113659 -0.00559494 -0.000625 -0.0113428 -0.00567654 -0.004125 -0.0113749 -0.00551061 -0.005875 -0.0113749 -0.00551061 0.006375 -0.0111281 -0.00593123 0.006375 -0.0111973 -0.00588227 0.004625 -0.0111973 -0.00588227 0.002875 -0.0112573 -0.0058223 0.001125 -0.0113062 -0.00575306 0.002875 -0.0113062 -0.00575306 -0.000625 -0.0113659 -0.00559494 -0.002375 -0.0113659 -0.00559494 0.004625 -0.0112573 -0.0058223 0.001125 
-0.0113749 -0.00551061 0.008125 -0.0112795 -0.00579389 0.008125 -0.0113205 -0.005727 0.004625 -0.0113659 -0.00559494 0.006375 -0.0113659 -0.00559494 0.002875 -0.0113749 -0.00551061 0.004625 -0.0113749 -0.00551061 0.006375 -0.0113749 -0.00551061 0.008125 -0.0113688 -0.00557822 -0.012875 -0.0113749 -0.00551061 -0.0146226 -0.0113726 -0.00554901 -0.0146035 -0.0113535 -0.00564514 -0.012875 -0.0113659 -0.00559494 -0.014566 -0.011316 -0.0057357 -0.0145407 -0.0112907 -0.00577779 -0.012875 -0.0111281 -0.00593123 -0.0144028 -0.0111528 -0.00591574 -0.0143163 -0.0110663 -0.00596194 -0.012875 -0.0110515 -0.0059678 -0.0142701 -0.0110201 -0.00597847 -0.014174 -0.010924 -0.00599759 -0.012875 -0.0108856 -0.00599989 -0.012875 -0.0113428 -0.00567654 0.004625 -0.0109699 -0.00599091 -0.002375 -0.0109699 -0.00599091 -0.000625 -0.0108856 -0.00599989 0.001125 -0.0109699 -0.00599091 0.004625 -0.0108856 -0.00599989 0.001125 -0.0108856 -0.00599989 -0.004125 -0.0108856 -0.00599989 -0.007625 -0.0109699 -0.00599091 -0.009375 -0.0109699 -0.00599091 -0.011125 -0.0108856 -0.00599989 -0.011125 -0.0109699 -0.00599091 -0.007625 -0.0108856 -0.00599989 0.006375 -0.0110515 -0.0059678 0.004625 -0.0110515 -0.0059678 0.002875 -0.0110515 -0.0059678 0.002875 -0.0109699 -0.00599091 -0.002375 -0.0110515 -0.0059678 -0.000625 -0.0109699 -0.00599091 -0.005875 -0.0110515 -0.0059678 -0.009375 -0.0110515 -0.0059678 -0.011125 -0.0110515 -0.0059678 0.004625 -0.0111281 -0.00593123 0.001125 -0.0110515 -0.0059678 0.001125 -0.0111281 -0.00593123 -0.005875 -0.0111281 -0.00593123 -0.004125 -0.0110515 -0.0059678 -0.002375 -0.0111281 -0.00593123 -0.000625 -0.0110515 -0.0059678 -0.011125 -0.0111281 -0.00593123 0.002875 -0.0111973 -0.00588227 -0.000625 -0.0111281 -0.00593123 -0.002375 -0.0111973 -0.00588227 -0.004125 -0.0111281 -0.00593123 -0.009375 -0.0111973 -0.00588227 0.001125 -0.0111973 -0.00588227 -0.002375 -0.0112573 -0.0058223 -0.000625 -0.0111973 -0.00588227 -0.004125 -0.0112573 -0.0058223 -0.007625 -0.0111973 -0.00588227 -0.004125 -0.0111973 -0.00588227 -0.005875 -0.0111973 -0.00588227 0.006375 -0.0112573 -0.0058223 -0.005875 -0.0112573 -0.0058223 -0.005875 -0.0113062 -0.00575306 -0.007625 -0.0112573 -0.0058223 -0.011125 -0.0112573 -0.0058223 -0.009375 -0.0112573 -0.0058223 -0.007625 -0.0113062 -0.00575306 -0.009375 -0.0113062 -0.00575306 0.006375 -0.0113062 -0.00575306 0.004625 -0.0113428 -0.00567654 0.002875 -0.0113428 -0.00567654 0.004625 -0.0113062 -0.00575306 0.001125 -0.0113428 -0.00567654 -0.002375 -0.0113428 -0.00567654 -0.000625 -0.0113062 -0.00575306 -0.004125 -0.0113062 -0.00575306 -0.005875 -0.0113428 -0.00567654 -0.011125 -0.0113428 -0.00567654 -0.011125 -0.0113062 -0.00575306 0.006375 -0.0113428 -0.00567654 0.002875 -0.0113659 -0.00559494 0.001125 -0.0113659 -0.00559494 -0.004125 -0.0113659 -0.00559494 -0.009375 -0.0113659 -0.00559494 -0.009375 -0.0113428 -0.00567654 -0.007625 -0.0113428 -0.00567654 -0.002375 -0.0113749 -0.00551061 -0.000625 -0.0113749 -0.00551061 -0.007625 -0.0113749 -0.00551061 -0.007625 -0.0113659 -0.00559494 -0.011125 -0.0113659 -0.00559494 -0.011125 -0.0113749 -0.00551061 -0.009375 -0.0113749 -0.00551061 0.00827951 0.005375 -0.00597553 0.00821994 0.0037 -0.0059909 0.00830154 0.0037 -0.0059678 0.0084473 -0.001325 -0.00588226 0.0084473 0.000350001 -0.00588226 0.00850726 -0.003 -0.0058223 0.00850726 -0.001325 -0.0058223 0.0085928 -0.00635 -0.00567654 0.0086159 -0.0097 -0.00559494 0.00860053 -0.010875 -0.00565451 0.00855623 -0.00635 -0.00575306 0.00837806 -0.003 -0.00593123 0.00821994 0.000350001 -0.0059909 
[Embedded mesh geometry data omitted: several thousand whitespace-separated x y z vertex coordinates from a model mesh asset added in this diff. The raw float dump carries no reviewable information beyond being the mesh's vertex list.]
0.00949774 0.00597382 -0.0141456 0.00949774 0.00598426 -0.0146027 0.00949774 0.00558192 -0.0146161 0.00941629 0.00558421 -0.0145948 0.00941629 0.005666 -0.0145021 0.00949774 0.0058045 -0.0143108 0.00941629 0.00596236 -0.0141461 0.00941629 0.00599784 -0.0146188 0.009375 0.00557822 -0.0145705 0.009375 0.005727 -0.0144542 0.00941629 0.00587406 -0.01423 0.00941629 0.00598711 -0.01456 0.00941629 0.00574302 -0.0142795 0.009375 0.00597553 -0.0141557 0.00986818 0.00557636 -0.0141681 0.00986818 0.00557008 -0.0141794 0.00986818 0.00556178 -0.014189 0.00986818 0.0055517 -0.0141968 0.00986818 0.00554014 -0.0142795 0.00985053 0.0055 -0.0142061 0.00986818 0.00551391 -0.0141285 0.00986818 0.00558222 -0.0145763 0.00957585 0.00557738 -0.014285 0.00984791 0.00552744 -0.0145197 0.00964847 0.00563945 -0.0144718 0.00971364 0.00562255 -0.0143328 0.00981474 0.00561606 -0.0142781 0.00984791 0.00555409 -0.0145481 0.00949774 0.00573639 -0.0144507 0.00964847 0.00576296 -0.0143639 0.00976957 0.00569293 -0.0145127 0.00941629 0.00581304 -0.0144112 0.00971364 0.0057311 -0.0143102 0.00981474 0.0056495 -0.0142822 0.00981474 0.00567864 -0.0144452 0.00949774 0.00586385 -0.0144275 0.00957585 0.00584372 -0.0143445 0.00964847 0.00585644 -0.0144015 0.00964847 0.00581422 -0.0142498 0.00981474 0.00570264 -0.0143863 0.00941629 0.00592431 -0.0142811 0.00964847 0.0058884 -0.0143179 0.00971364 0.00581325 -0.0142101 0.00984791 0.00563825 -0.0141423 0.00986818 0.00558045 -0.0141444 0.00957585 0.00595748 -0.0142215 0.00957585 0.00594761 -0.014138 0.00976957 0.00580683 -0.0142025 0.00971364 0.00585961 -0.0141351 0.00981474 0.00573776 -0.0141592 0.00984791 0.00565871 -0.0141319 0.00984791 0.00566221 -0.0124312 0.00946994 0.00599091 -0.0124312 0.0098659 0.00559494 -0.014125 0.00986884 0.00557822 -0.0110229 0.00952951 0.00597553 -0.0110229 0.00945322 0.00599385 -0.0110229 0.009602 0.00594551 -0.0124312 0.00980623 0.00575306 -0.0110229 0.00977951 0.00579389 -0.0110229 0.00985053 0.00565451 -0.0110229 0.00986884 0.00557822 -0.0124312 0.00987489 0.00551061 -0.0124312 0.00962806 0.00593123 -0.0110229 0.00972855 0.00585356 -0.0124312 0.0098428 0.00567654 -0.014125 0.00985053 0.00565451 -0.0124312 0.00975726 0.0058223 -0.0124312 0.0096973 0.00588227 -0.014125 0.00966889 0.00590451 -0.014125 0.00952951 0.00597553 -0.0124312 0.00955154 0.0059678 -0.014125 0.00945322 0.00599385 -0.0124312 0.00938561 0.00599989 -0.0142199 0.0098659 0.00394896 -0.0144473 0.00975726 0.00394896 -0.0145705 0.009602 0.0055 -0.0146159 0.00946994 0.00394896 -0.0146188 0.00945322 0.0055 -0.0142795 0.00985053 0.00239792 -0.0143015 0.0098428 0.00394896 -0.0145295 0.00966889 0.00239792 -0.0145705 0.009602 0.00239792 -0.0146188 0.00945322 0.00239792 -0.0146249 0.00938561 0.00394896 -0.0144786 0.00972855 0.00239792 -0.0145073 0.0096973 0.00394896 -0.0146005 0.00952951 0.0055 -0.0145928 0.00955154 0.00394896 -0.0145562 0.00962806 0.00394896 -0.0143781 0.00980623 0.00394896 -0.0142032 0.00986884 0.0055 -0.0142032 0.00986885 0.00239792 -0.0141356 0.00987489 0.00394896 -0.014625 0.00940394 0.00194073 -0.014625 0.00942419 0.00178928 -0.0146068 0.00951434 0.00220747 -0.0146005 0.00952951 0.00239792 -0.0145694 0.00960968 0.00221294 -0.0146052 0.00956863 0.00176541 -0.0146108 0.00959311 0.00151033 -0.0146003 0.00962916 0.00151738 -0.014619 0.00967025 0.000993688 -0.0145794 0.00960452 0.00202431 -0.0145784 0.00963933 0.00177649 -0.0145913 0.00965382 0.0015222 -0.0146021 0.0096782 0.00126372 -0.0146145 0.00969394 0.000999805 -0.0146245 0.0098472 0.000173627 -0.0146222 0.00982537 
0.000349479 -0.0146243 0.00985003 0.000174477 -0.014563 0.00963682 0.0020277 -0.0145643 0.00966708 0.00178084 -0.0146062 0.00972658 0.00100824 -0.0146182 0.00975622 0.000693642 -0.0146187 0.00980139 0.000525396 -0.0144189 0.00977951 0.00239792 -0.0144122 0.00978952 0.00222326 -0.014352 0.0098205 0.00239792 -0.0143783 0.00981124 0.00222451 -0.01442 0.00979852 0.00204469 -0.0145109 0.00978879 0.00154857 -0.0145466 0.00979431 0.00129065 -0.0145897 0.00981353 0.000870934 -0.0146049 0.00981219 0.000709379 -0.0146233 0.00986418 0.000178726 -0.0144688 0.00978976 0.00180007 -0.0144453 0.00981041 0.0018033 -0.0144934 0.00980853 0.00155242 -0.0145841 0.00982658 0.00087447 -0.0146228 0.00986959 0.000180352 -0.0146162 0.00986431 0.000361022 -0.0143899 0.00981878 0.00204682 -0.0145897 0.00985473 0.000721339 -0.0142711 0.00985826 0.00222721 -0.014125 0.009875 0.00239792 -0.0143208 0.009875 0.00188611 -0.0143968 0.00984571 0.00180884 -0.0145687 0.009875 0.000819705 -0.0145513 0.00984811 0.00103963 -0.014457 0.00984368 0.00155927 -0.0146 0.009875 0.000547423 -0.014625 0.00961777 0.000888109 -0.014625 0.00954239 0.00118723 -0.0146244 0.00955638 0.00123546 -0.0145986 0.0095564 0.00201925 -0.0146139 0.00953485 0.00176012 -0.0145928 0.00955713 0.00220992 -0.0146238 0.00946593 0.00174932 -0.014623 0.00942585 0.00220239 -0.0146182 0.00985357 0.000357839 -0.0146235 0.00986135 0.000177877 -0.0146137 0.00982668 0.000532961 -0.0145949 0.00980032 0.000867357 -0.0145828 0.00978982 0.00102457 -0.0145685 0.00975724 0.00128205 -0.0145426 0.0097467 0.00154036 -0.014491 0.0097677 0.00179661 -0.0145117 0.0097443 0.00179294 -0.0144441 0.00976515 0.00222187 -0.01462 0.00984232 0.000354503 -0.0146208 0.00983668 0.000352831 -0.0146111 0.00978999 0.000703136 -0.0146041 0.0097735 0.000860092 -0.0145782 0.00973804 0.0012776 -0.0145566 0.00972447 0.00153602 -0.0145871 0.00971844 0.00127305 -0.0145269 0.00967782 0.00221685 -0.0146246 0.00984436 0.000172776 -0.0146233 0.00981404 0.000346119 -0.0146247 0.00984153 0.000171925 -0.0146234 0.00976369 0.00051412 -0.0146226 0.00968278 0.000835521 -0.0146195 0.00960413 0.00124654 -0.0145908 0.00961075 0.00177201 -0.0146248 0.00968999 0.000675021 -0.0146239 0.00966699 0.000831245 -0.0146247 0.00965117 0.000826959 -0.0146246 0.00961402 0.000979164 -0.0146226 0.00958031 0.00124101 -0.014617 0.0095644 0.00150471 -0.014625 0.00982793 0.000167842 -0.014625 0.0097811 0.000336352 -0.0146249 0.00973494 0.000505521 -0.0146214 0.00953538 0.00149904 -0.0143428 0.00983018 0.0022256 -0.014474 0.00973829 0.00222032 -0.0145017 0.00970912 0.00221865 -0.0145234 0.00969745 0.00203407 -0.0146081 0.0097599 0.000856409 -0.0146137 0.00977879 0.000699987 -0.014624 0.00985569 0.000176178 -0.0145495 0.0096446 0.00221494 -0.014625 0.009375 0.00239792 -0.0146233 0.00943755 0.00200676 -0.0146184 0.00947779 0.00201099 -0.0146169 0.00947046 0.00220495 -0.0143284 0.0098513 0.00205024 -0.0144486 0.00977613 0.00204234 -0.0145004 0.00972546 0.00203701 -0.0144754 0.00975173 0.00203977 -0.0145308 0.00971967 0.00178908 -0.0145484 0.00969389 0.00178504 -0.0146081 0.00980112 0.000706268 -0.0146238 0.00985852 0.000177028 -0.0146191 0.00984795 0.000356172 -0.0145443 0.00966785 0.00203096 -0.0146101 0.00951749 0.00201516 -0.0145273 0.00976816 0.00154455 -0.0145695 0.00970154 0.00153153 -0.0146241 0.00950614 0.00149332 -0.01462 0.00950056 0.00175474 -0.0145343 0.0098121 0.00129477 -0.014558 0.00977601 0.0012864 -0.014595 0.00969848 0.00126842 -0.0145811 0.00967796 0.00152693 -0.0146153 0.00962777 0.00125202 -0.0145678 0.00982011 
0.0010324 -0.0145087 0.00984466 0.00130233 -0.0145756 0.00980509 0.00102852 -0.0145895 0.00977432 0.00102057 -0.0146012 0.00974268 0.0010124 -0.0145957 0.0097586 0.00101651 -0.0146107 0.00971033 0.00100404 -0.0146082 0.00965763 0.00125895 -0.0146235 0.00963283 0.000984023 -0.0146243 0.00970286 0.000678639 -0.0146234 0.00971571 0.000682252 -0.0146116 0.0097462 0.000852697 -0.0146148 0.0097324 0.00084896 -0.0146207 0.00969851 0.000839783 -0.0146216 0.00965159 0.000988867 -0.0145722 0.00985113 0.000881119 -0.0145977 0.00983408 0.000715535 -0.0146014 0.00982318 0.000712469 -0.0146117 0.00983505 0.000535466 -0.0145997 0.00978697 0.000863742 -0.0146172 0.00980985 0.000527925 -0.0146155 0.00981828 0.000530447 -0.0146161 0.00976753 0.000696822 -0.01462 0.00974486 0.000690449 -0.0146175 0.00971852 0.0008452 -0.0146221 0.00972853 0.000685858 -0.0146212 0.00978442 0.000520319 -0.0146222 0.0097759 0.000517773 -0.0146241 0.00975411 0.000511256 -0.01462 0.00979291 0.00052286 -0.0146215 0.00983103 0.000351157 -0.0146228 0.00981971 0.0003478 -0.0146243 0.00980023 0.000342024 -0.0146248 0.00983747 0.000170706 -0.0146238 0.00980837 0.000344436 -0.0146246 0.00974453 0.000508389 -0.0146246 0.00979386 0.000340134 -0.0146248 0.00978748 0.000338243 -0.014625 0.00983111 0.000168797 -0.0146249 0.00983429 0.000169752 -0.0146096 0.00984339 0.000537961 -0.0146051 0.00985927 0.000542714 -0.0146241 0.00985286 0.000175328 -0.0106573 0.00981806 0.00576847 -0.0107822 0.00982521 0.00573655 -0.010779 0.00978356 0.00580169 -0.01065 0.00975097 0.00585279 -0.0106471 0.00972477 0.00587741 -0.0106409 0.00966743 0.00592065 -0.010768 0.0096405 0.00592994 -0.0107615 0.00955571 0.00597014 -0.010755 0.00947215 0.00599248 -0.0107517 0.00942924 0.00599812 -0.0107176 0.00938849 0.006 -0.0106161 0.00943888 0.00599837 -0.0104143 0.00942419 0.006 -0.0101431 0.00953242 0.00599633 -0.0098108 0.00963799 0.00599134 -0.00964455 0.00969094 0.00598906 -0.00947491 0.00973226 0.00598976 -0.00930379 0.00977009 0.00599156 -0.00913115 0.00980396 0.00599412 -0.00878112 0.00985788 0.00599918 -0.0109053 0.0098609 0.00562766 -0.0109042 0.00983464 0.00570254 -0.0110229 0.0098205 0.005727 -0.0109026 0.00979497 0.00577521 -0.0107807 0.00980565 0.00576993 -0.0109016 0.00977077 0.00580884 -0.0109005 0.00974387 0.00584034 -0.0107771 0.00975909 0.00583164 -0.010775 0.00973239 0.0058596 -0.0107728 0.0097036 0.00588541 -0.0108993 0.00971445 0.0058695 -0.0108929 0.00955935 0.00596585 -0.0108951 0.00961321 0.00594105 -0.0107654 0.00960657 0.00594841 -0.0108966 0.0096489 0.00592003 -0.010898 0.00968272 0.00589613 -0.0108911 0.00951537 0.00598069 -0.0107583 0.00951442 0.00598313 -0.0108893 0.00947021 0.00599139 -0.0108874 0.00942424 0.00599784 -0.01087 0.00937854 0.006 -0.00980544 0.00961563 0.00599512 -0.00930068 0.00975907 0.00599355 -0.00912877 0.00979578 0.00599536 -0.00996587 0.00953479 0.00599928 -0.00980006 0.00959313 0.00599783 -0.00963347 0.00964774 0.00599646 -0.00929756 0.009748 0.00599528 -0.00951311 0.00961777 0.006 -0.00911518 0.00974907 0.00599963 -0.00877373 0.00983328 0.00599999 -0.00921534 0.00970009 0.006 -0.00917242 0.009875 0.00597498 -0.0091474 0.00985982 0.00598153 -0.00932781 0.00985517 0.00596651 -0.00984877 0.00979662 0.00592963 -0.00984456 0.00977904 0.00593989 -0.0100135 0.00975204 0.00593349 -0.01035 0.00971997 0.00591283 -0.0106376 0.00963658 0.00593904 -0.00971528 0.009875 0.00590011 -0.0096775 0.00981936 0.00594048 -0.00968478 0.00984777 0.00592342 -0.00985287 0.00981377 0.00591862 -0.0100221 0.00979134 0.00590774 -0.0100178 0.00977199 
0.0059211 -0.0107705 0.00967291 0.0059089 -0.0106441 0.00969687 0.00590007 -0.00986043 0.00984533 0.00589553 -0.0101931 0.00978862 0.00588292 -0.0105207 0.00974607 0.00587237 -0.010249 0.009875 0.00577603 -0.0102039 0.00984372 0.00582768 -0.010371 0.00984488 0.00578797 -0.0106551 0.00979774 0.00579818 -0.0105346 0.00984755 0.00574456 -0.00892299 0.0097917 0.00599997 -0.00928248 0.00969456 0.00599983 -0.00911248 0.00973981 0.00599991 -0.00877542 0.00983891 0.00599992 -0.00877627 0.00984173 0.00599986 -0.00892635 0.009803 0.00599969 -0.0087781 0.00984784 0.00599967 -0.00912399 0.00977936 0.00599741 -0.00912638 0.00978758 0.00599646 -0.00929307 0.0097321 0.00599729 -0.00946573 0.00969833 0.00599569 -0.00928602 0.0097071 0.00599932 -0.00945289 0.00965093 0.00599973 -0.00877961 0.00985286 0.00599945 -0.00893317 0.00982591 0.00599827 -0.00893167 0.00982088 0.00599868 -0.00893913 0.00984596 0.00599608 -0.00878263 0.00986289 0.00599885 -0.00894062 0.00985096 0.00599539 -0.0094969 0.00981347 0.00596459 -0.00878338 0.0098654 0.00599867 -0.00878482 0.0098702 0.00599828 -0.0089421 0.00985595 0.00599465 -0.00932211 0.00983501 0.00597406 -0.00950709 0.00985111 0.00594711 -0.00950044 0.00982654 0.00595897 -0.00967358 0.00980411 0.00594858 -0.00877457 0.0098361 0.00599997 -0.0101373 0.0095028 0.00599908 -0.0105111 0.009875 0.00569584 -0.0106608 0.00985085 0.00570778 -0.0107693 0.009875 0.00560377 -0.0107845 0.00985527 0.0056688 -0.0110229 0.00966889 0.00590451 -0.0109034 0.00981631 0.0057397 -0.0103616 0.00978883 0.00585519 -0.0106526 0.00977533 0.00582633 -0.0105239 0.00977 0.00584922 -0.0104856 0.00948953 0.00599438 -0.01032 0.00954134 0.00598971 -0.0101616 0.0096274 0.00597463 -0.0101545 0.0095909 0.00598537 -0.00982223 0.00968577 0.00597947 -0.00948237 0.00975979 0.00598302 -0.00930997 0.00979198 0.00598677 -0.0106204 0.00947877 0.00599347 -0.0103322 0.00961439 0.00596837 -0.00999487 0.00966715 0.00597294 -0.00999966 0.00968904 0.00596465 -0.00965729 0.00974061 0.00597527 -0.00931304 0.00980284 0.00598399 -0.0091406 0.00983644 0.00598769 -0.0106247 0.00951814 0.00598536 -0.00966556 0.00977282 0.00596309 -0.00949332 0.00980025 0.00596981 -0.00931608 0.00981364 0.00598094 -0.00931911 0.00982436 0.00597763 -0.0106341 0.0096045 0.00595514 -0.0105057 0.00963669 0.00594738 -0.00984025 0.00976106 0.00594938 -0.00983586 0.00974271 0.00595811 -0.0106289 0.00955674 0.00597406 -0.00964884 0.00970765 0.00598507 -0.00893764 0.00984096 0.00599671 -0.0103088 0.00947518 0.00599885 -0.0103144 0.00950841 0.00599542 -0.0104805 0.0094526 0.0059986 -0.0103254 0.00957381 0.00598175 -0.0104906 0.00952604 0.00598739 -0.0104955 0.00956193 0.00597765 -0.0105016 0.00960655 0.00596131 -0.0105097 0.00966584 0.00593144 -0.0103414 0.00966884 0.00594384 -0.0103369 0.00964198 0.00595694 -0.0101713 0.00967678 0.00595489 -0.0103458 0.00969486 0.00592912 -0.0101759 0.00970061 0.005943 -0.0105135 0.00969387 0.00591357 -0.0103541 0.00974405 0.00589504 -0.0105172 0.00972065 0.00589385 -0.0105298 0.00981301 0.00579834 -0.010527 0.00979235 0.0058245 -0.0101488 0.00956183 0.00599176 -0.0101665 0.00965235 0.00596544 -0.0101805 0.00972378 0.00592982 -0.0103579 0.00976703 0.0058758 -0.010365 0.00980935 0.00583328 -0.00997158 0.00956087 0.00599713 -0.00998289 0.0096125 0.00598857 -0.00997726 0.0095868 0.00599356 -0.00981756 0.00966627 0.00598495 -0.00998999 0.0096449 0.00598015 -0.0100044 0.00971052 0.00595529 -0.00982684 0.00970503 0.00597316 -0.0101849 0.00974621 0.00591539 -0.010009 0.00973153 0.0059449 -0.0101891 0.00976784 0.00589974 
-0.010197 0.00980847 0.00586499 -0.0100336 0.00984393 0.00586358 -0.0100262 0.00981004 0.00589345 -0.00945718 0.00966677 0.00599892 -0.00962856 0.00962861 0.00599843 -0.00979465 0.00957052 0.00599946 -0.00963835 0.00966679 0.00599371 -0.00966145 0.00975682 0.00596948 -0.00983139 0.00972402 0.00596604 -0.0096696 0.00978859 0.00595612 -0.00962364 0.00960941 0.00599961 -0.00928955 0.00971961 0.00599847 -0.00911787 0.00975833 0.00599916 -0.00912056 0.00976757 0.00599851 -0.00946146 0.00968258 0.00599757 -0.00947115 0.00971836 0.0059925 -0.00947865 0.00974607 0.0059866 -0.00965309 0.00972422 0.00598047 -0.00930689 0.00978106 0.0059893 -0.00948605 0.0097734 0.00597903 -0.00948971 0.00978689 0.00597462 -0.00893017 0.00981585 0.00599903 -0.00893466 0.00983093 0.0059978 -0.00893615 0.00983594 0.00599728 -0.00913589 0.00982025 0.00599119 -0.00913352 0.00981212 0.00599273 -0.00913825 0.00982836 0.00598951 -0.00914294 0.00984449 0.00598571 -0.00878187 0.00986039 0.00599902 -0.00878037 0.00985537 0.00599932 -0.00877886 0.00985035 0.00599957 -0.00877735 0.00984532 0.00599976 -0.00892803 0.00980864 0.00599945 -0.00892467 0.00979735 0.00599986 -0.00894494 0.00986549 0.00599308 0.008625 0.009375 0.0055 -0.0137355 0.009875 -0.00314372 -0.014125 0.009875 -0.00239791 -0.014125 0.009375 0.006 -0.0110229 0.009375 0.006 -0.0105657 0.00940394 0.006 -0.00713742 0.00947679 0.006 -0.00743777 0.00954239 0.006 -0.00637996 0.00937854 0.006 0.008125 0.009375 0.006 0.008125 0.005375 0.006 -0.0101126 0.00947679 0.006 -0.00981222 0.00954239 0.006 -0.00773688 0.00961777 0.006 -0.0138769 0.009875 0.00290129 -0.0135829 0.009875 0.00337927 -0.014125 0.009875 0.0055 -0.0128676 0.009875 0.00424264 -0.0122324 0.009875 0.00479442 -0.0120043 0.009875 0.00495788 -0.0112775 0.009875 0.00538184 -0.00165406 0.014375 0.000239317 -0.00163229 0.014375 0.000120539 -0.00173954 0.014875 0.000464725 -0.00187649 0.014375 0.000663125 -0.00187649 0.014875 0.000663125 -0.00205694 0.014875 0.000822986 -0.00250446 0.014875 0.000992711 -0.00308972 0.014875 0.000885458 -0.00328812 0.014875 0.000748513 -0.00351046 0.014375 0.000464725 -0.00351046 0.014875 0.000464725 -0.003625 0.014375 1.88505e-09 -0.00361771 0.014875 0.000120539 -0.00361771 0.014875 -0.000120535 -0.00359594 0.014875 -0.000239314 -0.00337351 0.014875 -0.000663121 -0.00308972 0.014375 -0.000885454 -0.00308972 0.014875 -0.000885454 -0.0029796 0.014875 -0.000935014 -0.00286432 0.014875 -0.00097094 -0.002625 0.014375 -0.000999998 -0.00250446 0.014375 -0.000992707 -0.00238568 0.014375 -0.00097094 -0.0022704 0.014375 -0.000935014 -0.00205694 0.014375 -0.000822982 -0.00216028 0.014875 -0.000885454 -0.00168998 0.014375 -0.000354603 -0.00163229 0.014875 -0.000120535 -0.001625 0.014375 1.88505e-09 -0.00180202 0.014375 0.000568067 -0.002625 0.014875 0.001 -0.00274554 0.014375 0.000992711 -0.0029796 0.014875 0.000935018 -0.00328812 0.014375 0.000748513 -0.00356002 0.014375 0.000354607 -0.00356002 0.014875 0.000354607 -0.00359594 0.014875 0.000239317 -0.00361771 0.014375 -0.000120535 -0.00359594 0.014375 -0.000239314 -0.00356002 0.014375 -0.000354603 -0.00351046 0.014875 -0.000464721 -0.00319306 0.014875 -0.000822982 -0.0029796 0.014375 -0.000935014 -0.00286432 0.014375 -0.00097094 -0.00274554 0.014375 -0.000992707 -0.00274554 0.014875 -0.000992707 -0.00238568 0.014875 -0.00097094 -0.0022704 0.014875 -0.000935014 -0.00196188 0.014875 -0.000748509 -0.001625 0.014875 1.95062e-09 -0.00356002 0.014875 -0.000354603 -0.00165406 0.014875 -0.000239314 -0.00168998 0.014875 -0.000354603 -0.00173954 0.014875 
-0.000464721 -0.00187649 0.014875 -0.000663121 -0.00205694 0.014875 -0.000822982 -0.002625 0.014875 -0.000999998 -0.00250446 0.014875 -0.000992707 -0.00344798 0.014875 -0.000568063 -0.00180202 0.014875 -0.000568063 -0.00328812 0.014875 -0.000748509 -0.003625 0.014875 1.95062e-09 -0.00344798 0.014875 0.000568067 -0.00337351 0.014875 0.000663125 -0.00196188 0.014875 0.000748513 -0.00216028 0.014875 0.000885458 -0.00238568 0.014875 0.000970944 -0.00286432 0.014875 0.000970944 -0.00274554 0.014875 0.000992711 -0.00163229 0.014875 0.000120539 -0.00165406 0.014875 0.000239317 -0.00168998 0.014875 0.000354607 -0.00180202 0.014875 0.000568067 -0.00319306 0.014875 0.000822986 -0.0022704 0.014875 0.000935018 -0.00304687 0.013125 0.0022101 -0.00342986 0.009875 0.00300176 -0.00343403 0.013125 0.00300896 -0.00375589 0.013125 0.00350596 -0.00411236 0.009875 0.00395425 -0.00393482 0.013125 0.00374195 -0.00453828 0.013125 0.00439303 -0.00476002 0.013125 0.00458933 -0.00520382 0.009875 0.00492905 -0.00499119 0.013125 0.00477445 -0.00544886 0.009875 0.0050904 -0.0052312 0.013125 0.00494794 -0.00682313 0.013125 0.00572305 -0.00798085 0.013125 0.00596532 -0.00857197 0.013125 0.00599977 -0.0094447 0.009875 0.00594374 -0.00916361 0.013125 0.00597578 -0.00329188 0.013125 0.00274915 -0.00358281 0.009875 0.00325212 -0.00358883 0.013125 0.00326143 -0.00412517 0.013125 0.00396882 -0.00431105 0.009875 0.00417011 -0.00452005 0.009875 0.004376 -0.0059611 0.009875 0.00537621 -0.00622708 0.009875 0.0055 -0.00648075 0.009875 0.00560377 -0.00654293 0.013125 0.00562717 -0.00673889 0.009875 0.00569584 -0.00700096 0.009875 0.00577603 -0.00710772 0.013125 0.00580499 -0.008625 0.009875 0.006 -0.008899 0.009875 0.00599374 -0.0103254 0.013125 0.005754 -0.0106073 0.013125 0.00566309 -0.0121739 0.013125 0.00483794 -0.0124083 0.013125 0.00465693 -0.0126646 0.009875 0.00443641 -0.0128489 0.013125 0.00426135 -0.0132455 0.009875 0.0038277 -0.0132483 0.013125 0.00382427 -0.0134194 0.009875 0.00360743 -0.0130614 0.009875 0.00403959 -0.00975 0.013125 0.00589359 -0.00998357 0.009875 0.00584417 -0.0110229 0.009875 0.0055 -0.0111558 0.013125 0.00544012 -0.0115263 0.009875 0.00525191 -0.0117687 0.009875 0.00511048 -0.0124527 0.009875 0.00462047 -0.0126334 0.013125 0.00446458 -0.013054 0.013125 0.00404774 -0.0136027 0.013125 0.00334994 -0.0142288 0.009875 0.00214425 -0.0141632 0.013125 0.00230834 -0.0143637 0.013125 0.00175122 -0.014625 0.009875 1.29495e-09 -0.0137355 0.009875 0.00314372 -0.0140068 0.009875 0.0026525 -0.014401 0.009875 0.00162404 -0.0144692 0.009875 0.00135858 -0.0145084 0.013125 0.00117704 -0.0145251 0.009875 0.00109028 -0.0145593 0.013125 0.000885302 -0.0145958 0.013125 0.000591403 -0.0146177 0.013125 0.000296063 -0.0146187 0.009875 0.000273998 -0.0146 0.009875 -0.00054742 -0.0146177 0.013125 -0.000296059 -0.0145593 0.013125 -0.000885298 -0.0134194 0.009875 -0.00360743 -0.0132455 0.009875 -0.0038277 -0.0132483 0.013125 -0.00382426 -0.0145958 0.013125 -0.000591399 -0.0144692 0.009875 -0.00135857 -0.0143208 0.009875 -0.00188611 -0.0141632 0.013125 -0.00230834 -0.0140425 0.013125 -0.0025788 -0.0140068 0.009875 -0.0026525 -0.0138769 0.009875 -0.00290128 -0.013762 0.013125 -0.00310024 -0.0135829 0.009875 -0.00337927 -0.0130614 0.009875 -0.00403959 -0.013054 0.013125 -0.00404774 -0.0128676 0.009875 -0.00424264 -0.0120043 0.009875 -0.00495787 -0.0115263 0.009875 -0.00525191 -0.0112775 0.009875 -0.00538184 -0.0114212 0.013125 -0.00530861 -0.0111558 0.013125 -0.00544012 -0.0105111 0.009875 -0.00569584 -0.00998357 0.009875 -0.00584416 
-0.00916361 0.013125 -0.00597577 -0.0128489 0.013125 -0.00426135 -0.0126646 0.009875 -0.00443641 -0.0126334 0.013125 -0.00446457 -0.0124527 0.009875 -0.00462047 -0.0122324 0.009875 -0.00479442 -0.0117687 0.009875 -0.00511048 -0.0116797 0.013125 -0.00516417 -0.008899 0.009875 -0.00599374 -0.00886809 0.013125 -0.00599507 -0.00807758 0.009875 -0.00597497 -0.00827599 0.013125 -0.00598984 -0.00726643 0.009875 -0.00584416 -0.00570148 0.009875 -0.00523956 -0.00573543 0.013125 -0.00525836 -0.00412517 0.013125 -0.00396882 -0.00392446 0.009875 -0.00372894 -0.00375589 0.013125 -0.00350596 -0.00358883 0.013125 -0.00326143 -0.00342986 0.009875 -0.00300176 -0.00768729 0.013125 -0.00592627 -0.00682313 0.013125 -0.00572305 -0.00673889 0.009875 -0.00569584 -0.00654293 0.013125 -0.00562716 -0.00648075 0.009875 -0.00560376 -0.00499119 0.013125 -0.00477445 -0.00453828 0.013125 -0.00439303 -0.00316272 0.013125 -0.00248265 -0.00316158 0.009875 -0.00248012 0.012125 0.005375 -0.006 0.0122455 0.006875 -0.00599271 0.0125897 0.006875 -0.00588546 0.0125897 0.005375 -0.00588546 0.0126931 0.006875 -0.00582298 0.0128735 0.006875 -0.00566312 0.0130105 0.005375 -0.00546472 0.0130959 0.006875 -0.00523932 0.013125 0.006875 -0.005 0.0123643 0.005375 -0.00597094 0.0124796 0.006875 -0.00593502 0.0126931 0.005375 -0.00582298 0.0127881 0.006875 -0.00574851 0.0127881 0.005375 -0.00574851 0.0130105 0.006875 -0.00546472 0.01306 0.005375 -0.0053546 0.0106265 0.005375 0.000663123 0.0107119 0.005375 0.000748512 0.0108069 0.005375 0.000822984 0.0110204 0.005375 0.000935017 0.0112545 0.005375 0.000992709 0.0116143 0.005375 0.000970943 0.0117296 0.005375 0.000935017 0.0119431 0.005375 0.000822984 0.0125897 0.005375 0.00588546 0.01231 0.005375 0.000354605 0.0123677 0.005375 -0.000120536 0.0119431 0.005375 -0.000822983 0.0122455 0.005375 -0.00599271 0.0118397 0.005375 -0.000885455 0.0117296 0.005375 -0.000935016 0.0112545 0.005375 -0.000992708 0.0110204 0.005375 -0.000935016 0.0108069 0.005375 -0.000822983 0.00861884 0.005375 -0.00557822 0.00860053 0.005375 -0.00565451 0.0085705 0.005375 -0.00572699 0.00835199 0.005375 -0.0059455 0.00820322 0.005375 -0.00599384 0.012375 0.005375 7.04846e-10 0.0104041 0.005375 -0.000239315 0.0103823 0.005375 -0.000120536 0.0124796 0.005375 -0.00593502 0.013125 0.005375 -0.005 0.0128735 0.005375 -0.00566312 0.012948 0.005375 -0.00556806 0.0130959 0.005375 -0.00523932 0.0131177 0.005375 -0.00512054 0.0130959 0.005375 0.00523932 0.01306 0.005375 0.00535461 0.0130105 0.005375 0.00546472 0.012948 0.005375 0.00556807 0.0127881 0.005375 0.00574851 0.0126931 0.005375 0.00582298 0.00827951 0.005375 0.00597553 0.012125 0.005375 0.006 0.00835199 0.005375 0.0059455 0.0085705 0.005375 0.005727 0.0131177 0.006875 0.00512054 0.013125 0.005375 0.005 0.0131177 0.005375 0.00512054 0.012948 0.006875 0.00556807 0.0128735 0.006875 0.00566312 0.0125897 0.006875 0.00588546 0.0124796 0.006875 0.00593502 0.0123643 0.005375 0.00597094 0.0122455 0.006875 0.00599271 0.0130959 0.006875 0.00523932 0.01306 0.006875 0.00535461 0.0130105 0.006875 0.00546472 0.0128735 0.005375 0.00566312 0.0127881 0.006875 0.00574851 0.0126931 0.006875 0.00582298 0.0124796 0.005375 0.00593502 0.0123643 0.006875 0.00597094 0.0122455 0.005375 0.00599271 -0.012625 -0.009375 0.002 -0.014625 -0.009375 0.002 -0.012625 -0.009375 -0.002 -0.00622708 0.009875 -0.0055 -0.0059611 0.009875 -0.00537621 -0.00544886 0.009875 -0.00509039 -0.00520382 0.009875 -0.00492905 -0.00496697 0.009875 -0.00475592 -0.00473886 0.009875 -0.00457143 0.008125 0.009875 -0.0055 -0.00452005 
0.009875 -0.004376 -0.00431105 0.009875 -0.00417011 -0.00411236 0.009875 -0.00395425 -0.000819998 0.009875 -0.0013433 -0.000629095 0.009875 -0.00103868 -0.000489144 0.009875 -0.000707545 -0.000382192 0.009875 -0.000179747 -0.000375 0.009875 1.29495e-09 -0.000382192 0.009875 0.000179749 -0.00040372 0.009875 0.000358349 -0.000439446 0.009875 0.000534657 -0.000819998 0.009875 0.00134331 -0.00133399 0.009875 0.00184277 -0.00180885 0.009875 0.00209676 -0.00392446 0.009875 0.00372894 -0.00374781 0.009875 0.00349471 -0.00197897 0.009875 0.00215526 -0.00250962 0.009875 0.00224704 -0.00316158 0.009875 0.00248013 -0.00286897 0.009875 0.00223674 -0.00197897 0.009875 -0.00215526 -0.00374781 0.009875 -0.0034947 -0.00215321 0.009875 -0.00219998 -0.00180885 0.009875 -0.00209676 -0.00358281 0.009875 -0.00325212 -0.00328934 0.009875 -0.00274422 -0.00304687 0.009875 -0.00221009 -0.00328934 0.009875 0.00274422 -0.00473886 0.009875 0.00457143 -0.00496697 0.009875 0.00475593 0.008125 0.009875 0.0055 -0.00570148 0.009875 0.00523957 0.0103823 0.005375 0.000120538 0.0104041 0.005375 0.000239316 0.0103823 0.006875 0.000120538 0.0104041 0.006875 0.000239316 0.01044 0.005375 0.000354605 0.010552 0.005375 0.000568065 0.0106265 0.006875 0.000663124 0.0109103 0.006875 0.000885457 0.0112545 0.006875 0.00099271 0.011375 0.005375 0.001 0.0114955 0.006875 0.00099271 0.0121235 0.005375 0.000663123 0.012198 0.005375 0.000568065 0.0123459 0.006875 0.000239316 0.0123459 0.005375 -0.000239315 0.0123459 0.006875 -0.000239315 0.01231 0.005375 -0.000354604 0.0122605 0.005375 -0.000464722 0.01231 0.006875 -0.000354604 0.0119431 0.006875 -0.000822983 0.0118397 0.006875 -0.000885455 0.011375 0.005375 -0.000999999 0.0114955 0.006875 -0.000992708 0.0111357 0.005375 -0.000970941 0.0109103 0.005375 -0.000885455 0.0106265 0.005375 -0.000663122 0.010552 0.005375 -0.000568064 0.0106265 0.006875 -0.000663122 0.0104895 0.005375 -0.000464722 0.010552 0.006875 -0.000568064 0.0104895 0.006875 -0.000464722 0.0103823 0.006875 -0.000120536 0.010375 0.005375 7.04846e-10 0.0104895 0.005375 0.000464724 0.0107119 0.006875 0.000748512 0.0108069 0.006875 0.000822985 0.0109103 0.005375 0.000885457 0.0110204 0.006875 0.000935017 0.0111357 0.005375 0.000970943 0.0111357 0.006875 0.000970943 0.011375 0.006875 0.001 0.0114955 0.005375 0.000992709 0.0118397 0.005375 0.000885457 0.0118397 0.006875 0.000885457 0.0119431 0.006875 0.000822985 0.0120381 0.005375 0.000748512 0.0120381 0.006875 0.000748512 0.0122605 0.005375 0.000464724 0.0123459 0.005375 0.000239316 0.0123677 0.005375 0.000120538 0.012198 0.005375 -0.000568064 0.0121235 0.005375 -0.000663122 0.0120381 0.005375 -0.00074851 0.0120381 0.006875 -0.00074851 0.0117296 0.006875 -0.000935015 0.0116143 0.005375 -0.000970941 0.0116143 0.006875 -0.000970941 0.0114955 0.005375 -0.000992708 0.011375 0.006875 -0.000999999 0.0110204 0.006875 -0.000935015 0.0107119 0.005375 -0.00074851 0.01044 0.005375 -0.000354604 0.01044 0.006875 -0.000354604 0.0104041 0.006875 -0.000239315 0.0131177 0.006875 -0.00512054 0.01306 0.006875 -0.0053546 0.012948 0.006875 -0.00556806 0.0123643 0.006875 -0.00597094 0.0121235 0.006875 -0.000663122 0.012198 0.006875 -0.000568064 0.0122605 0.006875 -0.000464722 0.0123677 0.006875 -0.000120536 0.012375 0.006875 9.01547e-10 0.0123677 0.006875 0.000120538 0.01231 0.006875 0.000354606 0.0122605 0.006875 0.000464724 0.012198 0.006875 0.000568066 0.0121235 0.006875 0.000663124 0.0117296 0.006875 0.000935017 0.0116143 0.006875 0.000970943 0.00852951 0.006875 0.00579389 0.00835199 0.006875 
0.0059455 0.00820322 0.006875 0.00599385 0.008125 0.006875 0.006 0.012125 0.006875 0.006 0.012125 0.006875 -0.006 0.00820322 0.006875 -0.00599384 0.00827951 0.006875 -0.00597553 0.00852951 0.006875 -0.00579389 0.0109103 0.006875 -0.000885455 0.0111357 0.006875 -0.000970941 0.0112545 0.006875 -0.000992708 0.008625 0.006875 0.0055 0.010375 0.006875 9.01547e-10 0.01044 0.006875 0.000354606 0.0104895 0.006875 0.000464724 0.010552 0.006875 0.000568066 0.013125 0.006875 0.005 0.008625 0.006875 -0.0055 0.0107119 0.006875 -0.00074851 0.0108069 0.006875 -0.000822983 0.014625 -0.003625 0.008 0.013625 -0.004125 0.009 0.013125 -0.004625 0.013 0.014125 0.005375 0.012 0.013625 0.005875 0.009 0.014125 0.005375 0.0085 0.014125 -0.003625 0.012 0.014125 -0.003625 0.0085 0.013125 -0.004625 -0.009 0.014125 -0.003625 -0.012 0.014125 0.005375 -0.012 0.014625 -0.003625 -0.012 0.014625 -0.003625 -0.009 0.014125 0.005375 -0.009 0.014125 -0.003625 -0.009 0.00829657 -0.007875 0.006125 0.008625 -0.00764277 0.006125 0.011125 -0.00728922 0.008125 0.011125 -0.005875 0.006125 0.013125 -0.005875 0.008125 0.013125 -0.004625 0.009 0.013125 0.013375 -0.0115 0.013125 0.0126265 0.0118369 0.013125 0.006375 0.009 0.013125 0.01244 0.0121454 0.013125 0.006375 0.013 0.013125 0.0123823 0.0126205 0.013125 0.01244 0.0128546 0.013125 0.012552 0.0130681 0.013125 0.0126265 0.0131631 0.013125 0.0127119 0.0132485 0.013125 0.0129103 0.0133855 0.013125 0.0131357 0.0134709 0.013125 -0.0112704 0.013435 0.013125 -0.0118643 0.0134709 0.013125 -0.014625 0.0155 0.013125 -0.0123735 0.0131631 0.013125 -0.01256 0.0128546 0.013125 -0.0126177 0.0123795 0.013125 -0.0123735 0.0118369 0.013125 -0.0122881 0.0117515 0.013125 -0.0120897 0.0116145 0.013125 -0.0119796 0.011565 0.013125 -0.0117455 0.0115073 0.013125 -0.012875 0.010125 0.013125 -0.011625 0.0115 0.013125 -0.0112704 0.011565 0.013125 -0.0111603 0.0116145 0.013125 -0.0109619 0.0117515 0.013125 -0.0108765 0.0118369 0.013125 -0.00728922 0.010125 0.013125 -0.01069 0.0121454 0.013125 -0.010802 0.0130681 0.013125 -0.0108765 0.0131631 0.013125 0.005375 -0.008125 0.013125 -0.005875 -0.008125 0.013125 -0.0106323 -0.0126205 0.013125 -0.00728921 -0.010125 0.013125 -0.01069 -0.0121454 0.013125 -0.0107395 -0.0120353 0.013125 -0.0108765 -0.0118369 0.013125 -0.0111603 -0.0116145 0.013125 -0.0110569 -0.011677 0.013125 -0.0113857 -0.0115291 0.013125 -0.0117455 -0.0115073 0.013125 -0.0120897 -0.0116145 0.013125 -0.012875 -0.010125 0.013125 -0.012448 -0.0119319 0.013125 -0.0125105 -0.0120353 0.013125 -0.0126177 -0.0123795 0.013125 0.006375 -0.013 0.013125 0.0129103 -0.0133855 0.013125 0.0127119 -0.0132485 0.013125 0.01244 -0.0128546 0.013125 0.0123823 -0.0126205 0.013125 0.01244 -0.0121454 0.013125 0.006375 -0.009 0.013125 0.012552 -0.0119319 0.013125 0.0127119 -0.0117515 0.013125 0.0128069 -0.011677 0.013125 0.0131357 -0.0115291 0.013125 0.0132545 -0.0115073 0.013125 0.016375 -0.0155 0.013125 0.0143459 -0.0122607 0.013125 0.014198 -0.0130681 0.013125 0.0142605 -0.0129647 0.013125 0.0139431 -0.013323 0.013125 0.0138397 -0.0133855 0.013125 0.0134955 -0.0134927 0.013125 0.0132545 -0.0134927 0.013125 0.0131357 -0.0134709 0.013125 -0.012625 -0.0125 0.013125 -0.01256 -0.0128546 0.013125 -0.0125105 -0.0129647 0.013125 -0.014625 -0.0155 0.013125 -0.0122881 -0.0132485 0.013125 -0.0117455 -0.0134927 0.013125 -0.0112704 -0.013435 0.013125 -0.004625 -0.013 0.013125 -0.0108765 -0.0131631 0.013125 -0.010802 -0.0130681 0.013125 -0.0107395 -0.0129647 0.013125 -0.01069 -0.0128546 0.013125 -0.0106541 -0.0127393 0.013125 0.0142605 
-0.0120353 0.013125 0.0140381 0.0117515 0.013125 0.0139431 0.011677 0.013125 0.0139431 -0.011677 0.013125 0.0137296 0.011565 0.013125 0.0134955 0.0115073 0.013125 0.0134955 -0.0115073 0.013125 0.0141235 0.0118369 0.013125 0.0140381 -0.0117515 0.013125 0.0136143 0.0115291 0.013125 0.0143677 0.0123795 0.013125 0.016375 0.0155 0.013125 0.0143677 0.0126205 0.013125 0.01431 0.0128546 0.013125 0.0143459 0.0127393 0.013125 0.013375 0.0135 0.013125 0.0132545 0.0134927 0.011125 -0.00728921 -0.008125 0.0102966 -0.007875 -0.008125 0.008625 -0.00764277 -0.006125 0.011125 -0.005875 -0.006125 -0.00820748 -0.012875 -0.00297081 -0.00883427 -0.012875 -0.00299269 -0.00904252 -0.012875 -0.00297081 -0.00945191 -0.012875 -0.00288379 -0.00965106 -0.012875 -0.00281908 -0.00984521 -0.012875 -0.00274064 -0.0105534 -0.012875 -0.00229813 -0.010989 -0.012875 -0.00184699 -0.0112231 -0.012875 -0.0015 -0.014375 -0.012875 -0.007125 -0.0115794 -0.012875 -0.000520946 -0.0116086 -0.012875 -0.000313587 -0.0116232 -0.012875 -0.0001047 -0.0116232 -0.012875 0.000104697 -0.0114782 -0.012875 0.000927049 -0.0111121 -0.012875 0.00167758 -0.010709 -0.012875 0.00215802 -0.0105534 -0.012875 0.00229813 -0.014375 -0.012875 0.007125 -0.0103884 -0.012875 0.00242705 -0.0100334 -0.012875 0.00264884 -0.00965106 -0.012875 0.00281908 -0.008625 -0.012875 0.003 -0.00820748 -0.012875 0.0029708 -0.013375 -0.012875 0.008125 -0.00759894 -0.012875 0.00281908 -0.00740479 -0.012875 0.00274063 -0.00721658 -0.012875 0.00264884 -0.00703524 -0.012875 0.00254414 -0.00669664 -0.012875 0.00229813 -0.00654102 -0.012875 0.00215802 -0.00639557 -0.012875 0.00200739 -0.00626097 -0.012875 0.00184698 -0.00613789 -0.012875 0.00167758 -0.00592862 -0.012875 0.00131511 -0.00584345 -0.012875 0.00112382 -0.00577183 -0.012875 0.000927049 -0.00571411 -0.012875 0.000725764 -0.00577183 -0.012875 -0.000927053 -0.00613789 -0.012875 -0.00167758 -0.00626097 -0.012875 -0.00184699 -0.00669664 -0.012875 -0.00229813 -0.00721658 -0.012875 -0.00264884 -0.013375 -0.012875 -0.008125 -0.00759894 -0.012875 -0.00281908 -0.00800126 -0.012875 -0.00293444 0.011125 -0.012875 -0.008125 0.011125 -0.012875 0.008125 -0.013375 -0.007875 -0.008125 -0.013375 -0.007875 0.008125 -0.014375 -0.007875 -0.007125 -0.014375 -0.011375 -0.006125 -0.014375 -0.011375 0.006125 -0.014375 -0.007875 0.006125 0.0102966 -0.007875 0.008125 -0.014375 -0.007875 0.007125 -0.014375 -0.007875 -0.006125 0.00829657 -0.007875 -0.006125 0.013125 -0.0106323 0.0126205 0.014625 -0.0106323 0.0126205 0.013125 -0.0106541 0.0127393 0.013125 -0.01069 0.0128546 0.014625 -0.01069 0.0128546 0.013125 -0.0107395 0.0129647 0.014625 -0.0107395 0.0129647 0.013125 -0.0109619 0.0132485 0.013125 -0.0111603 0.0133855 0.013125 -0.0113857 0.0134709 0.013125 -0.0120897 0.0133855 0.013125 -0.0122881 0.0132485 0.014625 -0.0122881 0.0132485 0.013125 -0.012448 0.0130681 0.014625 -0.012448 0.0130681 0.014625 -0.0125105 0.0129647 0.013125 -0.0126177 0.0126205 0.014625 -0.012625 0.0125 0.014625 -0.0126177 0.0123795 0.013125 -0.01256 0.0121454 0.013125 -0.0125105 0.0120353 0.013125 -0.0118643 0.0115291 0.014625 -0.011625 0.0115 0.014625 -0.0111603 0.0116145 0.014625 -0.0110569 0.011677 0.013125 -0.010802 0.0119319 0.014625 -0.010802 0.0119319 0.013125 -0.010625 0.0125 0.014625 -0.010802 0.0130681 0.013125 -0.0110569 0.013323 0.013125 -0.0115045 0.0134927 0.013125 -0.011625 0.0135 0.014625 -0.011625 0.0135 0.013125 -0.0117455 0.0134927 0.013125 -0.0119796 0.013435 0.014625 -0.0120897 0.0133855 0.013125 -0.0121931 0.013323 0.014625 -0.0121931 0.013323 0.013125 
-0.0125105 0.0129647 0.013125 -0.0125959 0.0127393 0.014625 -0.0125959 0.0127393 0.013125 -0.012625 0.0125 0.013125 -0.0125959 0.0122607 0.014625 -0.0125105 0.0120353 0.013125 -0.012448 0.0119319 0.014625 -0.012448 0.0119319 0.014625 -0.0123735 0.0118369 0.014625 -0.0122881 0.0117515 0.013125 -0.0121931 0.011677 0.013125 -0.0115045 0.0115073 0.014625 -0.0115045 0.0115073 0.013125 -0.0113857 0.0115291 0.013125 -0.0110569 0.011677 0.013125 -0.0107395 0.0120353 0.013125 -0.0106541 0.0122607 0.013125 -0.0106323 0.0123795 0.014625 0.0143677 0.0126205 0.014625 0.014375 0.0125 0.013125 0.0142605 0.0129647 0.014625 0.01431 0.0128546 0.014625 0.014198 0.0130681 0.014625 0.0141235 0.0131631 0.013125 0.0137296 0.013435 0.014625 0.0136143 0.0134709 0.014625 0.0134955 0.0134927 0.013125 0.0130204 0.013435 0.014625 0.0131357 0.0134709 0.013125 0.0128069 0.013323 0.014625 0.0128069 0.013323 0.014625 0.0126265 0.0131631 0.014625 0.012552 0.0130681 0.014625 0.01244 0.0128546 0.014625 0.0124041 0.0127393 0.014625 0.0124041 0.0122607 0.014625 0.0124895 0.0120353 0.013125 0.0127119 0.0117515 0.013125 0.0131357 0.0115291 0.014625 0.0132545 0.0115073 0.013125 0.013375 0.0115 0.013125 0.0138397 0.0116145 0.014625 0.0137296 0.011565 0.013125 0.0142605 0.0120353 0.013125 0.0143459 0.0122607 0.013125 0.014198 0.0130681 0.013125 0.0141235 0.0131631 0.013125 0.0140381 0.0132485 0.013125 0.0139431 0.013323 0.014625 0.0139431 0.013323 0.013125 0.0138397 0.0133855 0.013125 0.0136143 0.0134709 0.013125 0.0134955 0.0134927 0.014625 0.013375 0.0135 0.014625 0.0132545 0.0134927 0.014625 0.0130204 0.013435 0.013125 0.0124895 0.0129647 0.014625 0.0124895 0.0129647 0.013125 0.0124041 0.0127393 0.013125 0.012375 0.0125 0.013125 0.0123823 0.0123795 0.013125 0.0124041 0.0122607 0.013125 0.0124895 0.0120353 0.013125 0.012552 0.0119319 0.014625 0.012552 0.0119319 0.013125 0.0128069 0.011677 0.014625 0.0128069 0.011677 0.013125 0.0129103 0.0116145 0.013125 0.0130204 0.011565 0.014625 0.0130204 0.011565 0.013125 0.0132545 0.0115073 0.014625 0.0139431 0.011677 0.014625 0.0140381 0.0117515 0.014625 0.0141235 0.0118369 0.013125 0.014198 0.0119319 0.014625 0.014198 0.0119319 0.013125 0.01431 0.0121454 0.014625 0.0143677 0.0123795 0.013125 0.014375 0.0125 0.013125 0.0143677 -0.0123795 0.014625 0.0143677 -0.0123795 0.013125 0.01431 -0.0121454 0.014625 0.01431 -0.0121454 0.014625 0.0141235 -0.0118369 0.014625 0.0139431 -0.011677 0.014625 0.0138397 -0.0116145 0.013125 0.0137296 -0.011565 0.014625 0.0137296 -0.011565 0.013125 0.0136143 -0.0115291 0.014625 0.0136143 -0.0115291 0.014625 0.0131357 -0.0115291 0.013125 0.0130204 -0.011565 0.014625 0.0129103 -0.0116145 0.014625 0.0127119 -0.0117515 0.014625 0.012552 -0.0119319 0.013125 0.0124895 -0.0120353 0.013125 0.0124041 -0.0122607 0.014625 0.01244 -0.0121454 0.013125 0.0123823 -0.0123795 0.014625 0.0124041 -0.0122607 0.013125 0.0124041 -0.0127393 0.014625 0.0123823 -0.0126205 0.014625 0.0124041 -0.0127393 0.013125 0.0124895 -0.0129647 0.014625 0.0124895 -0.0129647 0.013125 0.0126265 -0.0131631 0.014625 0.0128069 -0.013323 0.014625 0.0129103 -0.0133855 0.013125 0.0130204 -0.013435 0.014625 0.0130204 -0.013435 0.014625 0.0131357 -0.0134709 0.014625 0.0134955 -0.0134927 0.013125 0.0137296 -0.013435 0.014625 0.0141235 -0.0131631 0.014625 0.014198 -0.0130681 0.013125 0.0143459 -0.0127393 0.014625 0.01431 -0.0128546 0.013125 0.0143677 -0.0126205 0.014625 0.0143459 -0.0127393 0.013125 0.014198 -0.0119319 0.013125 0.0141235 -0.0118369 0.013125 0.0138397 -0.0116145 0.013125 0.0129103 -0.0116145 
0.013125 0.0126265 -0.0118369 0.014625 0.0126265 -0.0118369 0.014625 0.0124895 -0.0120353 0.014625 0.0123823 -0.0123795 0.013125 0.012375 -0.0125 0.013125 0.012552 -0.0130681 0.014625 0.0126265 -0.0131631 0.013125 0.0128069 -0.013323 0.014625 0.0132545 -0.0134927 0.013125 0.013375 -0.0135 0.013125 0.0136143 -0.0134709 0.014625 0.0136143 -0.0134709 0.014625 0.0137296 -0.013435 0.014625 0.0139431 -0.013323 0.013125 0.0140381 -0.0132485 0.014625 0.0140381 -0.0132485 0.013125 0.0141235 -0.0131631 0.013125 0.01431 -0.0128546 0.013125 0.014375 -0.0125 0.014625 -0.0106323 -0.0123795 0.013125 -0.0106323 -0.0123795 0.013125 -0.0106541 -0.0122607 0.014625 -0.01069 -0.0121454 0.014625 -0.0107395 -0.0120353 0.013125 -0.010802 -0.0119319 0.014625 -0.010802 -0.0119319 0.014625 -0.0108765 -0.0118369 0.014625 -0.0111603 -0.0116145 0.013125 -0.011625 -0.0115 0.014625 -0.0120897 -0.0116145 0.013125 -0.0122881 -0.0117515 0.014625 -0.0121931 -0.011677 0.014625 -0.0125105 -0.0120353 0.013125 -0.0125959 -0.0122607 0.014625 -0.0125959 -0.0122607 0.013125 -0.0126177 -0.0126205 0.014625 -0.0126177 -0.0126205 0.014625 -0.012448 -0.0130681 0.014625 -0.0122881 -0.0132485 0.013125 -0.0120897 -0.0133855 0.014625 -0.0121931 -0.013323 0.014625 -0.0119796 -0.013435 0.013125 -0.011625 -0.0135 0.014625 -0.0113857 -0.0134709 0.013125 -0.0110569 -0.013323 0.013125 -0.0109619 -0.0132485 0.014625 -0.0109619 -0.0132485 0.014625 -0.0108765 -0.0131631 0.014625 -0.010802 -0.0130681 0.014625 -0.0106323 -0.0126205 0.014625 -0.010625 -0.0125 0.013125 -0.010625 -0.0125 0.013125 -0.0109619 -0.0117515 0.014625 -0.0109619 -0.0117515 0.013125 -0.0112704 -0.011565 0.013125 -0.0115045 -0.0115073 0.014625 -0.0117455 -0.0115073 0.013125 -0.0118643 -0.0115291 0.013125 -0.0119796 -0.011565 0.013125 -0.0121931 -0.011677 0.013125 -0.0123735 -0.0118369 0.013125 -0.01256 -0.0121454 0.014625 -0.01256 -0.0121454 0.014625 -0.0126177 -0.0123795 0.013125 -0.0125959 -0.0127393 0.014625 -0.0125959 -0.0127393 0.014625 -0.0125105 -0.0129647 0.013125 -0.012448 -0.0130681 0.013125 -0.0123735 -0.0131631 0.013125 -0.0121931 -0.013323 0.013125 -0.0119796 -0.013435 0.013125 -0.0118643 -0.0134709 0.014625 -0.011625 -0.0135 0.013125 -0.0115045 -0.0134927 0.013125 -0.0113857 -0.0134709 0.014625 -0.0112704 -0.013435 0.013125 -0.0111603 -0.0133855 0.014625 -0.0110569 -0.013323 0.014625 -0.0107395 -0.0129647 0.014625 -0.01069 -0.0128546 0.014625 -0.0107395 0.0120353 0.014625 -0.01069 0.0121454 0.014625 -0.0106541 0.0122607 0.014625 -0.0106323 0.0123795 0.014625 -0.010625 0.0125 0.014625 -0.0106541 0.0127393 0.014625 -0.0108765 0.0131631 0.014625 -0.0109619 0.0132485 0.014625 -0.003625 0.012 0.014625 -0.0110569 0.013323 0.014625 -0.0111603 0.0133855 0.014625 0.005375 0.012 0.014625 0.0137296 0.013435 0.014625 0.0138397 0.0133855 0.014625 0.0140381 0.0132485 0.014625 0.0142605 0.0129647 0.014625 0.0143459 0.0127393 0.014625 0.017875 0.017 0.014625 0.0143459 0.0122607 0.014625 0.01431 0.0121454 0.014625 0.0142605 0.0120353 0.014625 0.0142605 -0.0120353 0.014625 0.014198 -0.0119319 0.014625 0.0143459 -0.0122607 0.014625 0.014375 -0.0125 0.014625 0.0143677 -0.0126205 0.014625 0.0142605 -0.0129647 0.014625 0.0138397 -0.0133855 0.014625 0.013375 -0.0135 0.014625 0.0127119 -0.0132485 0.014625 0.012552 -0.0130681 0.014625 0.01244 -0.0128546 0.014625 0.012375 -0.0125 0.014625 0.005375 -0.009 0.014625 0.0128069 -0.011677 0.014625 0.0130204 -0.011565 0.014625 0.0132545 -0.0115073 0.014625 0.013375 -0.0115 0.014625 0.013375 0.0115 0.014625 0.0131357 0.0115291 0.014625 0.0129103 
0.0116145 0.014625 0.0127119 0.0117515 0.014625 0.0126265 0.0118369 0.014625 0.01244 0.0121454 0.014625 0.0123823 0.0123795 0.014625 0.0123823 0.0126205 0.014625 0.012375 0.0125 0.014625 0.0127119 0.0132485 0.014625 0.0129103 0.0133855 0.014625 0.005375 0.008 0.014625 -0.0115045 -0.0115073 0.014625 -0.0113857 -0.0115291 0.014625 -0.0112704 -0.011565 0.014625 -0.0110569 -0.011677 0.014625 -0.0106541 -0.0122607 0.014625 -0.0106541 -0.0127393 0.014625 -0.0115045 -0.0134927 0.014625 -0.0117455 -0.0134927 0.014625 -0.0118643 -0.0134709 0.014625 -0.0120897 -0.0133855 0.014625 -0.0123735 -0.0131631 0.014625 -0.01256 -0.0128546 0.014625 -0.012625 -0.0125 0.014625 -0.01256 0.0121454 0.014625 -0.0125959 0.0122607 0.014625 -0.0126177 0.0126205 0.014625 -0.01256 0.0128546 0.014625 -0.0123735 0.0131631 0.014625 -0.016125 0.017 0.014625 -0.0119796 0.013435 0.014625 -0.0118643 0.0134709 0.014625 -0.0117455 0.0134927 0.014625 -0.0115045 0.0134927 0.014625 -0.0113857 0.0134709 0.014625 -0.0112704 0.013435 0.014625 0.017875 -0.017 0.014625 -0.016125 -0.017 0.014625 0.005375 -0.012 0.014625 -0.0111603 -0.0133855 0.014625 -0.0108765 0.0118369 0.014625 -0.0109619 0.0117515 0.014625 -0.0112704 0.011565 0.014625 -0.0113857 0.0115291 0.014625 -0.011625 -0.0115 0.014625 -0.0117455 0.0115073 0.014625 -0.0118643 -0.0115291 0.014625 -0.0119796 -0.011565 0.014625 -0.0119796 0.011565 0.014625 -0.0120897 0.0116145 0.014625 -0.0121931 0.011677 0.014625 -0.0118643 0.0115291 0.014625 -0.0122881 -0.0117515 0.014625 -0.0123735 -0.0118369 0.014625 -0.012448 -0.0119319 0.014625 0.0138397 0.0116145 0.014625 0.0136143 0.0115291 0.014625 0.0134955 -0.0115073 0.014625 0.0134955 0.0115073 0.014625 0.0140381 -0.0117515 0.0112706 0.000375 0.000742701 0.0112706 0.00204167 0.000742701 0.0111683 0.00204167 0.000720947 0.0109776 0.005375 0.000636037 0.0110699 0.00370833 0.00068516 0.0110699 0.00204167 0.00068516 0.0109776 0.00204167 0.000636036 0.0108176 0.005375 0.000501848 0.0108929 0.005375 0.000574534 0.0109776 0.00370833 0.000636036 0.0108929 0.00370833 0.000574534 0.0108176 0.00370833 0.000501848 0.0108176 0.00204167 0.000501848 0.0107532 0.00370833 0.000419395 0.0108929 0.000375 0.000574534 0.0107532 0.00204167 0.000419395 0.0107009 0.00370833 0.000328779 0.0107009 0.005375 0.000328779 0.0106364 0.005375 0.000130237 0.0107009 0.000375 0.000328778 0.0106617 0.00204167 0.000231763 0.0106617 0.00370833 0.000231763 0.0106364 0.00370833 0.000130237 0.0106255 0.005375 2.61753e-05 0.0106291 0.005375 -7.83956e-05 0.0106473 0.005375 -0.000181441 0.0106364 0.000375 0.000130236 0.0106291 0.00204167 -7.83961e-05 0.0106291 0.00370833 -7.83958e-05 0.0106291 0.000375 -7.83963e-05 0.0107255 0.005375 -0.000374999 0.0106796 0.00204167 -0.000280955 0.0106796 0.00370833 -0.000280954 0.0107255 0.00204167 -0.000375 0.010784 0.00370833 -0.000461746 0.010784 0.00204167 -0.000461746 0.0109342 0.00370833 -0.000606762 0.0109342 0.005375 -0.000606762 0.010854 0.00370833 -0.000539505 0.010784 0.000375 -0.000461746 0.0111185 0.00370833 -0.000704769 0.0110229 0.00370833 -0.00066221 0.0110229 0.000375 -0.00066221 0.0112191 0.00370833 -0.00073361 0.0112191 0.005375 -0.00073361 0.0113227 0.00370833 -0.000748173 0.0111185 0.000375 -0.00070477 0.0112191 0.00204167 -0.00073361 0.0113227 0.00204167 -0.000748173 0.0114273 0.005375 -0.000748173 0.0112191 0.000375 -0.000733611 0.0114273 0.00204167 -0.000748173 0.0114273 0.00370833 -0.000748173 0.0115309 0.00370833 -0.00073361 0.0115309 0.005375 -0.00073361 0.0113227 0.000375 -0.000748173 0.0116315 0.005375 -0.000704769 
[Mesh geometry data added by this diff: flattened float arrays of vertex positions (x y z triples) followed by unit vertex normals for the new model asset file.]
-4.36517e-08 0.3984 0.917212 -8.0287e-08 0.3984 0.94964 -5.83648e-08 0.313344 0.94964 -4.29153e-08 0.313344 0.94964 -4.46037e-08 0.313344 0.974199 -3.15287e-08 0.225691 0.974199 -3.09104e-08 0.225691 0.974199 -3.12227e-08 0.225691 0.990686 -1.90218e-08 0.136163 0.990686 -1.77164e-08 0.136163 0.990686 -1.78415e-08 0.136163 0.998964 -6.35858e-09 0.0455164 0.998964 -5.96876e-09 0.0455164 0.998964 -6.69058e-09 0.0455164 0.998964 5.96875e-09 -0.0455164 0.998964 5.9222e-09 -0.0455164 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -0.872633 0 0.488377 -0.871509 0.00157532 0.490377 -0.832868 -0.00148413 0.55347 -0.788252 -0.0027724 0.615346 -0.739052 -0.00387672 0.673638 -0.68555 -0.0047901 0.72801 -0.62806 -0.00551299 0.778146 -0.566917 -0.00604571 0.823753 -0.502476 -0.00638733 0.864567 -0.435112 -0.00653909 0.900353 -0.390285 0.00357177 0.920687 -0.370423 -0.00017169 0.928863 -0.350418 0.000202186 0.936594 -0.329278 0.00452149 0.944222 -0.310709 0.00220781 0.950503 -0.293412 -8.17567e-05 0.955986 -0.275889 8.99923e-05 0.96119 -0.257632 0.00231585 0.966241 -0.241333 0.0031181 0.970437 -0.22959 0.00117706 0.973287 -0.218826 0.00024007 0.975764 -0.209104 -4.05749e-05 0.977894 -0.200562 6.60695e-05 0.979681 -0.193022 0.000366479 0.981194 -0.184838 0.000823402 0.982769 -0.16424 0.00121591 0.98642 -0.157641 -7.07444e-08 0.987496 -0.148084 -0.000329864 0.988975 -0.0874268 0.000334708 0.996171 -0.068602 -0.000318948 0.997644 0.0113172 -0.000307533 0.999936 0.0911639 -0.000295689 0.995836 0.170427 -0.000283281 0.98537 0.248603 -0.000270476 0.968605 0.325187 -0.00025715 0.94565 0.399696 -0.000243378 0.916648 0.471645 -0.000229094 0.881788 0.540583 -0.000214317 0.84129 0.606064 -0.000199014 0.795416 0.667671 -0.00018336 0.744456 0.725011 -0.000167206 0.688738 0.777716 -0.000150565 0.628615 0.825447 -0.000133377 0.564479 0.867906 -0.000115767 0.496728 0.904814 -9.76277e-05 0.425806 0.935938 -7.89684e-05 0.352165 0.961081 -5.9978e-05 0.276265 0.980081 -4.05275e-05 0.1986 0.992813 -2.06272e-05 0.119673 0.999201 -5.24245e-09 0.0399778 0.999201 5.24587e-09 -0.0399778 0.999182 2.05777e-05 -0.0404348 0.992813 -2.05959e-05 -0.119673 0.992647 4.09225e-05 -0.121047 0.980081 -4.04755e-05 -0.1986 0.961081 -5.99056e-05 -0.276265 0.935938 -7.88762e-05 -0.352165 0.904814 -9.75162e-05 -0.425806 0.867906 -0.000115652 -0.496728 0.825447 -0.000133245 -0.564479 0.777716 -0.000150419 -0.628615 0.725011 -0.000167046 -0.688738 0.667671 -0.000183186 -0.744456 0.606064 -0.000198828 -0.795416 0.540583 -0.000214121 -0.84129 0.471644 -0.000228845 -0.881789 0.399698 -0.000243147 -0.916647 0.325186 -0.000256829 -0.94565 0.248603 -0.000270194 -0.968605 0.170428 -0.00028304 -0.98537 0.0911626 -0.000295353 -0.995836 0.0113133 -0.000307222 -0.999936 -0.0685994 -0.000318922 -0.997644 -0.148084 -0.000329519 -0.988975 -0.157636 7.07445e-08 -0.987497 -0.164214 0.00121225 -0.986424 -0.167634 0.00189942 -0.985847 -0.184755 0.00082939 -0.982784 -0.192994 0.000369053 -0.9812 -0.200566 6.67821e-05 -0.97968 -0.209139 -4.0341e-05 -0.977886 -0.218774 0.000236933 -0.975776 -0.229565 0.00117373 -0.973293 -0.241314 0.00311279 -0.970442 -0.257594 0.00232309 -0.96625 -0.275826 9.64387e-05 -0.961208 
-0.293351 -8.39863e-05 -0.956005 -0.31069 0.00220343 -0.950509 -0.329247 0.0045341 -0.944233 -0.350327 0.000216025 -0.936627 -0.370354 -0.000177207 -0.928891 -0.390256 0.00356028 -0.920699 -0.43511 -0.00654074 -0.900354 -0.502475 -0.00638767 -0.864568 -0.566917 -0.00604571 -0.823753 -0.62806 -0.00551299 -0.778146 -0.68555 -0.0047901 -0.72801 -0.739052 -0.00387672 -0.673638 -0.788252 -0.0027724 -0.615346 -0.832868 -0.00148413 -0.55347 -0.872633 0 -0.488377 -0.871509 0.00157532 -0.490377 -0.829027 0.00296667 0.559201 -0.78113 0.00416256 0.624355 -0.72812 0.00516914 0.685431 -0.670348 0.00598631 0.742023 -0.608193 0.00661324 0.793762 -0.542061 0.00705115 0.84031 -0.472386 0.00729971 0.881362 -0.399623 0.00735735 0.91665 -0.324245 0.00634203 0.945952 -0.24675 0.00456483 0.969069 -0.167637 0.00189838 0.985847 -0.00664341 0.000323521 0.999978 0.074182 0.000311854 0.997245 0.154524 0.000299693 0.987989 0.233854 0.000287145 0.972272 0.311654 0.000274005 0.950196 0.387417 0.000260489 0.921904 0.460645 0.000246393 0.887584 0.530862 0.000231914 0.847458 0.597607 0.000216959 0.801789 0.660439 0.000201557 0.750879 0.718956 0.00018557 0.695056 0.772771 0.000169129 0.634685 0.821529 0.000152203 0.570167 0.864917 0.000134902 0.501916 0.902646 0.000117046 0.430384 0.934471 9.87658e-05 0.356039 0.960185 8.006e-05 0.279363 0.97962 6.07221e-05 0.200858 0.992647 4.08907e-05 0.121047 0.999182 2.05671e-05 0.0404348 0.97962 6.07747e-05 -0.200858 0.960185 8.0133e-05 -0.279363 0.934471 9.88592e-05 -0.356039 0.902646 0.000117146 -0.430383 0.864917 0.000135019 -0.501916 0.821529 0.000152336 -0.570167 0.772771 0.000169275 -0.634685 0.718956 0.000185732 -0.695056 0.660439 0.000201732 -0.750879 0.597607 0.000217145 -0.801789 0.530861 0.000232153 -0.847459 0.460645 0.0002466 -0.887584 0.387417 0.000260812 -0.921904 0.311654 0.000274281 -0.950196 0.233855 0.000287382 -0.972272 0.154523 0.000300026 -0.987989 0.0741807 0.00031219 -0.997245 -0.00663957 0.000323561 -0.999978 -0.0874319 0.000335 -0.99617 -0.246751 0.00456453 -0.969068 -0.324246 0.00634302 -0.945952 -0.399619 0.00735604 -0.916652 -0.472386 0.00729889 -0.881362 -0.542061 0.00705114 -0.84031 -0.608193 0.00661324 -0.793762 -0.670348 0.00598631 -0.742023 -0.72812 0.00516914 -0.685431 -0.78113 0.00416257 -0.624355 -0.829027 0.00296667 -0.559201 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.920312 0.00204237 0.39118 0.899753 0.168939 0.402373 0.860752 0.333062 0.384936 0.79701 0.48758 0.356427 0.710339 0.628096 0.317668 0.603214 0.750575 0.269759 0.478733 0.851457 0.214093 0.34045 0.927854 0.152252 0.214986 0.971873 0.0961438 0.138698 0.989148 0.0484622 0.0396872 0.998694 0.0321597 0.0572237 0.998052 0.0248718 0.0590721 0.997904 0.0264173 0.187906 0.977538 0.0954501 0.323745 0.927854 0.185139 0.441809 0.851458 0.282532 0.538399 0.75058 0.383088 0.610936 0.628094 0.481929 0.657897 0.487578 0.573968 
0.678988 0.333058 0.654253 0.675062 0.168949 0.718155 0.662844 0.00123762 0.748757 0.682513 0.0835543 0.726082 0.69898 0.00132291 0.71514 0.71759 0.0834889 0.691444 0.742706 0.168937 0.647956 0.740301 0.333053 0.583978 0.711378 0.487581 0.506168 0.655556 0.628093 0.41922 0.573609 0.75058 0.328028 0.467557 0.851459 0.237504 0.340449 0.927855 0.152252 0.332503 0.927855 0.168901 0.332505 0.927854 0.168902 0.926074 0.0491776 0.374124 0.91488 0.146838 0.376076 0.900316 0.243341 0.360854 0.868281 0.336216 0.364755 0.83889 0.427785 0.336547 0.801662 0.48767 0.345711 0.793672 0.51388 0.325594 0.744102 0.595585 0.302639 0.687544 0.671539 0.276259 0.619616 0.740109 0.261372 0.604118 0.750601 0.267656 0.553944 0.803289 0.218801 0.477697 0.851425 0.216522 0.472677 0.856976 0.205348 0.398032 0.903905 0.156611 0.334839 0.927636 0.165453 0.312261 0.941417 0.127388 0.224479 0.969999 0.0933301 0.907172 0.168987 0.385334 0.868526 0.33315 0.366979 0.712533 0.628154 0.312601 0.166573 0.982532 0.0829734 0.192397 0.977538 0.0860406 0.0560174 0.998041 0.0278619 0.0575239 0.997916 0.0292205 0.182954 0.977538 0.104625 0.314191 0.927854 0.200922 0.427296 0.851459 0.304033 0.518797 0.750578 0.409247 0.586352 0.628097 0.511552 0.628706 0.487582 0.6058 0.645807 0.333051 0.68703 0.63872 0.168944 0.750663 0.625089 0.00117028 0.780553 0.645764 0.083646 0.758941 0.0546758 0.998029 0.0308056 0.0558229 0.99793 0.0319232 0.177557 0.977538 0.113545 0.303875 0.927853 0.216216 0.411735 0.851461 0.324791 0.497926 0.750575 0.434404 0.560341 0.628092 0.539925 0.59798 0.487578 0.636151 0.611037 0.333052 0.718129 0.600817 0.168947 0.78133 0.585813 0.0010905 0.810446 0.607439 0.0837236 0.789942 0.053199 0.998015 0.0336969 0.0539779 0.997945 0.0345181 0.171723 0.977538 0.122187 0.292809 0.927853 0.230978 0.395169 0.85146 0.344758 0.47583 0.750579 0.458495 0.53295 0.628098 0.566972 0.565784 0.487584 0.664944 0.574779 0.333049 0.747468 0.561442 0.168945 0.810087 0.545108 0.00101058 0.838365 0.567627 0.0837995 0.81901 0.0515899 0.998 0.0365328 0.051995 0.997962 0.036996 0.165472 0.977538 0.13053 0.281026 0.927854 0.245176 0.377638 0.851459 0.363879 0.452578 0.750575 0.481468 0.504262 0.628094 0.592636 0.532209 0.487578 0.692113 0.537108 0.333058 0.774976 0.520692 0.168945 0.836861 0.503077 0.000937444 0.864241 0.526425 0.083868 0.846075 0.049849 0.997983 0.0393075 0.0498826 0.99798 0.0393493 0.158814 0.977538 0.138554 0.268558 0.927854 0.258773 0.35918 0.851459 0.38211 0.428209 0.750579 0.503258 0.474332 0.628101 0.616845 0.497334 0.487575 0.717586 0.498123 0.333057 0.800591 0.478669 0.168951 0.861587 0.459819 0.000870737 0.888012 0.483936 0.0839479 0.871068 0.0480479 0.997965 0.0419184 0.151768 0.977538 0.146238 0.255431 0.927854 0.271737 0.339846 0.851457 0.399406 0.402802 0.750574 0.523822 0.443249 0.628097 0.63955 0.461237 0.487575 0.741304 0.457924 0.333051 0.824247 0.435475 0.168948 0.884205 0.415441 0.000790893 0.90962 0.440265 0.0840136 0.893929 0.0452055 0.998013 0.0439033 0.0440153 0.997933 0.0468252 0.0427334 0.998026 0.0460118 0.041787 0.997919 0.0491102 0.128473 0.977538 0.167073 0.21244 0.927854 0.306522 0.277045 0.851458 0.44527 0.32091 0.750577 0.577625 0.343802 0.628094 0.698068 0.346544 0.487575 0.80136 0.331004 0.333051 0.882901 0.299934 0.168941 0.938882 0.27668 0.000596954 0.960962 0.303227 0.0842174 0.94919 0.323759 0.000661599 0.946139 0.349798 0.0841547 0.933038 0.391215 0.168947 0.904659 0.4166 0.333053 0.845884 0.424009 0.487583 0.763204 0.411077 0.6281 0.660686 0.376403 0.750576 0.543099 0.319678 0.851458 
0.415724 0.241683 0.927853 0.284038 0.144349 0.977539 0.153564 0.136578 0.977539 0.160514 0.136579 0.977538 0.160515 0.0401791 0.998039 0.0480005 0.0394251 0.997906 0.0512702 0.120055 0.977538 0.173223 0.197021 0.927854 0.316653 0.254687 0.851457 0.458427 0.291951 0.750577 0.592789 0.308858 0.628096 0.714215 0.30649 0.487577 0.817516 0.286933 0.333054 0.89819 0.253137 0.168946 0.952564 0.228925 0.000520283 0.973444 0.255915 0.0842778 0.963019 0.037549 0.99805 0.0498684 0.036939 0.997895 0.0532983 0.11134 0.977538 0.178947 0.181119 0.927855 0.326008 0.231704 0.851458 0.470459 0.262279 0.750574 0.606505 0.273159 0.628095 0.728616 0.265684 0.487582 0.831671 0.242166 0.333047 0.911282 0.205717 0.168949 0.963917 0.180616 0.000468474 0.983554 0.207973 0.0843346 0.974492 0.0348468 0.998059 0.0516132 0.0343365 0.997886 0.0551858 0.102355 0.977538 0.184235 0.164776 0.927854 0.334568 0.20815 0.85146 0.481342 0.231961 0.750579 0.618728 0.236794 0.628098 0.74123 0.22423 0.487577 0.843795 0.196802 0.333054 0.922141 0.157797 0.168946 0.972912 0.131861 0.000415039 0.991268 0.159527 0.0844114 0.983578 0.0320764 0.998067 0.0532318 0.0316255 0.997878 0.0569245 0.0931187 0.977538 0.189071 0.148029 0.927854 0.342309 0.184098 0.851454 0.491054 0.201084 0.750575 0.629446 0.199845 0.628098 0.752034 0.182228 0.487577 0.853851 0.150958 0.333055 0.930745 0.109489 0.168946 0.979525 0.0827876 0.000355097 0.996567 0.110689 0.0844687 0.990259 0.0292425 0.998073 0.0547221 0.0288153 0.997871 0.0585081 0.0836536 0.977539 0.193444 0.130917 0.927854 0.349211 0.159586 0.851459 0.49955 0.169707 0.750576 0.638619 0.162411 0.628096 0.760998 0.139779 0.487576 0.861818 0.104744 0.333053 0.937072 0.0609156 0.168947 0.983741 0.033515 0.000291332 0.999438 0.0615829 0.0845165 0.994517 0.026351 0.998078 0.0560838 0.0259172 0.997866 0.0599315 0.0739827 0.977538 0.197347 0.11349 0.927853 0.355258 0.134685 0.85146 0.506829 0.137918 0.750576 0.646231 0.124579 0.628096 0.768099 0.0969891 0.487574 0.867678 0.0582758 0.333046 0.941108 0.0121902 0.168946 0.98555 -0.0158433 0.000244489 0.999874 0.0123236 0.0845807 0.99634 0.0234046 0.998082 0.0573132 0.0229393 0.997863 0.0611874 0.0641385 0.977536 0.200772 0.095783 0.927853 0.360436 0.109456 0.85146 0.51287 0.10579 0.750576 0.652261 0.0864404 0.628098 0.773318 0.0539577 0.487578 0.87141 0.0116616 0.333054 0.942836 -0.0365628 0.168947 0.984947 -0.0651627 0.000180643 0.997875 -0.036963 0.0846315 0.995727 0.0204096 0.998084 0.05841 0.0198925 0.99786 0.0622803 0.0541278 0.977538 0.203688 0.0778396 0.927854 0.364729 0.0839589 0.85146 0.517655 0.0734044 0.750575 0.656696 0.0480905 0.628098 0.776646 0.0107996 0.487576 0.873014 -0.0349787 0.333049 0.942261 -0.0852294 0.168948 0.981933 -0.114321 0.000129717 0.993444 -0.0861619 0.0846753 0.992676 0.0173737 0.998084 0.0593851 0.0167938 0.99786 0.0631958 0.0439896 0.977538 0.206118 0.0597084 0.927854 0.368134 0.0582554 0.851459 0.521176 0.0408378 0.750577 0.65952 0.00962428 0.628099 0.778074 -0.0323903 0.487578 0.872478 -0.0815354 0.333056 0.939375 -0.133684 0.168948 0.976517 -0.1632 8.22226e-05 0.986593 -0.135146 0.0847258 0.987197 0.0142888 0.998084 0.0602046 0.0136399 0.997862 0.0639119 0.0337418 0.977538 0.208039 0.0414286 0.927854 0.370635 0.0324113 0.851459 0.523419 0.00817397 0.750575 0.660735 -0.0288669 0.628096 0.7776 -0.0754949 0.487575 0.869811 -0.127889 0.333047 0.934197 -0.181812 0.168944 0.968712 -0.211686 2.64527e-05 0.977338 -0.183799 0.0847789 0.979301 0.0111761 0.998082 0.0608847 0.0104551 0.997865 0.0644615 0.0234122 0.977538 0.209453 
0.0230487 0.927854 0.37223 0.00648592 0.85146 0.524379 -0.0245142 0.750578 0.660328 -0.0672873 0.628099 0.775219 -0.118419 0.487583 0.865008 -0.173934 0.333052 0.926727 -0.229499 0.16895 0.958533 -0.259651 -6.36562e-06 0.965703 -0.232007 0.0848219 0.969009 0.00803164 0.998079 0.0614336 0.00724627 0.99787 0.064828 0.0130255 0.977538 0.210355 0.00461246 0.927854 0.372915 -0.0194545 0.85146 0.524058 -0.0571391 0.750575 0.65831 -0.105541 0.628096 0.770945 -0.161051 0.48758 0.858096 -0.21955 0.333048 0.916993 -0.276617 0.168938 0.946014 -0.306988 -7.05878e-05 0.951713 -0.279638 0.0848699 0.956347 0.00486221 0.998074 0.0618429 0.00402559 0.997877 0.0650106 0.00260672 0.977538 0.210742 -0.0138343 0.927854 0.372688 -0.0453475 0.851458 0.522459 -0.0896239 0.750575 0.654679 -0.143537 0.628095 0.764783 -0.203293 0.487576 0.849083 -0.264631 0.333057 0.90501 -0.323065 0.168952 0.931174 -0.353572 -0.000108993 0.935408 -0.326593 0.084897 0.941344 0.00167249 0.998068 0.0621108 0.000804113 0.997884 0.0650091 -0.00781806 0.977538 0.210613 -0.0322491 0.927854 0.371547 -0.0711295 0.851458 0.519577 -0.121891 0.750577 0.649443 -0.181186 0.628099 0.756745 -0.245032 0.487579 0.837989 -0.309064 0.333048 0.890819 -0.36872 0.168952 0.914057 -0.399301 -0.000138912 0.91682 -0.372745 0.0849608 0.924036 -0.00153238 0.99806 0.0622373 -0.0024064 0.997894 0.0648249 -0.0182244 0.977538 0.209968 -0.0505841 0.927854 0.369498 -0.0967373 0.851458 0.515423 -0.153861 0.750578 0.64262 -0.218385 0.628099 0.74686 -0.286174 0.487579 0.824846 -0.352738 0.333044 0.874447 -0.41347 0.168941 0.894708 -0.444051 -0.000199458 0.896002 -0.417982 0.084982 0.904472 -0.00474772 0.998051 0.0622214 -0.00559496 0.997905 0.0644596 -0.028586 0.977539 0.208808 -0.0687947 0.927854 0.366543 -0.122109 0.851458 0.510008 -0.185451 0.750574 0.634229 -0.255055 0.628097 0.735148 -0.326619 0.487583 0.809681 -0.395551 0.333064 0.855925 -0.457212 0.168953 0.873162 -0.487724 -0.000235387 0.872998 -0.462201 0.0850224 0.88269 -0.00796865 0.99804 0.0620631 -0.00874959 0.997917 0.0639137 -0.0388775 0.977538 0.207142 -0.0868376 0.927854 0.362693 -0.14718 0.851458 0.503346 -0.21659 0.750576 0.62428 -0.291098 0.628098 0.721634 -0.366257 0.487573 0.792546 -0.437397 0.333051 0.835321 -0.499837 0.168959 0.84948 -0.530204 -0.000240337 0.84787 -0.505289 0.0850656 0.858747 -0.0111896 0.998028 0.06176 -0.0118605 0.997931 0.0631929 -0.0490736 0.977539 0.204964 -0.104668 0.927854 0.357955 -0.171894 0.851459 0.495449 -0.247198 0.750578 0.612802 -0.326427 0.628101 0.706353 -0.405003 0.487579 0.773459 -0.478171 0.333045 0.812671 -0.541233 0.168936 0.823728 -0.571396 -0.000270468 0.820675 -0.547132 0.085124 0.832707 -0.0144036 0.998014 0.0613167 -0.0149193 0.997945 0.0623134 -0.0591497 0.977538 0.202287 -0.122243 0.927854 0.352341 -0.196183 0.851458 0.486345 -0.277198 0.75057 0.599838 -0.360963 0.62809 0.689354 -0.442761 0.48758 0.752482 -0.517774 0.33306 0.788023 -0.581307 0.168941 0.795953 -0.611199 -0.000309512 0.791477 -0.587642 0.0851639 0.804627 -0.0176179 0.997998 0.0607381 -0.017903 0.997963 0.0612386 -0.069082 0.977536 0.199124 -0.139517 0.927851 0.345872 -0.219997 0.851457 0.47605 -0.306524 0.750579 0.585384 -0.394611 0.628098 0.670653 -0.479432 0.487575 0.729669 -0.556112 0.333059 0.761453 -0.619956 0.168942 0.766233 -0.649506 -0.000382786 0.760357 -0.626713 0.0851547 0.774584 -0.0208066 0.997982 0.0599896 -0.0208209 0.99798 0.0600125 -0.0788438 0.977538 0.195456 -0.156451 0.927854 0.338541 -0.243268 0.851459 0.464583 -0.335099 0.750576 0.569513 -0.427297 0.628095 
0.650319 -0.514926 0.487582 0.705064 -0.59309 0.333051 0.733022 -0.657096 0.168955 0.734629 -0.68623 -0.000358737 0.727385 -0.664257 0.0851908 0.742634 -0.0238585 0.997964 0.0591456 -0.0884136 0.977538 0.191316 -0.173002 0.927854 0.33039 -0.265945 0.85146 0.451983 -0.362855 0.750577 0.552242 -0.458933 0.628095 0.628392 -0.549165 0.487587 0.678731 -0.628613 0.333057 0.702794 -0.69262 0.168934 0.701241 -0.72129 -0.000381938 0.692633 -0.70016 0.0852654 0.708876 -0.0266617 0.998013 0.0570865 -0.0298162 0.997932 0.0569414 -0.0294637 0.998027 0.0554426 -0.0327052 0.997918 0.0555836 -0.115732 0.977538 0.176138 -0.219956 0.927855 0.301173 -0.329859 0.851461 0.407684 -0.440527 0.750581 0.492509 -0.54681 0.628101 0.553614 -0.643499 0.487575 0.590067 -0.725631 0.333047 0.602112 -0.7887 0.168951 0.591108 -0.81559 -0.000505853 0.578629 -0.797288 0.0852782 0.597545 -0.786047 -0.000479402 0.618167 -0.766765 0.0852566 0.636241 -0.726448 0.168957 0.666128 -0.662603 0.333049 0.670847 -0.582061 0.487577 0.650748 -0.489446 0.628094 0.60493 -0.389724 0.750572 0.533627 -0.287972 0.85146 0.438279 -0.189129 0.927854 0.321431 -0.097766 0.977539 0.186708 -0.10688 0.977539 0.181645 -0.10688 0.977538 0.181646 -0.03218 0.998039 0.0536833 -0.0355202 0.997906 0.0540601 -0.124302 0.977538 0.1702 -0.234582 0.927853 0.28993 -0.349621 0.851457 0.390879 -0.464348 0.750574 0.470127 -0.57352 0.628097 0.525898 -0.671892 0.487578 0.557521 -0.75452 0.333047 0.56549 -0.816969 0.168956 0.551376 -0.843152 -0.000503876 0.537676 -0.825863 0.0853222 0.557378 -0.0348068 0.99805 0.0518111 -0.0382507 0.997895 0.0523745 -0.132567 0.977538 0.163844 -0.248633 0.927855 0.277971 -0.368519 0.851462 0.373105 -0.487027 0.750575 0.446588 -0.598827 0.628094 0.496894 -0.698644 0.487577 0.523608 -0.781558 0.333061 0.527482 -0.843238 0.16894 0.510302 -0.868655 -0.000534146 0.495418 -0.852415 0.0853342 0.515856 -0.037341 0.998059 0.0498308 -0.0408865 0.997885 0.0505332 -0.140507 0.977538 0.157088 -0.262078 0.927852 0.265341 -0.386522 0.851459 0.354426 -0.508517 0.750578 0.421952 -0.622663 0.628106 0.466663 -0.723681 0.487576 0.488421 -0.806691 0.333057 0.488183 -0.867442 0.168946 0.467976 -0.892038 -0.000531174 0.451959 -0.876882 0.085346 0.473069 -0.0397785 0.998067 0.0477449 -0.0434175 0.997877 0.048541 -0.148103 0.977539 0.149945 -0.274876 0.927854 0.252051 -0.403576 0.851461 0.334875 -0.528761 0.750576 0.396292 -0.644987 0.628093 0.435305 -0.746944 0.48759 0.452029 -0.829847 0.33305 0.447695 -0.889524 0.168943 0.424505 -0.913257 -0.000524244 0.407384 -0.899199 0.0854131 0.429122 -0.0421161 0.998074 0.0455571 -0.0458339 0.997871 0.0464044 -0.155338 0.977538 0.14244 -0.287007 0.927853 0.238151 -0.419641 0.85146 0.314511 -0.547717 0.750573 0.369657 -0.665725 0.628092 0.402878 -0.768387 0.487586 0.414537 -0.850971 0.333053 0.406109 -0.90943 0.168933 0.379999 -0.932246 -0.00056166 0.361826 -0.91932 0.0854074 0.384131 -0.0443499 0.998079 0.0432714 -0.0481273 0.997866 0.0441311 -0.162193 0.977538 0.134584 -0.298435 0.927852 0.223668 -0.43468 0.851463 0.293366 -0.565321 0.750578 0.342117 -0.684834 0.628091 0.369464 -0.787951 0.487579 0.376032 -0.870013 0.333059 0.363524 -0.927108 0.168949 0.334556 -0.948963 -0.000571279 0.315387 -0.937192 0.0854088 0.338196 -0.0464777 0.998082 0.040892 -0.0502896 0.997863 0.0417292 -0.168651 0.977538 0.126398 -0.309129 0.927853 0.208633 -0.44866 0.851459 0.271518 -0.581553 0.750575 0.313741 -0.702263 0.628099 0.335138 -0.80558 0.487585 0.336603 -0.886926 0.333054 0.320058 -0.942519 0.168944 0.288299 -0.96337 -0.00058486 
0.268175 -0.952769 0.0854279 0.291434 -0.0484956 0.998084 0.0384213 -0.0523116 0.997861 0.0392054 -0.174693 0.977539 0.117901 -0.319068 0.927853 0.193091 -0.461537 0.851462 0.248992 -0.596351 0.750581 0.284594 -0.717975 0.6281 0.300003 -0.821248 0.487569 0.296358 -0.90167 0.33305 0.275806 -0.955624 0.168938 0.241335 -0.975429 -0.00059469 0.220315 -0.966016 0.0854266 0.243959 -0.0504018 0.998085 0.0358664 -0.0541876 0.997861 0.036572 -0.180313 0.977538 0.10912 -0.328224 0.927854 0.177075 -0.473295 0.851453 0.225874 -0.609709 0.750566 0.254765 -0.731935 0.628101 0.264123 -0.83489 0.487588 0.255373 -0.914201 0.333071 0.23087 -0.966387 0.168953 0.193781 -0.985112 -0.000602612 0.171914 -0.976897 0.0854396 0.195888 -0.052191 0.998084 0.0332273 -0.0559093 0.997862 0.0338343 -0.185486 0.977538 0.100069 -0.336582 0.927853 0.160626 -0.483877 0.851461 0.202182 -0.621552 0.750577 0.224294 -0.744107 0.62809 0.227611 -0.846509 0.487567 0.213779 -0.924503 0.333057 0.185384 -0.974789 0.168946 0.14575 -0.992394 -0.000590128 0.123098 -0.985389 0.0854476 0.147335 -0.0538632 0.998082 0.0305113 -0.0574708 0.997866 0.0310053 -0.190208 0.977538 0.0907717 -0.344111 0.927854 0.143784 -0.493287 0.851459 0.178008 -0.631884 0.750577 0.193279 -0.754443 0.628105 0.190526 -0.85604 0.487577 0.171655 -0.93254 0.333057 0.139435 -0.980805 0.16894 0.0973671 -0.99726 -0.0006027 0.073974 -0.991467 0.0854702 0.0984255 -0.0554121 0.998079 0.0277203 -0.0588675 0.99787 0.0280931 -0.194465 0.977538 0.0812552 -0.350799 0.927855 0.126588 -0.501482 0.851462 0.153392 -0.640669 0.750576 0.161796 -0.762947 0.628098 0.152986 -0.863477 0.487585 0.129108 -0.938295 0.333057 0.0931415 -0.984417 0.16896 0.048739 -0.999695 -0.000588272 0.02468 -0.995123 0.0854582 0.0492691 -0.0568386 0.998074 0.0248604 -0.0600955 0.997877 0.0251102 -0.198245 0.977538 0.0715391 -0.356635 0.927853 0.109089 -0.508463 0.851456 0.128408 -0.647885 0.750577 0.129918 -0.769591 0.628084 0.11507 -0.868805 0.487586 0.0862468 -0.941757 0.333046 0.0466377 -0.98563 0.168921 0 -0.999695 -0.000588254 -0.02468 -0.99634 0.0854775 0 -0.0581388 0.998067 0.0219348 -0.061148 0.997885 0.022066 -0.20154 0.977539 0.0616463 -0.361586 0.927856 0.0913145 -0.514187 0.851458 0.103107 -0.653513 0.750582 0.0977107 -0.774332 0.628093 0.0768685 -0.872007 0.487586 0.0431787 -0.942905 0.333063 0 -0.984424 0.168921 -0.0487393 -0.99726 -0.000602665 -0.073974 -0.995123 0.0854583 -0.0492691 -0.0593086 0.99806 0.0189483 -0.0620247 0.997894 0.0189723 -0.204346 0.977538 0.0516068 -0.365667 0.927853 0.0733251 -0.518658 0.851458 0.0775494 -0.65755 0.750578 0.0652766 -0.777189 0.62809 0.0384852 -0.873078 0.48758 0 -0.941751 0.333063 -0.0466374 -0.980802 0.16896 -0.0973667 -0.992394 -0.000590093 -0.123098 -0.991467 0.0854702 -0.0984255 -0.0603481 0.998051 0.0159053 -0.062724 0.997905 0.0158403 -0.206644 0.977538 0.0414363 -0.368842 0.927854 0.0551481 -0.521854 0.85146 0.0518076 -0.65998 0.750572 0.0326769 -0.77813 0.628103 0 -0.87201 0.48758 -0.0431788 -0.938299 0.333046 -0.093142 -0.97479 0.16894 -0.14575 -0.985112 -0.000602612 -0.171914 -0.985389 0.0854476 -0.147335 -0.0612555 0.99804 0.0128104 -0.0632433 0.997918 0.0126813 -0.208438 0.977539 0.0311657 -0.371122 0.927853 0.0368435 -0.523778 0.85146 0.0259353 -0.660795 0.750567 0 -0.777178 0.628103 -0.0384847 -0.868805 0.487586 -0.0862468 -0.93254 0.333057 -0.139435 -0.966388 0.168946 -0.193781 -0.975429 -0.00059469 -0.220315 -0.976897 0.0854396 -0.195888 -0.062028 0.998028 0.00966791 -0.0635801 0.997931 0.00950677 -0.209727 0.977538 0.02082 -0.372486 
0.927854 0.0184436 -0.524418 0.851461 0 -0.659986 0.750567 -0.0326772 -0.774335 0.62809 -0.0768688 -0.863476 0.487586 -0.129108 -0.924503 0.333058 -0.185384 -0.955622 0.168953 -0.241335 -0.96337 -0.00058486 -0.268175 -0.966016 0.0854266 -0.243959 -0.0626613 0.998014 0.00648181 -0.0637342 0.997947 0.00632725 -0.210502 0.977538 0.0104228 -0.372941 0.927855 0 -0.523776 0.851461 -0.0259352 -0.657556 0.750572 -0.0652772 -0.769583 0.628093 -0.115069 -0.856035 0.487585 -0.171655 -0.914205 0.333057 -0.230871 -0.94252 0.168939 -0.288299 -0.948963 -0.000570359 -0.315387 -0.952769 0.0854279 -0.291434 -0.0631541 0.997998 0.00325795 -0.063711 0.997963 0.00315454 -0.210759 0.977538 0 -0.372485 0.927855 -0.0184436 -0.521854 0.85146 -0.0518076 -0.653518 0.750578 -0.0977115 -0.762958 0.628084 -0.152988 -0.846503 0.487577 -0.213778 -0.901663 0.333071 -0.275804 -0.927108 0.168944 -0.334557 -0.932246 -0.000558618 -0.361826 -0.937192 0.0854106 -0.338196 -0.0635086 0.997981 0 -0.0635086 0.997981 0 -0.210501 0.977538 -0.0104227 -0.371118 0.927854 -0.0368431 -0.518654 0.85146 -0.0775488 -0.64788 0.750582 -0.129917 -0.754448 0.628098 -0.190527 -0.834902 0.487567 -0.255377 -0.886927 0.33305 -0.320058 -0.909428 0.168948 -0.379997 -0.913257 -0.000526508 -0.407384 -0.91932 0.0854086 -0.38413 -0.063711 0.997963 -0.00315454 -0.209729 0.977538 -0.0208202 -0.368847 0.927853 -0.0551487 -0.514188 0.851458 -0.103107 -0.640668 0.750577 -0.161795 -0.744096 0.628105 -0.227608 -0.821238 0.487588 -0.296355 -0.870013 0.333057 -0.363524 -0.889525 0.168937 -0.424507 -0.892038 -0.000526223 -0.451959 -0.899198 0.0854141 -0.429123 -0.0626613 0.998014 -0.00648181 -0.0635801 0.997931 -0.00950677 -0.062028 0.998028 -0.00966791 -0.0632433 0.997918 -0.0126813 -0.204342 0.977538 -0.0516058 -0.356627 0.927856 -0.109087 -0.493282 0.851462 -0.178006 -0.6097 0.750575 -0.254761 -0.702264 0.628099 -0.335138 -0.768391 0.487578 -0.414539 -0.806691 0.333055 -0.488184 -0.816972 0.168937 -0.551378 -0.815589 -0.000499772 -0.578631 -0.825862 0.0853343 -0.557378 -0.843152 -0.000496236 -0.537675 -0.852414 0.0853355 -0.515856 -0.867442 0.168944 -0.467977 -0.85097 0.333057 -0.406107 -0.805577 0.487586 -0.336608 -0.731933 0.628102 -0.264126 -0.631884 0.750577 -0.193279 -0.508463 0.851456 -0.128407 -0.365668 0.927853 -0.0733238 -0.208438 0.977539 -0.0311665 -0.206641 0.977539 -0.0414358 -0.206644 0.977538 -0.0414355 -0.0612555 0.99804 -0.0128104 -0.062724 0.997905 -0.0158403 -0.201544 0.977538 -0.0616475 -0.350805 0.927853 -0.12659 -0.48388 0.851459 -0.202184 -0.596365 0.750567 -0.2846 -0.68483 0.628098 -0.369461 -0.746947 0.487585 -0.45203 -0.781559 0.333059 -0.527482 -0.7887 0.168951 -0.591107 -0.786047 -0.00047926 -0.618166 -0.797288 0.0852767 -0.597544 -0.0603481 0.998051 -0.0159053 -0.0620247 0.997894 -0.0189723 -0.198244 0.977539 -0.0715389 -0.344109 0.927855 -0.143783 -0.473285 0.851461 -0.225869 -0.581547 0.750581 -0.313739 -0.665724 0.628093 -0.402878 -0.723674 0.487591 -0.488417 -0.754515 0.333063 -0.565487 -0.758505 0.168953 -0.629385 -0.754589 -0.000457901 -0.656198 -0.766766 0.0852554 -0.63624 -0.0593086 0.99806 -0.0189483 -0.061148 0.997885 -0.022066 -0.194464 0.977538 -0.0812548 -0.33658 0.927854 -0.160625 -0.461548 0.851454 -0.248998 -0.565326 0.750574 -0.342119 -0.644988 0.628092 -0.435305 -0.698645 0.487576 -0.523608 -0.725632 0.333046 -0.602112 -0.726446 0.168961 -0.66613 -0.72129 -0.000389577 -0.692633 -0.734359 0.0852546 -0.673386 -0.0581388 0.998067 -0.0219348 -0.0600955 0.997877 -0.0251102 -0.190208 0.977538 -0.0907719 -0.328226 0.927853 
-0.177077 -0.448655 0.851462 -0.271515 -0.547713 0.750577 -0.369655 -0.62267 0.628094 -0.466669 -0.671891 0.48758 -0.55752 -0.694966 0.333051 -0.637259 -0.692619 0.168948 -0.701239 -0.68623 -0.000370194 -0.727384 -0.700162 0.085254 -0.708876 -0.0568387 0.998074 -0.0248605 -0.0588676 0.99787 -0.0280932 -0.185485 0.977539 -0.100069 -0.319067 0.927854 -0.19309 -0.434684 0.85146 -0.29337 -0.528763 0.750574 -0.396293 -0.598821 0.628104 -0.496889 -0.643497 0.48758 -0.590066 -0.6626 0.333059 -0.670845 -0.657097 0.168942 -0.73463 -0.649506 -0.000391273 -0.760357 -0.664257 0.0851826 -0.742635 -0.0554122 0.998079 -0.0277203 -0.0574708 0.997866 -0.0310053 -0.18031 0.977538 -0.109118 -0.309128 0.927853 -0.208633 -0.419638 0.851463 -0.314508 -0.508518 0.750576 -0.421953 -0.573521 0.628096 -0.525899 -0.613533 0.487575 -0.621166 -0.628614 0.333056 -0.702793 -0.619953 0.168959 -0.766232 -0.611199 -0.000309512 -0.791477 -0.626712 0.0851501 -0.774585 -0.0538631 0.998082 -0.0305113 -0.0559093 0.997862 -0.0338344 -0.174695 0.977538 -0.117903 -0.298432 0.927853 -0.223666 -0.403577 0.85146 -0.334876 -0.487027 0.750576 -0.446587 -0.546813 0.628095 -0.553617 -0.582058 0.48758 -0.650749 -0.593087 0.333068 -0.733017 -0.581307 0.168938 -0.795954 -0.571396 -0.000261556 -0.820675 -0.587641 0.085168 -0.804627 -0.0521911 0.998084 -0.0332273 -0.0541874 0.997861 -0.0365717 -0.168649 0.977538 -0.126397 -0.28701 0.927852 -0.238153 -0.386519 0.851462 -0.354424 -0.464346 0.750577 -0.470124 -0.518765 0.628096 -0.579982 -0.54917 0.487573 -0.678738 -0.556108 0.333066 -0.761453 -0.541233 0.168941 -0.823727 -0.530203 -0.000245571 -0.847871 -0.547133 0.0851289 -0.832706 -0.0504015 0.998085 -0.0358661 -0.0523115 0.997861 -0.0392054 -0.162193 0.977538 -0.134584 -0.27488 0.927853 -0.252053 -0.368523 0.851458 -0.373108 -0.440529 0.750576 -0.492514 -0.489447 0.628094 -0.604929 -0.514926 0.487584 -0.705062 -0.517775 0.333063 -0.788022 -0.49984 0.168937 -0.849483 -0.487724 -0.000254484 -0.872998 -0.505291 0.085052 -0.858747 -0.0484957 0.998084 -0.0384214 -0.0502897 0.997863 -0.0417292 -0.155339 0.977538 -0.14244 -0.262074 0.927854 -0.265337 -0.349616 0.851461 -0.390874 -0.41563 0.750579 -0.513696 -0.458933 0.628095 -0.628392 -0.479429 0.487582 -0.729665 -0.478166 0.333068 -0.812665 -0.457209 0.168967 -0.873161 -0.444052 -0.000193937 -0.896001 -0.462199 0.0850101 -0.882692 -0.0464778 0.998082 -0.040892 -0.0481275 0.997866 -0.0441314 -0.148104 0.977538 -0.149947 -0.248637 0.927852 -0.277976 -0.329864 0.851457 -0.40769 -0.389723 0.750572 -0.533627 -0.427297 0.628095 -0.650319 -0.442762 0.487575 -0.752484 -0.437401 0.33305 -0.835319 -0.413471 0.168949 -0.894706 -0.3993 -0.000144571 -0.91682 -0.417983 0.0849875 -0.904471 -0.0443503 0.998078 -0.0432717 -0.0458342 0.997871 -0.0464047 -0.140505 0.977539 -0.157086 -0.23458 0.927854 -0.289928 -0.309294 0.851461 -0.423499 -0.362858 0.750571 -0.552247 -0.394612 0.628095 -0.670655 -0.405005 0.48758 -0.773458 -0.395552 0.33306 -0.855926 -0.368721 0.168942 -0.914059 -0.353573 -0.000113591 -0.935407 -0.372746 0.0849506 -0.924037 -0.0421161 0.998074 -0.0455573 -0.0434175 0.997877 -0.048541 -0.132567 0.977538 -0.163844 -0.219957 0.927853 -0.301176 -0.28797 0.851462 -0.438275 -0.335098 0.750577 -0.569512 -0.36096 0.628098 -0.689348 -0.366254 0.487588 -0.792538 -0.352735 0.333072 -0.874438 -0.323063 0.168951 -0.931174 -0.306988 -6.01198e-05 -0.951713 -0.326592 0.0849016 -0.941345 -0.0397784 0.998067 -0.0477448 -0.0408862 0.997885 -0.0505327 -0.124302 0.977538 -0.1702 -0.204794 0.927853 -0.311685 -0.265945 
0.851461 -0.451982 -0.306526 0.750576 -0.585387 -0.326433 0.62809 -0.70636 -0.326619 0.487576 -0.809685 -0.309064 0.333059 -0.890815 -0.276618 0.168941 -0.946013 -0.259653 -4.24374e-07 -0.965702 -0.27964 0.0848757 -0.956346 -0.0373407 0.998059 -0.0498302 -0.0382507 0.997895 -0.0523748 -0.115732 0.977538 -0.176138 -0.189129 0.927854 -0.32143 -0.243269 0.851459 -0.464584 -0.2772 0.750579 -0.599826 -0.2911 0.628095 -0.721635 -0.286175 0.487579 -0.824846 -0.264629 0.333063 -0.905008 -0.229499 0.168939 -0.958535 -0.211686 3.72035e-05 -0.977338 -0.232007 0.0848325 -0.969008 -0.034807 0.99805 -0.0518114 -0.0355204 0.997906 -0.0540605 -0.10688 0.977539 -0.181645 -0.173001 0.927854 -0.33039 -0.219994 0.851459 -0.476046 -0.247195 0.750586 -0.612794 -0.255054 0.628094 -0.73515 -0.245031 0.487583 -0.837987 -0.219547 0.333065 -0.916988 -0.181813 0.168955 -0.96871 -0.1632 8.21872e-05 -0.986593 -0.183801 0.0847849 -0.9793 -0.0321802 0.998039 -0.0536836 -0.0327053 0.997918 -0.0555836 -0.0977659 0.977539 -0.186709 -0.156452 0.927853 -0.338543 -0.19619 0.851456 -0.486347 -0.216587 0.75058 -0.624275 -0.218387 0.628093 -0.746864 -0.203292 0.487579 -0.849081 -0.173935 0.333054 -0.926726 -0.133681 0.168949 -0.976517 -0.114318 0.000139195 -0.993444 -0.135143 0.0847351 -0.987196 -0.0294636 0.998027 -0.0554426 -0.029816 0.997932 -0.0569411 -0.0884125 0.977539 -0.191316 -0.139518 0.927855 -0.345862 -0.171894 0.85146 -0.495449 -0.18545 0.750576 -0.634228 -0.181186 0.628099 -0.756746 -0.161053 0.487576 -0.858098 -0.127889 0.333057 -0.934193 -0.085231 0.168943 -0.981934 -0.0651627 0.000169698 -0.997875 -0.0861635 0.0846706 -0.992677 -0.0266615 0.998013 -0.0570861 -0.0268636 0.997948 -0.0581295 -0.0788444 0.977538 -0.195456 -0.122241 0.927857 -0.352333 -0.14718 0.851458 -0.503347 -0.15386 0.750576 -0.642621 -0.143535 0.628103 -0.764778 -0.118417 0.487588 -0.865006 -0.0815353 0.333057 -0.939375 -0.0365628 0.168949 -0.984946 -0.0158433 0.000233508 -0.999874 -0.036963 0.0846207 -0.995727 -0.0237774 0.997998 -0.0586112 -0.0238587 0.997964 -0.0591464 -0.0690844 0.977539 -0.199112 -0.104667 0.927854 -0.357954 -0.122111 0.851457 -0.51001 -0.121892 0.750573 -0.649448 -0.105543 0.628095 -0.770945 -0.0754947 0.487579 -0.869809 -0.0349785 0.333066 -0.942255 0.0121902 0.168947 -0.98555 0.0335135 0.000286637 -0.999438 0.0123237 0.0845698 -0.996341 -0.0208213 0.99798 -0.0600133 -0.0591468 0.977541 -0.202276 -0.086838 0.927853 -0.362694 -0.0967353 0.85146 -0.515421 -0.0896266 0.750577 -0.654676 -0.0672876 0.628093 -0.775223 -0.032389 0.487571 -0.872483 0.0116616 0.333064 -0.942832 0.060914 0.168946 -0.983741 0.0827891 0.000337875 -0.996567 0.0615813 0.0845119 -0.994518 -0.0176096 0.998 -0.0607099 -0.0149124 0.997947 -0.0622844 -0.0144095 0.998015 -0.0613124 -0.0118605 0.997931 -0.0631929 -0.0285864 0.977538 -0.208811 -0.0322497 0.927854 -0.371548 -0.0194557 0.851455 -0.524067 0.00817286 0.750578 -0.660731 0.0480907 0.628096 -0.776649 0.0969889 0.487578 -0.867675 0.150959 0.333063 -0.930742 0.205717 0.168951 -0.963917 0.228925 0.000520283 -0.973444 0.207973 0.0843346 -0.974492 0.180614 0.000463948 -0.983554 0.159528 0.0843944 -0.983579 0.10949 0.168952 -0.979524 0.0582746 0.333061 -0.941103 0.0107972 0.48758 -0.873011 -0.0288668 0.628093 -0.777603 -0.0571374 0.75058 -0.658305 -0.0711289 0.851458 -0.519577 -0.0687944 0.927855 -0.366541 -0.0490734 0.977539 -0.204964 -0.0388772 0.977538 -0.20714 -0.0388779 0.977538 -0.207142 -0.0111898 0.998028 -0.0617604 -0.00874966 0.997917 -0.0639142 -0.0182237 0.977539 -0.209967 -0.0138335 0.927856 
-0.372682 0.00648592 0.85146 -0.524379 0.0408399 0.750577 -0.65952 0.0864413 0.628102 -0.773315 0.139779 0.487578 -0.861818 0.196802 0.333059 -0.922139 0.253137 0.168954 -0.952563 0.276679 0.000603037 -0.960962 0.255915 0.0842778 -0.963019 -0.00796841 0.99804 -0.0620628 -0.00559492 0.997905 -0.0644591 -0.00781917 0.977538 -0.210614 0.0046125 0.927853 -0.372918 0.0324103 0.85146 -0.523417 0.0734037 0.750572 -0.656699 0.124578 0.628095 -0.768099 0.182228 0.487576 -0.853851 0.242162 0.333059 -0.911279 0.299931 0.168942 -0.938882 0.323761 0.000645189 -0.946139 0.303224 0.0842191 -0.94919 -0.00474769 0.998051 -0.062221 -0.00240649 0.997894 -0.0648243 0.00260707 0.977538 -0.210742 0.0230482 0.927854 -0.372231 0.0582563 0.851459 -0.521176 0.105791 0.750575 -0.652262 0.16241 0.628096 -0.760998 0.22423 0.487573 -0.843797 0.286934 0.333061 -0.898186 0.345998 0.168956 -0.922897 0.370053 0.000714789 -0.929011 0.3498 0.0841385 -0.933038 -0.00153247 0.99806 -0.0622367 0.00080433 0.997884 -0.0650089 0.0130251 0.977539 -0.210354 0.0414292 0.927854 -0.370635 0.08396 0.851459 -0.517657 0.137918 0.750577 -0.646229 0.199847 0.628102 -0.75203 0.265685 0.487581 -0.831671 0.331002 0.333058 -0.882899 0.391217 0.168952 -0.904657 0.415441 0.000800795 -0.90962 0.395518 0.0840762 -0.914602 0.0016725 0.998068 -0.0621113 0.00402538 0.997876 -0.0650107 0.0234121 0.977538 -0.209453 0.059709 0.927854 -0.368134 0.109456 0.85146 -0.512869 0.169706 0.750575 -0.63862 0.236792 0.628094 -0.741233 0.306489 0.487578 -0.817516 0.37426 0.333055 -0.86545 0.435474 0.168947 -0.884206 0.459819 0.000861063 -0.888012 0.440263 0.0840169 -0.89393 0.00486218 0.998074 -0.0618425 0.00724647 0.99787 -0.0648279 0.0337423 0.977539 -0.208037 0.0778396 0.927854 -0.364731 0.134685 0.851458 -0.506832 0.201083 0.750577 -0.629445 0.273159 0.628101 -0.728611 0.346545 0.48758 -0.801356 0.416601 0.333061 -0.84588 0.478671 0.168953 -0.861585 0.503077 0.00094678 -0.864241 0.483938 0.0839461 -0.871066 0.00803177 0.998079 -0.0614337 0.0104548 0.997865 -0.0644611 0.0439893 0.977538 -0.206118 0.0957815 0.927855 -0.360433 0.159586 0.851459 -0.49955 0.231963 0.750575 -0.618733 0.308859 0.628095 -0.714215 0.385749 0.487575 -0.783242 0.457923 0.333052 -0.824247 0.520693 0.168942 -0.836862 0.545109 0.0010144 -0.838365 0.526425 0.0838771 -0.846074 0.0111759 0.998082 -0.0608841 0.0136399 0.997862 -0.0639116 0.0541283 0.977538 -0.20369 0.11349 0.927854 -0.355257 0.184096 0.851458 -0.491049 0.262281 0.75058 -0.606497 0.343802 0.628104 -0.698059 0.42401 0.48759 -0.763198 0.498122 0.333063 -0.800589 0.561442 0.168936 -0.810089 0.585812 0.00108683 -0.810446 0.567626 0.0837959 -0.819011 0.0142916 0.998085 -0.0601883 0.0167861 0.997862 -0.0631668 0.0641353 0.977538 -0.200762 0.130922 0.927854 -0.349208 0.208153 0.851462 -0.481336 0.291952 0.750574 -0.592793 0.377902 0.628091 -0.680215 0.461237 0.487574 -0.741304 0.537107 0.33307 -0.774972 0.600817 0.168946 -0.78133 0.62509 0.00116519 -0.780552 0.607439 0.0837236 -0.789942 0.0173658 0.998086 -0.0593579 0.0198946 0.997861 -0.0622647 0.0739846 0.977541 -0.197334 0.14803 0.927854 -0.342306 0.231702 0.851461 -0.470455 0.32091 0.750577 -0.577626 0.411079 0.628097 -0.660688 0.497333 0.487578 -0.717585 0.574775 0.333066 -0.747463 0.63872 0.168947 -0.750662 0.662843 0.00123437 -0.748758 0.645765 0.0836378 -0.758942 0.0204096 0.998084 -0.05841 0.0229395 0.997863 -0.0611876 0.0836539 0.977538 -0.193445 0.164777 0.927853 -0.334571 0.254685 0.85146 -0.458423 0.349082 0.750579 -0.561046 0.44325 0.628096 -0.639551 0.532209 0.487578 
-0.692113 0.611037 0.333056 -0.718127 0.675062 0.168948 -0.718156 0.69898 0.00132291 -0.71514 0.682513 0.0835543 -0.726082 0.0234048 0.998082 -0.0573137 0.0259174 0.997866 -0.0599321 0.0931182 0.977539 -0.18907 0.18112 0.927854 -0.32601 0.277046 0.851457 -0.445271 0.376402 0.750576 -0.5431 0.474335 0.628095 -0.61685 0.565787 0.487578 -0.664946 0.645805 0.333056 -0.68703 0.709752 0.168949 -0.683892 0.733418 0.00141599 -0.679776 0.71759 0.0834889 -0.691444 0.0263511 0.998078 -0.0560842 0.0288155 0.997871 -0.0585085 0.102355 0.977538 -0.184236 0.19702 0.927855 -0.316652 0.298728 0.851458 -0.431023 0.402798 0.75058 -0.523816 0.504259 0.628101 -0.592632 0.597978 0.487584 -0.636148 0.678988 0.333059 -0.654253 0.742706 0.168937 -0.647957 0.766066 0.00150299 -0.64276 0.750911 0.0834078 -0.655115 0.0292427 0.998073 -0.0547223 0.0316255 0.997878 -0.0569245 0.111341 0.977538 -0.178948 0.212439 0.927855 -0.306521 0.319678 0.851456 -0.415726 0.428212 0.750576 -0.50326 0.532951 0.628096 -0.566973 0.628708 0.487578 -0.605801 0.710513 0.333067 -0.619869 0.773841 0.168942 -0.610434 0.796851 0.00158518 -0.604174 0.782395 0.0833304 -0.617182 0.0320764 0.998067 -0.0532318 0.0343365 0.997886 -0.0551858 0.120054 0.977538 -0.173223 0.227339 0.927854 -0.295642 0.339844 0.851458 -0.399405 0.452576 0.750576 -0.481468 0.560338 0.628098 -0.539921 0.657896 0.487579 -0.573968 0.740301 0.33306 -0.583974 0.80308 0.168947 -0.571419 0.82569 0.00165959 -0.564121 0.811966 0.0832259 -0.577741 0.0348468 0.998059 -0.0516132 0.036939 0.997895 -0.0532983 0.128474 0.977538 -0.167073 0.241682 0.927853 -0.284039 0.359181 0.851458 -0.38211 0.475834 0.750574 -0.458498 0.586354 0.628094 -0.511554 0.685478 0.487574 -0.540733 0.768276 0.333048 -0.546655 0.830358 0.168947 -0.531002 0.852521 0.00175818 -0.52269 0.839551 0.083141 -0.536881 0.037549 0.99805 -0.0498684 0.0394251 0.997906 -0.0512702 0.136579 0.977538 -0.160515 0.255431 0.927855 -0.271737 0.377638 0.851459 -0.363878 0.497924 0.750577 -0.434403 0.610933 0.6281 -0.481925 0.711378 0.487583 -0.506166 0.794367 0.333059 -0.507988 0.855601 0.168943 -0.489291 0.877273 0.00183684 -0.479987 0.86508 0.0830502 -0.494712 0.0401791 0.998039 -0.0480005 0.041787 0.997919 -0.0491102 0.144349 0.977538 -0.153564 0.268558 0.927854 -0.258774 0.395171 0.851458 -0.34476 0.518801 0.750572 -0.409252 0.634023 0.628092 -0.451127 0.73554 0.487579 -0.470369 0.818517 0.333058 -0.468083 0.878751 0.168945 -0.446379 0.899888 0.0019302 -0.436118 0.888494 0.0829467 -0.451329 0.0427334 0.998026 -0.0460117 0.044015 0.997933 -0.0468249 0.151766 0.977539 -0.146237 0.281025 0.927854 -0.245175 0.411736 0.85146 -0.324792 0.538402 0.750577 -0.383089 0.655555 0.628094 -0.41922 0.757902 0.487578 -0.433419 0.840665 0.333059 -0.427029 0.899751 0.168957 -0.402371 0.920312 0.00204619 -0.391179 0.909734 0.0828727 -0.406836 0.0452052 0.998013 -0.0439032 0.0461046 0.997948 -0.0444249 0.158813 0.977538 -0.138554 0.292809 0.927854 -0.230977 0.427294 0.851461 -0.304031 0.556686 0.750579 -0.355995 0.675487 0.628092 -0.386288 0.778409 0.487578 -0.395407 0.860755 0.333053 -0.384937 0.907153 0.168987 -0.385379 0.914941 0.146549 -0.37604 0.926066 0.0491023 -0.374153 0.0475926 0.997997 -0.0416784 0.0480478 0.997965 -0.0419183 0.165471 0.977539 -0.130529 0.303872 0.927854 -0.216214 0.441808 0.851459 -0.282531 0.573609 0.750581 -0.328026 0.693761 0.628096 -0.352407 0.797009 0.487582 -0.356427 0.868556 0.33315 -0.366908 0.900397 0.242981 -0.360894 0.0498825 0.99798 -0.0393492 0.171724 0.977538 -0.122188 0.314196 0.927853 -0.200925 0.455241 
0.851458 -0.260337 0.589131 0.750579 -0.29926 0.710343 0.628092 -0.31767 0.801671 0.48767 -0.345689 0.838925 0.4277 -0.336566 0.868328 0.336034 -0.364812 0.0515902 0.998 -0.036533 0.0539782 0.997945 -0.0345183 0.053199 0.998015 -0.0336968 0.0558228 0.99793 -0.0319232 0.187905 0.977538 -0.0954495 0.340449 0.927855 -0.152251 0.477739 0.851426 -0.216422 0.554166 0.803105 -0.218912 0.604104 0.750603 -0.267684 0.619677 0.740052 -0.261387 0.687676 0.671382 -0.276312 0.744291 0.595356 -0.302626 0.79377 0.513637 -0.32574 0.0546756 0.998029 -0.0308056 0.0575238 0.997916 -0.0292204 0.192397 0.977538 -0.0860417 0.312343 0.941372 -0.12752 0.322242 0.935624 -0.14411 0.352429 0.921517 -0.163097 0.398201 0.903832 -0.156599 0.472896 0.856796 -0.205597 0.0560172 0.998041 -0.0278618 0.0590718 0.997904 -0.0264171 0.184327 0.977222 -0.105167 0.22489 0.969902 -0.0933553 0.0572236 0.998052 -0.0248717 0.0398816 0.998689 -0.0320789 0.139047 0.98909 -0.0486459 0.712536 0.628154 -0.312596 0.71034 0.628096 -0.317667 0.603211 0.750578 -0.269758 0.589131 0.750578 -0.299261 0.467557 0.851459 -0.237505 0.332505 0.927854 -0.168902 0.332503 0.927855 -0.168901 0.131861 0.000393255 -0.991268 0.110689 0.0844471 -0.990261 0.0609142 0.168947 -0.983741 0.0116603 0.333059 -0.942834 -0.0323863 0.487578 -0.872478 -0.0672852 0.628099 -0.775219 -0.0896252 0.75058 -0.654674 -0.0967389 0.851456 -0.515427 -0.0868371 0.927854 -0.362693 -0.0591529 0.977539 -0.202285 -0.0208065 0.997982 -0.0599897 -0.0179093 0.997963 -0.0612359 -0.868654 -0.000531601 -0.495419 -0.876881 0.085351 -0.473069 -0.889523 0.168942 -0.424508 -0.870011 0.333061 -0.363525 -0.82125 0.487568 -0.296353 -0.744109 0.62809 -0.227606 -0.640669 0.750576 -0.161795 -0.514187 0.851458 -0.103107 -0.368842 0.927854 -0.0551494 -0.209727 0.977538 -0.0208207 -0.0631541 0.997998 -0.00325795 -0.0637342 0.997947 -0.00632725 -0.754589 -0.000450687 0.656198 -0.734359 0.0852619 0.673385 -0.692621 0.168948 0.701237 -0.628613 0.333053 0.702795 -0.549166 0.487573 0.678741 -0.458933 0.628095 0.628392 -0.362856 0.750571 0.552249 -0.265945 0.851461 0.451982 -0.173002 0.927853 0.330392 -0.0884135 0.977538 0.191316 -0.0237774 0.997998 0.0586116 -0.0268637 0.997948 0.0581299 0.370053 0.000724974 0.929011 0.395515 0.0840781 0.914603 0.435476 0.168947 0.884205 0.457926 0.333047 0.824247 0.461237 0.487574 0.741304 0.443248 0.628099 0.639549 0.402798 0.750578 0.523819 0.339843 0.85146 0.399404 0.255431 0.927855 0.271737 0.151767 0.977538 0.146238 0.0475926 0.997997 0.0416784 0.0461046 0.997948 0.0444249 0.733418 0.00141599 0.679776 0.75091 0.0834021 0.655117 0.77384 0.168947 0.610433 0.768278 0.333048 0.546651 0.735542 0.487575 0.470371 0.675485 0.628094 0.386286 0.589132 0.750578 0.29926 0.478732 0.851458 0.214093 0.467559 0.851458 0.237505 0.766066 0.00149097 0.64276 0.782396 0.0833216 0.617182 0.80308 0.168947 0.571419 0.794366 0.33306 0.507989 0.757903 0.487577 0.433419 0.693761 0.628096 0.352409 0.603208 0.75058 0.269759 0.589129 0.75058 0.299259 0.79685 0.00158263 0.604175 0.811966 0.0832247 0.577742 0.830357 0.168948 0.531003 0.818519 0.333055 0.468082 0.778411 0.487576 0.395406 0.710344 0.628091 0.317668 0.693765 0.628091 0.352409 0.82569 0.00165647 0.564121 0.839552 0.0831404 0.536881 0.855601 0.168942 0.48929 0.840664 0.33306 0.427031 0.79701 0.487581 0.356428 0.778408 0.487581 0.395407 0.852521 0.00176285 0.522691 0.865079 0.0830556 0.494711 0.878753 0.168944 0.446376 0.860756 0.333052 0.384935 0.840667 0.333052 0.42703 0.877274 0.00183867 0.479987 0.888494 0.0829476 0.451328 0.899749 
0.168956 0.402376 0.878749 0.168956 0.446378 0.899888 0.00192949 0.436117 0.909734 0.0828688 0.406837 0.165472 0.977538 0.13053 -0.0690817 0.977538 0.199116 -0.210759 0.977538 0 0.0498491 0.997983 -0.0393077 0.0519952 0.997962 -0.0369962 0.177556 0.977539 -0.113544 0.323745 0.927854 -0.185138 0.45524 0.851459 -0.260336 0.187904 0.977538 0.0954498 0.182955 0.977538 0.104626 0.455239 0.851459 0.260336 0.455241 0.851458 0.260336 0.323744 0.927855 0.185138 0.573611 0.750578 0.328028 0.675484 0.628096 0.386286 0.757904 0.487576 0.433419 0.818517 0.33306 0.468083 0.855601 0.168944 0.489291 0.177556 0.977538 0.113545 0.314196 0.927853 0.200923 0.556685 0.75058 0.355994 0.556685 0.75058 0.355994 0.441807 0.851459 0.282531 0.655555 0.628094 0.41922 0.735541 0.487577 0.470371 0.794369 0.333055 0.507989 0.830359 0.168942 0.531002 0.171725 0.977538 0.122187 0.303873 0.927853 0.216216 0.427293 0.851461 0.304032 0.634022 0.628093 0.451126 0.634022 0.628093 0.451126 0.538402 0.750578 0.383088 0.711382 0.487575 0.506168 0.768272 0.33306 0.546652 0.80308 0.168948 0.571419 0.292808 0.927854 0.230977 0.411736 0.85146 0.324792 0.5188 0.750575 0.409248 0.610933 0.628097 0.481928 0.685476 0.487581 0.540729 0.685477 0.487578 0.540729 0.740304 0.333048 0.583978 0.77384 0.168947 0.610433 0.158814 0.977538 0.138554 0.281026 0.927854 0.245175 0.268558 0.927854 0.258773 0.395171 0.851459 0.344759 0.377638 0.851459 0.363879 0.497922 0.750579 0.434403 0.475835 0.750574 0.458497 0.586357 0.628091 0.511554 0.560336 0.628098 0.539923 0.657894 0.487582 0.573968 0.628708 0.487578 0.6058 0.710516 0.333053 0.619873 0.710514 0.333058 0.619873 0.678991 0.333052 0.654253 0.742703 0.168947 0.647958 0.70975 0.168949 0.683894 0.709754 0.168938 0.683893 0.144349 0.977538 0.153564 0.359182 0.851457 0.382111 0.452573 0.750579 0.481465 0.532953 0.628094 0.566974 0.597976 0.487584 0.63615 0.645806 0.333052 0.68703 0.675064 0.168944 0.718155 0.24168 0.927855 0.284037 0.428214 0.750574 0.50326 0.504257 0.628101 0.592634 0.565788 0.487578 0.664945 0.611039 0.333049 0.71813 0.638719 0.168947 0.750663 0.22734 0.927853 0.295643 0.227338 0.927854 0.295642 0.128474 0.977538 0.167074 0.319675 0.851459 0.415722 0.474335 0.628097 0.616847 0.532212 0.487574 0.692114 0.574774 0.333058 0.747467 0.600817 0.168946 0.78133 0.120053 0.977538 0.173222 0.298728 0.851458 0.431024 0.298728 0.851458 0.431024 0.21244 0.927854 0.306522 0.376401 0.750578 0.543097 0.497334 0.487575 0.717586 0.537108 0.333058 0.774976 0.561442 0.168945 0.810087 0.111341 0.977538 0.178948 0.19702 0.927855 0.316653 0.349083 0.750576 0.56105 0.349082 0.750577 0.561049 0.277046 0.851457 0.445271 0.411078 0.628099 0.660687 0.498126 0.333051 0.800592 0.52069 0.168951 0.836861 0.102355 0.977538 0.184235 0.181121 0.927854 0.326009 0.254686 0.851458 0.458425 0.3779 0.6281 0.680208 0.377904 0.628093 0.680211 0.32091 0.750577 0.577625 0.424013 0.487575 0.763206 0.478669 0.168949 0.861587 0.0931175 0.977539 0.18907 0.164777 0.927854 0.334569 0.231702 0.85146 0.470457 0.291954 0.750574 0.592792 0.385747 0.487582 0.783239 0.385751 0.487575 0.783241 0.3438 0.628096 0.698067 0.416603 0.333047 0.845885 0.0836549 0.977538 0.193445 0.148029 0.927854 0.342308 0.208157 0.851454 0.481349 0.262274 0.75058 0.6065 0.308859 0.628095 0.714215 0.374259 0.333053 0.865451 0.37426 0.333051 0.865452 0.346543 0.487577 0.801359 0.391215 0.168947 0.904659 0.0739886 0.977536 0.197355 0.130917 0.927853 0.349211 0.184092 0.851459 0.491047 0.231965 0.750575 0.618732 0.273157 0.628098 0.728614 0.306487 0.487582 0.817514 
0.345997 0.168947 0.922899 0.345999 0.168941 0.922899 0.331003 0.333053 0.8829 0.0641321 0.977538 0.200763 0.113491 0.927853 0.355258 0.159585 0.85146 0.499548 0.201082 0.750577 0.629445 0.236794 0.628098 0.74123 0.265687 0.487577 0.831673 0.286935 0.333047 0.898191 0.299932 0.168947 0.938881 0.054129 0.977538 0.20369 0.095781 0.927855 0.360433 0.134686 0.85146 0.506829 0.169708 0.750575 0.63862 0.199846 0.628096 0.752035 0.22423 0.487577 0.843795 0.242163 0.333054 0.911281 0.253136 0.168949 0.952564 0.0439885 0.977538 0.206116 0.077841 0.927853 0.364731 0.109455 0.85146 0.512869 0.137918 0.750575 0.646232 0.162411 0.628096 0.760998 0.182228 0.487576 0.853851 0.196802 0.333055 0.922141 0.205718 0.168945 0.963918 0.0337421 0.977538 0.208039 0.0597075 0.927854 0.368132 0.0839598 0.851459 0.517657 0.105791 0.750575 0.652262 0.124578 0.628098 0.768097 0.13978 0.487574 0.86182 0.150958 0.333053 0.930746 0.157797 0.168946 0.972912 0.023412 0.977538 0.209453 0.0414286 0.927854 0.370635 0.0582557 0.851459 0.521176 0.0734032 0.750577 0.656693 0.0864402 0.628099 0.773318 0.0969876 0.487578 0.867676 0.104746 0.333046 0.937075 0.109489 0.168947 0.979525 0.0130256 0.977538 0.210355 0.0230492 0.927854 0.372231 0.0324102 0.85146 0.523417 0.0408392 0.750574 0.659523 0.0480903 0.628099 0.776646 0.0539584 0.487576 0.871411 0.0582732 0.333054 0.941105 0.0609158 0.168946 0.983741 0.00260672 0.977538 0.210742 0.00461269 0.927854 0.372916 0.00648587 0.85146 0.524379 0.00817243 0.750578 0.660731 0.0096256 0.628096 0.778077 0.0107986 0.487579 0.873012 0.0116633 0.333049 0.942837 0.0121899 0.168947 0.98555 -0.00781824 0.977538 0.210612 -0.0138344 0.927854 0.372688 -0.0194534 0.851458 0.524062 -0.0245133 0.750575 0.66033 -0.0288678 0.628099 0.777598 -0.0323891 0.487575 0.87248 -0.0349808 0.333056 0.942258 -0.0365633 0.168949 0.984946 -0.0182247 0.977539 0.209967 -0.032249 0.927854 0.371548 -0.0453476 0.851458 0.522458 -0.057139 0.750575 0.65831 -0.0672866 0.628096 0.775221 -0.0754972 0.487583 0.869806 -0.0815329 0.333048 0.939378 -0.0852293 0.168948 0.981933 -0.0285856 0.977538 0.208811 -0.0505844 0.927854 0.369497 -0.0711296 0.851458 0.519576 -0.0896245 0.750577 0.654676 -0.10554 0.628095 0.770946 -0.118418 0.487579 0.865011 -0.127891 0.333053 0.934195 -0.133683 0.168944 0.976518 -0.0388776 0.977538 0.20714 -0.0687947 0.927854 0.366543 -0.0967372 0.851458 0.515423 -0.121891 0.750577 0.649443 -0.143538 0.628099 0.76478 -0.161051 0.487576 0.858098 -0.173933 0.333049 0.926729 -0.181813 0.16895 0.96871 -0.0490735 0.977538 0.204965 -0.0868376 0.927854 0.362692 -0.122109 0.851458 0.510008 -0.15386 0.750574 0.642624 -0.181185 0.628099 0.756746 -0.203293 0.487579 0.849081 -0.219551 0.333056 0.91699 -0.229496 0.168938 0.958536 -0.0591495 0.977536 0.202297 -0.104668 0.927854 0.357956 -0.14718 0.851459 0.503344 -0.185451 0.750576 0.634227 -0.218385 0.628097 0.746862 -0.245032 0.487579 0.837989 -0.26463 0.333049 0.905014 -0.27662 0.168952 0.946011 -0.122243 0.927851 0.352348 -0.171894 0.851458 0.495451 -0.21659 0.750578 0.624277 -0.255055 0.628097 0.735147 -0.286174 0.487583 0.824844 -0.309063 0.333045 0.890821 -0.323064 0.168951 0.931174 -0.0788437 0.977538 0.195455 -0.139516 0.927854 0.345864 -0.156451 0.927854 0.338541 -0.196183 0.851457 0.486347 -0.219996 0.851459 0.476045 -0.247198 0.75057 0.612811 -0.277198 0.750579 0.599827 -0.291098 0.628101 0.721631 -0.326427 0.62809 0.706363 -0.326618 0.487573 0.809687 -0.366258 0.487579 0.792541 -0.352741 0.333063 0.874438 -0.39555 0.333051 0.855931 -0.368718 0.168942 0.91406 
-0.413472 0.168954 0.894704 -0.0977663 0.977538 0.18671 -0.243268 0.85146 0.464582 -0.306525 0.750576 0.585387 -0.360963 0.628098 0.689347 -0.405003 0.48758 0.773459 -0.437397 0.333046 0.835323 -0.457212 0.168958 0.873161 -0.18913 0.927853 0.321432 -0.335099 0.750577 0.569512 -0.394611 0.628095 0.670656 -0.442761 0.487575 0.752485 -0.478172 0.333059 0.812664 -0.499833 0.168937 0.849486 -0.204793 0.927854 0.311685 -0.204792 0.927855 0.311683 -0.115733 0.977538 0.176139 -0.287971 0.851461 0.438277 -0.427297 0.628095 0.650318 -0.479431 0.487582 0.729664 -0.517774 0.333058 0.788024 -0.541233 0.168941 0.823727 -0.124302 0.977538 0.170199 -0.309295 0.85146 0.423501 -0.309294 0.851461 0.423498 -0.219957 0.927853 0.301176 -0.389724 0.750571 0.533628 -0.514926 0.487587 0.70506 -0.556112 0.33305 0.761457 -0.581307 0.168942 0.795952 -0.132567 0.977538 0.163844 -0.234581 0.927855 0.289927 -0.415635 0.750571 0.513703 -0.415631 0.750581 0.513692 -0.329862 0.851457 0.407691 -0.489446 0.628096 0.604929 -0.59309 0.333056 0.73302 -0.619958 0.168955 0.766229 -0.140506 0.977539 0.157086 -0.248635 0.927852 0.277977 -0.349618 0.851461 0.390872 -0.518765 0.628094 0.579983 -0.518764 0.628101 0.579978 -0.44053 0.750574 0.492516 -0.582062 0.487573 0.650751 -0.657094 0.168935 0.734635 -0.148104 0.977538 0.149948 -0.262076 0.927854 0.265336 -0.36852 0.851459 0.373108 -0.464347 0.750575 0.470126 -0.613532 0.487577 0.621166 -0.613532 0.487575 0.621167 -0.546811 0.628097 0.553617 -0.662603 0.333053 0.670845 -0.155338 0.977538 0.14244 -0.274878 0.927853 0.252053 -0.386521 0.851461 0.354425 -0.487026 0.750578 0.446585 -0.573521 0.628094 0.5259 -0.694966 0.333049 0.63726 -0.694966 0.333047 0.637261 -0.643499 0.487578 0.590065 -0.726448 0.168948 0.666131 -0.162193 0.977538 0.134584 -0.287009 0.927852 0.238153 -0.403576 0.85146 0.334876 -0.508518 0.750576 0.421954 -0.598823 0.628105 0.496885 -0.671892 0.487577 0.557521 -0.758503 0.168958 0.629386 -0.758503 0.168951 0.629388 -0.725631 0.333047 0.602112 -0.168649 0.977539 0.126395 -0.298433 0.927853 0.223665 -0.419638 0.851463 0.314507 -0.528763 0.750573 0.396295 -0.622668 0.628093 0.466673 -0.698644 0.487576 0.523608 -0.754519 0.33306 0.565484 -0.7887 0.168956 0.591106 -0.174695 0.977538 0.117904 -0.309129 0.927853 0.208633 -0.434685 0.851459 0.293372 -0.547713 0.750578 0.369652 -0.644988 0.628092 0.435306 -0.723677 0.48759 0.488413 -0.781559 0.333057 0.527484 -0.816969 0.16894 0.551381 -0.180311 0.977538 0.109118 -0.319066 0.927854 0.193089 -0.448657 0.851462 0.271514 -0.565324 0.750575 0.34212 -0.665725 0.628091 0.402879 -0.746946 0.487586 0.452031 -0.806692 0.33305 0.488186 -0.843238 0.168946 0.5103 -0.185485 0.977538 0.100069 -0.328225 0.927853 0.177077 -0.461547 0.851453 0.249002 -0.581548 0.750581 0.313736 -0.68483 0.628099 0.369458 -0.768389 0.487579 0.414541 -0.829847 0.333053 0.447694 -0.867442 0.168943 0.467977 -0.190209 0.977538 0.0907726 -0.33658 0.927855 0.160624 -0.473286 0.851461 0.225865 -0.596363 0.750567 0.284606 -0.702263 0.6281 0.335137 -0.787949 0.487585 0.376029 -0.850969 0.333059 0.406107 -0.889525 0.168934 0.424509 -0.194465 0.977538 0.0812548 -0.344109 0.927855 0.143783 -0.48388 0.851459 0.202185 -0.6097 0.750577 0.254756 -0.717975 0.6281 0.300003 -0.805586 0.487569 0.336611 -0.870014 0.333054 0.363526 -0.909429 0.168949 0.379993 -0.198244 0.977538 0.0715387 -0.350804 0.927853 0.126592 -0.493283 0.851462 0.178004 -0.621552 0.750577 0.224293 -0.731942 0.62809 0.264129 -0.82124 0.487588 0.296349 -0.886927 0.33305 0.320059 -0.927108 0.168944 0.334558 
-0.201543 0.977538 0.0616488 -0.356628 0.927856 0.109084 -0.50149 0.851456 0.153398 -0.631885 0.750576 0.19328 -0.744098 0.628105 0.227603 -0.8349 0.487567 0.255384 -0.901665 0.333071 0.275798 -0.942519 0.168939 0.288301 -0.204342 0.977538 0.0516047 -0.361594 0.927853 0.0913193 -0.50846 0.851458 0.128406 -0.640668 0.750577 0.161795 -0.754448 0.628098 0.190529 -0.846504 0.487577 0.213775 -0.914204 0.333057 0.230875 -0.955623 0.168953 0.24133 -0.206642 0.977539 0.0414349 -0.365664 0.927854 0.0733229 -0.514188 0.851458 0.103107 -0.64788 0.750582 0.129915 -0.762957 0.628084 0.152994 -0.856036 0.487585 0.171652 -0.924503 0.333058 0.185384 -0.966388 0.168946 0.193783 -0.20844 0.977538 0.0311669 -0.368846 0.927853 0.05515 -0.518654 0.85146 0.0775474 -0.653518 0.750578 0.0977135 -0.769584 0.628093 0.115065 -0.863476 0.487586 0.129108 -0.93254 0.333057 0.139435 -0.97479 0.16894 0.145752 -0.209729 0.977538 0.0208209 -0.371118 0.927855 0.0368418 -0.521854 0.85146 0.0518075 -0.657556 0.750572 0.0652796 -0.774335 0.62809 0.0768701 -0.868805 0.487586 0.0862469 -0.938299 0.333046 0.0931455 -0.980802 0.16896 0.0973611 -0.210501 0.977538 0.0104226 -0.372485 0.927855 0.0184432 -0.523776 0.851461 0.0259347 -0.659986 0.750567 0.0326798 -0.777178 0.628103 0.0384796 -0.87201 0.48758 0.0431806 -0.941751 0.333063 0.0466321 -0.984423 0.168921 0.0487506 -0.372941 0.927855 0 -0.524418 0.851461 0 -0.660795 0.750567 0 -0.77813 0.628103 0 -0.873078 0.48758 0 -0.942905 0.333063 0 -0.98563 0.168921 0 -0.210502 0.977538 -0.0104226 -0.372486 0.927854 -0.0184432 -0.371122 0.927853 -0.0368422 -0.523778 0.85146 -0.0259348 -0.521854 0.85146 -0.0518075 -0.65998 0.750573 -0.0326795 -0.65755 0.750578 -0.0652789 -0.777189 0.628089 -0.0384801 -0.774332 0.628093 -0.0768699 -0.872007 0.487586 -0.0431804 -0.868805 0.487586 -0.0862469 -0.941757 0.333046 -0.0466324 -0.938295 0.333058 -0.0931451 -0.984417 0.16896 -0.0487503 -0.980806 0.16894 -0.0973615 -0.20844 0.977538 -0.031166 -0.518658 0.851458 -0.077548 -0.653513 0.750582 -0.0977127 -0.769591 0.628083 -0.115066 -0.863477 0.487585 -0.129108 -0.93254 0.333058 -0.139435 -0.974789 0.168946 -0.145752 -0.365663 0.927854 -0.0733243 -0.647885 0.750577 -0.129916 -0.762946 0.628098 -0.152991 -0.85604 0.487576 -0.171653 -0.924503 0.333057 -0.185384 -0.966387 0.168953 -0.193783 -0.361594 0.927853 -0.0913166 -0.361585 0.927856 -0.0913172 -0.204346 0.977538 -0.0516056 -0.50846 0.851458 -0.128407 -0.754442 0.628105 -0.190528 -0.84651 0.487567 -0.213776 -0.9142 0.333071 -0.230874 -0.955625 0.168938 -0.241331 -0.201539 0.977538 -0.0616476 -0.501491 0.851456 -0.153395 -0.501481 0.851462 -0.153395 -0.356636 0.927853 -0.109087 -0.631885 0.750576 -0.193279 -0.834888 0.487588 -0.25538 -0.901672 0.33305 -0.2758 -0.942518 0.168944 -0.288301 -0.198244 0.977538 -0.0715389 -0.350798 0.927855 -0.12659 -0.621552 0.750577 -0.224294 -0.621554 0.750575 -0.224294 -0.493287 0.851459 -0.178006 -0.731943 0.628091 -0.264125 -0.886925 0.333057 -0.320059 -0.927107 0.168948 -0.334558 -0.194465 0.977538 -0.0812548 -0.344111 0.927854 -0.143783 -0.483877 0.851461 -0.202184 -0.717974 0.628102 -0.300003 -0.717976 0.628099 -0.300003 -0.60971 0.750567 -0.254761 -0.805589 0.487568 -0.336607 -0.909431 0.168936 -0.379995 -0.190207 0.977539 -0.0907718 -0.336583 0.927853 -0.160625 -0.473297 0.851453 -0.22587 -0.596349 0.750581 -0.284599 -0.787947 0.487586 -0.376031 -0.787952 0.487578 -0.376031 -0.702265 0.628098 -0.335138 -0.850968 0.333061 -0.406108 -0.185486 0.977538 -0.100069 -0.328224 0.927854 -0.177076 -0.461533 0.851462 
-0.248996 -0.581555 0.750574 -0.31374 -0.684834 0.628093 -0.369461 -0.829845 0.333057 -0.447694 -0.829847 0.333054 -0.447694 -0.768386 0.487585 -0.414539 -0.867442 0.168942 -0.467977 -0.180313 0.977538 -0.109119 -0.319068 0.927853 -0.19309 -0.448659 0.85146 -0.271516 -0.565322 0.750577 -0.342119 -0.665725 0.628092 -0.402878 -0.746943 0.487591 -0.45203 -0.843238 0.168943 -0.510302 -0.843239 0.168937 -0.510301 -0.80669 0.333059 -0.488184 -0.174693 0.977538 -0.117902 -0.309129 0.927853 -0.208633 -0.434679 0.851463 -0.293368 -0.547717 0.750574 -0.369655 -0.644987 0.628094 -0.435305 -0.723684 0.487576 -0.488417 -0.781557 0.333063 -0.527483 -0.816967 0.168952 -0.55138 -0.168651 0.977538 -0.126397 -0.298436 0.927852 -0.223667 -0.419642 0.85146 -0.314509 -0.52876 0.750576 -0.396293 -0.622661 0.628105 -0.466667 -0.698642 0.48758 -0.523608 -0.754523 0.333045 -0.565487 -0.788699 0.168953 -0.591108 -0.162193 0.977538 -0.134584 -0.287007 0.927853 -0.238152 -0.403574 0.851462 -0.334875 -0.508518 0.750576 -0.421953 -0.598829 0.628095 -0.496891 -0.671891 0.487579 -0.55752 -0.72563 0.33305 -0.602112 -0.758502 0.168961 -0.629386 -0.155338 0.977538 -0.14244 -0.274876 0.927854 -0.252052 -0.386524 0.851458 -0.354427 -0.487026 0.750577 -0.446587 -0.573521 0.628095 -0.525899 -0.643499 0.487575 -0.590067 -0.694963 0.333058 -0.637259 -0.72645 0.168948 -0.666128 -0.148102 0.977539 -0.149945 -0.262081 0.927851 -0.26534 -0.368518 0.851462 -0.373106 -0.464347 0.750576 -0.470125 -0.546812 0.628096 -0.553617 -0.613531 0.48758 -0.621165 -0.662602 0.333056 -0.670845 -0.692621 0.168942 -0.701238 -0.140507 0.977538 -0.157088 -0.248631 0.927855 -0.277972 -0.349623 0.851457 -0.390878 -0.440526 0.750579 -0.492512 -0.518766 0.628094 -0.579983 -0.582063 0.487573 -0.65075 -0.628609 0.333067 -0.702792 -0.657092 0.168959 -0.734631 -0.132567 0.977538 -0.163844 -0.234583 0.927853 -0.28993 -0.329858 0.851461 -0.407686 -0.415638 0.750571 -0.5137 -0.489446 0.628096 -0.604928 -0.549163 0.487584 -0.678735 -0.593088 0.333066 -0.733017 -0.61996 0.168937 -0.766231 -0.124301 0.977538 -0.1702 -0.219957 0.927853 -0.301176 -0.309292 0.851462 -0.423498 -0.389724 0.750571 -0.533628 -0.458933 0.628095 -0.628392 -0.514928 0.487582 -0.705063 -0.55611 0.333063 -0.761453 -0.581306 0.168941 -0.795954 -0.115731 0.977539 -0.176138 -0.204793 0.927854 -0.311685 -0.287972 0.851461 -0.438277 -0.362853 0.750577 -0.552243 -0.427297 0.628095 -0.650319 -0.479433 0.487575 -0.729667 -0.517772 0.333068 -0.788021 -0.541235 0.168936 -0.823727 -0.10688 0.977539 -0.181645 -0.189129 0.927854 -0.32143 -0.265947 0.851459 -0.451984 -0.335099 0.750576 -0.569512 -0.39461 0.628098 -0.670653 -0.442759 0.48758 -0.752483 -0.478174 0.333049 -0.812667 -0.499829 0.168968 -0.849483 -0.0977662 0.977539 -0.186709 -0.173002 0.927853 -0.330392 -0.243268 0.851459 -0.464583 -0.306523 0.750579 -0.585384 -0.360966 0.62809 -0.689352 -0.405 0.487589 -0.773455 -0.437396 0.33306 -0.835318 -0.457215 0.168949 -0.873161 -0.0884141 0.977538 -0.191317 -0.156449 0.927855 -0.33854 -0.219999 0.851455 -0.476051 -0.277194 0.750586 -0.599821 -0.32643 0.628095 -0.706357 -0.36626 0.487577 -0.792542 -0.395546 0.333073 -0.855924 -0.413473 0.168941 -0.894706 -0.0788429 0.977539 -0.195454 -0.139514 0.927857 -0.345857 -0.196186 0.85146 -0.486341 -0.247199 0.75058 -0.612799 -0.2911 0.628094 -0.721636 -0.326618 0.487579 -0.809684 -0.352741 0.333058 -0.874441 -0.368717 0.168952 -0.914058 -0.0690779 0.977541 -0.199103 -0.122245 0.927854 -0.352338 -0.171896 0.851458 -0.495451 -0.216591 0.750576 -0.624279 -0.255054 
0.628093 -0.73515 -0.286173 0.487583 -0.824844 -0.309062 0.333062 -0.890814 -0.323066 0.168942 -0.931175 -0.0490731 0.977539 -0.204964 -0.104669 0.927853 -0.357956 -0.147181 0.851457 -0.503348 -0.122108 0.85146 -0.510006 -0.18545 0.750576 -0.634227 -0.153862 0.750573 -0.642625 -0.218384 0.628099 -0.74686 -0.181184 0.628103 -0.756743 -0.245033 0.487579 -0.837989 -0.203294 0.487576 -0.849083 -0.264628 0.333066 -0.905008 -0.219552 0.333053 -0.916991 -0.276619 0.168938 -0.946014 -0.229494 0.168955 -0.958534 -0.0687953 0.927854 -0.366543 -0.121889 0.750577 -0.649444 -0.143539 0.628095 -0.764783 -0.161048 0.487588 -0.858092 -0.173934 0.333057 -0.926726 -0.181815 0.168949 -0.96871 -0.050583 0.927855 -0.369495 -0.0505846 0.927854 -0.369498 -0.0285852 0.977539 -0.208809 -0.0711309 0.851455 -0.519581 -0.105544 0.628093 -0.770947 -0.118421 0.487579 -0.86501 -0.127889 0.333057 -0.934194 -0.133683 0.168943 -0.976518 -0.018225 0.977538 -0.20997 -0.0453467 0.851458 -0.522458 -0.0453489 0.851455 -0.522463 -0.0322473 0.927856 -0.371542 -0.0571377 0.75058 -0.658305 -0.0754978 0.487571 -0.869813 -0.0815324 0.333066 -0.939372 -0.0852293 0.168948 -0.981933 -0.00781862 0.977538 -0.210613 -0.0138363 0.927853 -0.37269 -0.0245131 0.75058 -0.660325 -0.0245143 0.750578 -0.660328 -0.0194524 0.85146 -0.524058 -0.0288643 0.628099 -0.777598 -0.0349792 0.333063 -0.942255 -0.0365634 0.168947 -0.984947 0.00260742 0.977539 -0.210741 0.0046133 0.927854 -0.372915 0.00648587 0.85146 -0.524379 0.0096231 0.628092 -0.778079 0.00962435 0.628096 -0.778077 0.00817244 0.750577 -0.660733 0.0107968 0.487579 -0.873012 0.0121899 0.168946 -0.98555 0.0130249 0.977538 -0.210355 0.0230485 0.927854 -0.37223 0.0324095 0.851459 -0.523419 0.0408382 0.750572 -0.659525 0.053959 0.48758 -0.871409 0.0539584 0.487578 -0.87141 0.0480927 0.628102 -0.776644 0.058274 0.333059 -0.941104 0.0234125 0.977539 -0.209451 0.0414288 0.927853 -0.370637 0.0582564 0.851459 -0.521175 0.0734045 0.750575 -0.656696 0.0864393 0.628095 -0.773321 0.104745 0.33306 -0.93707 0.104746 0.333063 -0.937069 0.0969889 0.487578 -0.867675 0.109489 0.168947 -0.979525 0.0337417 0.977538 -0.208041 0.059709 0.927854 -0.368134 0.0839603 0.85146 -0.517655 0.105791 0.750578 -0.652258 0.124578 0.628096 -0.768099 0.139778 0.487576 -0.861819 0.157798 0.168951 -0.972911 0.157798 0.168951 -0.972911 0.150958 0.33306 -0.930743 0.0439893 0.977538 -0.206118 0.0778398 0.927854 -0.364729 0.109455 0.851458 -0.512873 0.137918 0.750576 -0.646231 0.162411 0.628102 -0.760993 0.182227 0.487573 -0.853854 0.196801 0.333058 -0.92214 0.205718 0.168955 -0.963916 0.0541284 0.977538 -0.203688 0.0957814 0.927854 -0.360435 0.134685 0.851459 -0.506831 0.169707 0.750576 -0.638619 0.199846 0.628094 -0.752037 0.224232 0.487581 -0.843792 0.242163 0.333061 -0.911278 0.253134 0.168941 -0.952566 0.0641352 0.977541 -0.200751 0.11349 0.927854 -0.355256 0.159586 0.851458 -0.499552 0.201083 0.750575 -0.629446 0.236793 0.628101 -0.741227 0.265684 0.487578 -0.831673 0.286934 0.333058 -0.898188 0.299934 0.168956 -0.938879 0.0739853 0.977538 -0.197345 0.130922 0.927854 -0.349207 0.184096 0.851462 -0.49104 0.231963 0.75058 -0.618727 0.273158 0.628095 -0.728616 0.306489 0.487581 -0.817514 0.331001 0.333056 -0.8829 0.345997 0.168952 -0.922898 0.0836538 0.977539 -0.193444 0.14803 0.927853 -0.34231 0.208153 0.851461 -0.481338 0.262281 0.750574 -0.606504 0.30886 0.628103 -0.714207 0.346545 0.487575 -0.80136 0.374261 0.333062 -0.865447 0.391216 0.168947 -0.904658 0.0931187 0.977538 -0.189073 0.164777 0.927854 -0.334568 0.231702 0.85146 
-0.470458 0.291952 0.750577 -0.592789 0.343802 0.628091 -0.698071 0.38575 0.48759 -0.783233 0.4166 0.333052 -0.845885 0.435475 0.168953 -0.884204 0.102355 0.977538 -0.184235 0.181119 0.927855 -0.326008 0.254686 0.851457 -0.458427 0.320909 0.750579 -0.577623 0.377902 0.628097 -0.68021 0.42401 0.487574 -0.763208 0.457924 0.333064 -0.824242 0.478669 0.168941 -0.861589 0.111341 0.977538 -0.178948 0.19702 0.927855 -0.316652 0.277046 0.851458 -0.44527 0.349083 0.750576 -0.56105 0.411079 0.628096 -0.660688 0.461237 0.487578 -0.741302 0.498123 0.33307 -0.800586 0.520692 0.168936 -0.836864 0.120054 0.977538 -0.173223 0.21244 0.927854 -0.306524 0.298729 0.851456 -0.431026 0.376401 0.75058 -0.543095 0.44325 0.628095 -0.639552 0.497333 0.487578 -0.717585 0.537107 0.333066 -0.774973 0.561443 0.168945 -0.810086 0.128474 0.977538 -0.167073 0.227339 0.927853 -0.295643 0.319677 0.851458 -0.415723 0.402799 0.750576 -0.523821 0.474333 0.628101 -0.616845 0.53221 0.487578 -0.692113 0.574775 0.333056 -0.747467 0.600817 0.168947 -0.781329 0.136579 0.977539 -0.160514 0.241681 0.927855 -0.284036 0.339844 0.851458 -0.399405 0.428212 0.750575 -0.50326 0.50426 0.628096 -0.592636 0.565786 0.487584 -0.664942 0.611037 0.333056 -0.718128 0.63872 0.168948 -0.750662 0.144348 0.977539 -0.153563 0.255432 0.927854 -0.271739 0.35918 0.851459 -0.382109 0.452577 0.750575 -0.481469 0.53295 0.628098 -0.566972 0.597978 0.487578 -0.636152 0.645805 0.333059 -0.687029 0.675062 0.168949 -0.718155 0.151767 0.977538 -0.146238 0.268557 0.927855 -0.258772 0.377639 0.851458 -0.36388 0.475832 0.750577 -0.458495 0.560339 0.628094 -0.539924 0.628708 0.487579 -0.6058 0.678988 0.333068 -0.654249 0.709751 0.168938 -0.683895 0.158813 0.977539 -0.138553 0.281026 0.927854 -0.245177 0.395169 0.85146 -0.344757 0.497927 0.750572 -0.434408 0.586351 0.6281 -0.511549 0.657897 0.487574 -0.573971 0.710513 0.333061 -0.619873 0.742706 0.168942 -0.647956 0.165472 0.977538 -0.13053 0.292808 0.927854 -0.230977 0.411735 0.851461 -0.324791 0.518798 0.750577 -0.409247 0.610936 0.628092 -0.481932 0.685475 0.487584 -0.540727 0.740302 0.333048 -0.58398 0.773841 0.168947 -0.610432 0.171723 0.977539 -0.122186 0.303874 0.927853 -0.216218 0.427295 0.851459 -0.304034 0.314193 0.927854 -0.200922 0.538401 0.750578 -0.383087 0.441809 0.851458 -0.282532 0.634022 0.628094 -0.451125 0.556684 0.750581 -0.355993 0.711379 0.487579 -0.506168 0.655556 0.628092 -0.419222 0.768275 0.333059 -0.546649 0.735541 0.487578 -0.47037 0.80308 0.168947 -0.571419 0.794368 0.333058 -0.507989 0.830358 0.168943 -0.531003 0.177556 0.977538 -0.113545 0.182954 0.977538 -0.104625 0.323746 0.927854 -0.185139 0.182955 0.977538 -0.104626 0.573611 0.750579 -0.328028 0.675484 0.628096 -0.386285 0.757902 0.487578 -0.433419 0.818517 0.333058 -0.468083 0.855601 0.168945 -0.48929 0.187906 0.977538 -0.0954509 0.467557 0.851459 -0.237505 0.478733 0.851457 -0.214093 0.478731 0.851459 -0.214091 0.603212 0.750577 -0.269759 0.693763 0.628092 -0.35241 0.778408 0.487582 -0.395405 0.840666 0.333053 -0.427032 0.878751 0.168957 -0.446375 0.192394 0.977539 -0.0860391 0.79701 0.48758 -0.356428 0.860753 0.333062 -0.384934 0.899751 0.168939 -0.402377 0.996908 -0.00426838 -0.0784612 0.972267 -0.164019 -0.166717 0.932561 -0.323664 -0.159911 0.867535 -0.474614 -0.148744 0.778852 -0.612826 -0.133546 0.668839 -0.734509 -0.114681 0.540449 -0.836259 -0.092659 0.397118 -0.915238 -0.0680976 0.242764 -0.969192 -0.0416269 0.0816931 -0.996559 -0.0140082 0.97236 -0.233437 -0.0047451 0.942364 -0.324711 -0.0806991 0.923873 -0.382692 
-0.00232208 0.875982 -0.475938 -0.0783484 0.852645 -0.522489 0.00129019 0.760389 -0.64944 0.00602264 0.649408 -0.76035 0.011727 0.5224 -0.852501 0.0184413 0.382564 -0.923566 0.025904 0.233304 -0.971809 0.0340005 0.0783547 -0.995555 -0.0522525 0.785571 -0.614185 -0.0751992 0.67359 -0.735657 -0.0713147 0.543116 -0.836986 -0.0669221 0.397707 -0.915411 -0.0620701 0.241334 -0.968759 -0.0571345 0.0376816 -0.996209 -0.0784062 0.0174646 -0.996559 -0.0810243 0.0309029 -0.996559 -0.0769089 0.0434553 -0.996559 -0.0705803 0.0547595 -0.996559 -0.0622197 0.0644874 -0.996559 -0.0520699 0.0723608 -0.996559 -0.0404208 0.0781513 -0.996559 -0.0276108 -0.00374137 -0.92387 -0.382688 0.0555626 -0.915762 -0.39786 0.00620562 -0.852628 -0.522482 0.0639168 -0.837157 -0.543215 0.115523 -0.836262 -0.536023 0.204445 -0.836262 -0.50879 0.287498 -0.836262 -0.466917 0.362271 -0.836262 -0.411614 0.426616 -0.836262 -0.344479 0.478701 -0.836262 -0.267416 0.517004 -0.836263 -0.182675 0.540438 -0.836264 -0.0926741 0.015149 -0.760318 -0.649375 0.0714681 -0.735645 -0.673587 0.142974 -0.734503 -0.663373 0.253023 -0.734501 -0.629673 0.355789 -0.734508 -0.577852 0.448346 -0.734505 -0.5094 0.527971 -0.734505 -0.426319 0.59244 -0.734501 -0.330943 0.639836 -0.734504 -0.226083 0.668845 -0.734504 -0.114672 0.0229018 -0.649275 -0.760209 0.0780011 -0.614051 -0.785403 0.166482 -0.612828 -0.77248 0.294631 -0.612831 -0.733233 0.414326 -0.61282 -0.672894 0.522077 -0.612828 -0.593193 0.614823 -0.61282 -0.496432 0.689861 -0.612835 -0.385391 0.74508 -0.612825 -0.263252 0.778853 -0.612825 -0.133545 0.0293153 -0.522273 -0.852274 0.0832959 -0.475753 -0.875626 0.185436 -0.474621 -0.860435 0.328183 -0.474618 -0.816721 0.461488 -0.474625 -0.749506 0.58152 -0.474624 -0.660732 0.68482 -0.474623 -0.55295 0.76842 -0.474616 -0.429267 0.82991 -0.474621 -0.293231 0.86753 -0.47462 -0.148751 0.0342379 -0.382463 -0.923336 0.0871995 -0.324523 -0.94185 0.199334 -0.323658 -0.924939 0.352793 -0.323651 -0.877945 0.49609 -0.323648 -0.805696 0.625111 -0.323653 -0.710271 0.736156 -0.323658 -0.594407 0.826025 -0.323655 -0.461443 0.89213 -0.323649 -0.315207 0.93257 -0.323647 -0.159894 0.0375766 -0.233275 -0.971685 0.089581 -0.164486 -0.982303 0.207817 -0.164019 -0.964319 0.367817 -0.164013 -0.91532 0.517206 -0.164016 -0.839998 0.651726 -0.164013 -0.740508 0.7675 -0.164011 -0.619713 0.861196 -0.164006 -0.481085 0.930104 -0.16402 -0.328639 0.972266 -0.164021 -0.166719 0.0392614 -0.0783955 -0.996149 0.0784427 -0.0184184 -0.996749 0.233355 -0.0268823 -0.97202 0.382421 -0.0373124 -0.923234 0.521961 -0.0453316 -0.851764 0.523665 -0.0494527 -0.850488 0.649078 -0.033628 -0.759978 0.659475 -0.0602041 -0.749312 0.760189 -0.0238596 -0.649264 0.852535 -0.0160357 -0.522425 0.923829 -0.0101718 -0.382671 0.972352 -0.00624036 -0.233435 0.982168 -0.0835561 -0.168415 0.210207 -0.0662616 -0.975409 0.372264 -0.0568243 -0.926386 0.776182 -0.0689833 -0.626724 0.870506 -0.0757971 -0.486286 0.939801 -0.0806648 -0.332065 0.242763 -0.969192 -0.0416274 0.232241 -0.969191 -0.0820505 0.215035 -0.969191 -0.120119 0.191638 -0.969191 -0.154737 0.16273 -0.969191 -0.1849 0.129135 -0.969192 -0.209742 0.0918332 -0.969192 -0.228548 0.0518997 -0.969191 -0.24078 -0.0144546 -0.97227 -0.233415 0.046722 -0.969284 -0.241467 0.39712 -0.915237 -0.0680944 0.379896 -0.915238 -0.134232 0.351748 -0.915239 -0.196498 0.313481 -0.915237 -0.253121 0.266188 -0.915239 -0.302457 0.211254 -0.915239 -0.343089 0.150231 -0.915238 -0.373858 0.084885 -0.915237 -0.393873 0.0848785 -0.915238 -0.393872 0.115511 -0.836265 -0.536021 
0.996899 -0.0784605 -0.00596967 0.982937 -0.164596 -0.0821164 0.20782 -0.164015 -0.964318 0.199338 -0.323654 -0.924939 0.185438 -0.474619 -0.860435 0.16649 -0.612823 -0.772482 0.142969 -0.734505 -0.663372 0.0518914 -0.969192 -0.24078 0.367812 -0.164017 -0.915321 0.352785 -0.323658 -0.877945 0.328178 -0.474622 -0.816721 0.294637 -0.612827 -0.733234 0.253018 -0.734503 -0.629672 0.150227 -0.915239 -0.373858 0.204446 -0.836262 -0.50879 0.0918394 -0.969191 -0.228547 0.51721 -0.164012 -0.839996 0.496086 -0.323652 -0.805697 0.461498 -0.474617 -0.749505 0.414308 -0.61283 -0.672895 0.355804 -0.734501 -0.57785 0.21125 -0.915239 -0.343089 0.287495 -0.836263 -0.466917 0.129145 -0.969191 -0.209739 0.651724 -0.164016 -0.74051 0.625118 -0.323647 -0.710268 0.581519 -0.474625 -0.660732 0.522092 -0.612819 -0.593189 0.448337 -0.734509 -0.509402 0.2662 -0.915237 -0.302453 0.362275 -0.836261 -0.411613 0.162728 -0.969191 -0.184901 0.767499 -0.164013 -0.619714 0.73616 -0.323654 -0.594404 0.68482 -0.474623 -0.55295 0.614809 -0.612829 -0.496438 0.527971 -0.734506 -0.42632 0.313476 -0.915238 -0.253124 0.426614 -0.836262 -0.34448 0.191637 -0.969191 -0.154737 0.861193 -0.164011 -0.481089 0.826022 -0.323659 -0.461446 0.768412 -0.474624 -0.429273 0.689883 -0.612819 -0.385376 0.592431 -0.734506 -0.330949 0.351751 -0.915238 -0.196496 0.478704 -0.836261 -0.267414 0.215031 -0.969191 -0.120123 0.930111 -0.164006 -0.328627 0.892125 -0.323655 -0.315214 0.829915 -0.474616 -0.293226 0.745066 -0.612836 -0.263266 0.639842 -0.734501 -0.226077 0.3799 -0.915237 -0.134227 0.517008 -0.836262 -0.182671 0.232236 -0.969192 -0.0820577 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0.000411846 -0.078456 -0.996918 -0.000658331 -0.105931 -0.994373 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0 -0.962164 -0.272469 0 -0.902282 -0.431146 0 -0.816437 -0.577434 0 -0.70711 -0.707104 0 -0.577432 -0.816439 0 -0.431153 -0.902279 0 -0.272463 -0.962166 0 -0.105931 -0.994374 -0.000324218 -0.0487508 -0.998811 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.0105742 -0.999944 0 -0.105931 -0.994374 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0.000597161 -0.233439 -0.972371 -0.000820796 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000735962 -0.382688 -0.923877 -0.000936536 -0.431152 -0.902279 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000828606 -0.522498 -0.85264 -0.00100611 -0.577432 -0.816438 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000874936 -0.649445 -0.760408 -0.00102949 -0.707109 -0.707104 0 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.999944 -0.0106181 0.000874668 -0.760405 -0.649449 -0.00100642 -0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.962164 
-0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000828574 -0.852644 -0.522492 -0.000936535 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000735657 -0.923876 -0.382691 -0.000821035 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000597162 -0.972371 -0.233439 -0.000658331 -0.994373 -0.105931 0 -0.999944 -0.0106181 0 -0.994374 -0.105931 0.000411616 -0.996917 -0.0784619 -0.000233408 -0.998794 -0.0490951 0.00102111 -0.994373 -0.105931 -7.10303e-05 -0.989177 -0.146727 7.86155e-05 -0.970034 -0.242968 0.000992058 -0.962164 -0.272469 -0.00030734 -0.94151 -0.336985 0.000601694 -0.904104 -0.427313 -0.000345316 -0.857636 -0.514257 0.000226575 -0.803289 -0.595589 -0.000172195 -0.740873 -0.671645 0.00112351 -0.707109 -0.707104 -0.000179319 -0.67145 -0.74105 0.000234557 -0.595864 -0.803086 -0.000386242 -0.513812 -0.857903 0.000912404 -0.428295 -0.903639 0.000761071 -0.431152 -0.902279 -0.000370411 -0.336562 -0.941661 0.00122892 -0.272463 -0.962166 0.00010185 -0.243083 -0.970006 -9.18887e-05 -0.146625 -0.989192 0.00137373 -0.105931 -0.994373 0.000764696 -0.902282 -0.431146 0.000975876 -0.816437 -0.577434 0.00106411 -0.577432 -0.816438 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.105931 -0.994374 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.272463 -0.962166 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.431153 -0.902279 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.577432 -0.816439 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.70711 -0.707104 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 -0.816437 -0.577434 0 
-0.816437 -0.577434 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.902282 -0.431146 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.962164 -0.272469 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.994374 -0.105931 0 -0.999944 -0.0106181 0.078456 0.000430563 -0.996917 0.105931 -0.000687532 -0.994373 0.233439 0.00062417 -0.972371 0.272466 -0.000857391 -0.962165 0.431148 2.5084e-07 -0.902281 0.577432 0 -0.816439 0.70711 0 -0.707104 0.816437 1.60531e-07 -0.577434 0.902282 0 -0.431146 0.962165 7.57483e-08 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.0106181 0.996917 -0.000613045 -0.0784619 0.994373 0.000980492 -0.105931 0.972371 -0.000889389 -0.233439 0.962164 0.00122282 -0.272469 0.902282 0 -0.431146 0.816437 1.60531e-07 -0.577434 0.70711 1.9658e-07 -0.707104 0.577432 0 -0.816439 0.431148 2.5084e-07 -0.902281 0.272466 0 -0.962166 0.105931 0 -0.994374 0.0105742 2.77991e-07 -0.999944 0.105931 2.76443e-07 -0.994374 0.105931 2.76678e-07 -0.994374 0.272466 2.67489e-07 -0.962166 0.431148 0 -0.902281 0.577432 0 -0.816439 0.70711 1.9658e-07 -0.707104 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.962165 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.962164 0 -0.272469 0.962164 0 -0.272469 0.382688 0.000768959 -0.923877 0.431148 -0.000978303 -0.902281 0.577432 2.26976e-07 -0.816439 0.70711 0 -0.707104 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.962164 0 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 2.95168e-09 -0.0106173 0.994374 2.94495e-08 -0.105931 0.522498 0.000865944 -0.85264 0.577432 -0.00105093 -0.816438 0.70711 1.9658e-07 -0.707104 0.816437 0 -0.577434 0.902282 0 -0.431146 0.962164 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.999944 2.95168e-09 -0.0106173 0.994374 2.94495e-08 -0.105931 0.649445 0.000914323 -0.760408 0.707109 -0.00107539 -0.707104 0.816437 1.60531e-07 -0.577434 0.902282 0 -0.431146 0.962164 0 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.010619 0.994374 0 -0.105931 0.760405 0.000914012 -0.649449 0.816437 -0.00105132 -0.577434 0.902282 1.19861e-07 -0.431146 0.962164 0 -0.272469 0.994374 0 -0.105931 0.999944 2.95216e-09 -0.010619 0.999944 1.52134e-09 -0.0106179 0.852644 0.00086582 -0.522492 0.902282 -0.000978349 -0.431146 0.962164 7.57483e-08 -0.272469 0.994374 0 -0.105931 0.999944 0 -0.0106172 0.994374 0 -0.105931 0.923876 0.000768703 -0.382691 0.962164 -0.000857721 -0.272469 0.994374 2.94495e-08 -0.105931 0.999944 0 -0.0106172 0.994374 0 -0.105931 0.972371 0.000623964 -0.233439 0.994373 -0.000687779 -0.105931 0.999944 2.9519e-09 -0.0106181 0.994374 2.94495e-08 -0.105931 0.996917 0.000430068 -0.0784619 0.923876 -0.00109566 -0.382691 0.902281 0.00139484 -0.431146 0.816437 0 -0.577434 0.70711 1.9658e-07 -0.707104 0.577432 2.26976e-07 -0.816439 0.431148 0 -0.902281 0.272466 2.67489e-07 -0.962166 0.105931 0 -0.994374 0.0104865 0 -0.999945 0.105931 0 -0.994374 0.852644 -0.00123405 -0.522491 0.816436 0.00149892 -0.577434 0.70711 0 -0.707104 0.577432 2.26976e-07 -0.816439 0.431148 
2.5084e-07 -0.902281 0.272466 0 -0.962166 0.105931 2.76443e-07 -0.994374 0.0104865 0 -0.999945 0.105931 0 -0.994374 0.760404 -0.00130269 -0.649449 0.707109 0.00153329 -0.707103 0.577432 0 -0.816439 0.431148 2.5084e-07 -0.902281 0.272466 2.67489e-07 -0.962166 0.105931 0 -0.994374 0.010662 2.77991e-07 -0.999943 0.105931 2.76443e-07 -0.994374 0.649445 -0.0013031 -0.760408 0.577431 0.00149846 -0.816438 0.431148 0 -0.902281 0.272466 2.67489e-07 -0.962165 0.105931 2.76443e-07 -0.994374 0.010662 0 -0.999943 0.0105557 1.43272e-07 -0.999944 0.522497 -0.00123409 -0.85264 0.431148 0.00139496 -0.90228 0.272466 0 -0.962166 0.105931 2.76443e-07 -0.994374 0.0104996 2.77991e-07 -0.999945 0.105931 2.76443e-07 -0.994374 0.382688 -0.00109581 -0.923877 0.272465 0.00122262 -0.962165 0.105931 0 -0.994374 0.0104996 2.77991e-07 -0.999945 0.105931 2.76443e-07 -0.994374 0.233439 -0.000889389 -0.972371 0.105931 0.000980492 -0.994373 0.0105742 0 -0.999944 0.105931 0 -0.994374 0.078456 -0.000613387 -0.996917 0.105931 0 -0.994374 0.272466 0 -0.962166 0.272466 0 -0.962166 0.272466 2.67489e-07 -0.962166 0.272466 0 -0.962166 0.272466 2.67299e-07 -0.962166 0.272466 0 -0.962166 0.272466 2.67299e-07 -0.962166 0.272466 2.67299e-07 -0.962165 0.272466 0 -0.962166 0.431148 2.5084e-07 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 0 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 2.50101e-07 -0.902281 0.431148 0 -0.902281 0.577432 2.26976e-07 -0.816439 0.577432 0 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 2.26976e-07 -0.816439 0.577432 0 -0.816439 0.577432 0 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 2.26655e-07 -0.816439 0.577432 0 -0.816439 0.70711 1.9658e-07 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 0 -0.707104 0.70711 1.9658e-07 -0.707104 0.70711 1.96955e-07 -0.707104 0.70711 1.96955e-07 -0.707104 0.70711 0 -0.707104 0.816437 1.60531e-07 -0.577434 0.816437 0 -0.577434 0.816437 0 -0.577434 0.816437 1.60531e-07 -0.577434 0.816437 1.61004e-07 -0.577434 0.816437 1.60531e-07 -0.577434 0.816437 0 -0.577434 0.816437 1.61004e-07 -0.577434 0.816437 0 -0.577434 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.902282 0 -0.431146 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.902282 1.18799e-07 -0.431146 0.902282 1.19862e-07 -0.431146 0.902282 1.19861e-07 -0.431146 0.902282 0 -0.431146 0.962164 7.57483e-08 -0.272469 0.962164 0 -0.272469 0.962164 0 -0.272469 0.962164 7.57483e-08 -0.272469 0.962164 0 -0.272469 0.962165 7.57483e-08 -0.272469 0.962165 7.57483e-08 -0.272469 0.994374 2.94495e-08 -0.105931 0.994374 0 -0.105931 0.996899 -0.0784605 0.00596967 0.972267 -0.164019 0.16672 0.892128 -0.323647 0.315216 0.768416 -0.474614 0.429276 0.614817 -0.61282 0.49644 0.448335 -0.734507 0.509407 0.287497 -0.83626 0.46692 0.150231 -0.915239 0.373856 0.0518917 -0.969191 0.240781 0.972353 -0.00623935 0.233434 0.939802 -0.0806635 0.332064 0.923829 -0.0101675 0.382671 0.870507 -0.0757917 0.486286 0.767499 -0.164011 0.619714 0.625119 -0.323657 0.710262 0.461492 -0.474625 0.749504 0.294637 -0.612828 0.733233 0.142969 -0.734505 0.663372 0.0639156 -0.837155 0.543218 0.00619757 -0.852628 0.522482 0.055557 -0.915763 0.397858 -0.00374137 -0.92387 0.382688 0.046722 -0.969284 0.241467 0.0518917 -0.969191 0.240781 0.0848849 -0.915237 0.393872 0.150224 -0.915238 0.373863 0.20444 -0.836262 0.508792 0.253018 -0.734503 0.629672 0.253021 -0.734505 0.629668 0.852535 -0.0160298 0.522425 0.77618 -0.0689777 0.626727 0.651725 -0.164009 0.74051 
0.496079 -0.323648 0.805703 0.32818 -0.474614 0.816725 0.166484 -0.612824 0.772482 0.0714611 -0.735644 0.673588 0.0151476 -0.760315 0.649378 0.760186 -0.0238533 0.649267 0.659478 -0.0601946 0.74931 0.517206 -0.164016 0.839998 0.352795 -0.323656 0.877942 0.185443 -0.474626 0.860431 0.0779959 -0.614054 0.7854 0.0228961 -0.649278 0.760206 0.649082 -0.0336189 0.759976 0.523665 -0.0494431 0.850488 0.367812 -0.164018 0.915321 0.199332 -0.323653 0.924941 0.0832935 -0.475749 0.875629 0.0293078 -0.522273 0.852274 0.521961 -0.045322 0.851765 0.382416 -0.0373017 0.923237 0.233361 -0.0268835 0.972019 0.0784427 -0.0184071 0.996749 0.039254 -0.0783955 0.996149 0.0895738 -0.164486 0.982304 0.0375649 -0.23328 0.971684 0.0871878 -0.324528 0.941849 0.199339 -0.323659 0.924937 0.207826 -0.164013 0.964317 0.367808 -0.164012 0.915324 0.372255 -0.0568222 0.92639 0.210217 -0.0662545 0.975407 0.207827 -0.164015 0.964317 0.0342354 -0.382458 0.923338 -0.0144645 -0.97227 0.233414 0.037678 -0.996209 0.0784066 0.0783547 -0.995555 0.052249 0.081693 -0.996559 0.0140082 0.078151 -0.996559 0.0276116 0.0723608 -0.996559 0.0404208 0.0644874 -0.996559 0.0520699 0.0547595 -0.996559 0.0622197 0.0434553 -0.996559 0.0705803 0.0309052 -0.996559 0.0769078 0.017462 -0.996559 0.0810246 0.382564 -0.923566 -0.0259042 0.397707 -0.915411 0.06207 0.5224 -0.852501 -0.0184413 0.543117 -0.836986 0.0669241 0.540443 -0.836264 0.0926563 0.517006 -0.836262 0.182675 0.478703 -0.83626 0.26742 0.426617 -0.836262 0.344476 0.362271 -0.83626 0.411619 0.362274 -0.836262 0.411611 0.266199 -0.915238 0.302452 0.211249 -0.915238 0.343093 0.211251 -0.915239 0.34309 0.649407 -0.76035 -0.0117274 0.673589 -0.735658 0.0713144 0.668843 -0.734505 0.114683 0.639839 -0.734501 0.226085 0.592435 -0.734507 0.33094 0.527972 -0.734507 0.426316 0.527972 -0.734507 0.426316 0.760389 -0.64944 -0.00602255 0.785572 -0.614184 0.0751997 0.778852 -0.612826 0.133544 0.745072 -0.612835 0.263251 0.68987 -0.61282 0.385396 0.689872 -0.612836 0.385369 0.852645 -0.522489 -0.00129023 0.875982 -0.475938 0.0783487 0.867532 -0.474619 0.148743 0.829912 -0.474615 0.293234 0.829912 -0.47462 0.293227 0.923873 -0.382692 0.00232204 0.942364 -0.324711 0.0806991 0.932567 -0.323647 0.159913 0.932564 -0.323664 0.159893 0.97236 -0.233437 0.0047451 0.982937 -0.164596 0.0821164 0.867534 -0.474614 0.148749 0.778853 -0.612824 0.133546 0.668839 -0.734511 0.114669 0.540448 -0.836258 0.0926757 0.397119 -0.915237 0.0680977 0.242764 -0.969192 0.0416269 0.242764 -0.969192 0.0416274 0.241334 -0.968759 0.0571343 0.233304 -0.971809 -0.0339982 0.397118 -0.915238 0.0680942 0.982169 -0.0835543 0.168415 0.996908 -0.0042679 0.0784616 0.930108 -0.164022 0.328626 0.826025 -0.323655 0.461444 0.684822 -0.474623 0.552948 0.522084 -0.612828 0.593186 0.355803 -0.734511 0.577839 0.287501 -0.836262 0.466914 0.892128 -0.323655 0.315207 0.745074 -0.612825 0.263269 0.639838 -0.734505 0.226076 0.517006 -0.836264 0.18267 0.379899 -0.915238 0.134227 0.232237 -0.969192 0.0820567 0.232238 -0.969192 0.0820523 0.379899 -0.915237 0.13423 0.972267 -0.164022 0.166717 0.930106 -0.16401 0.328638 0.861196 -0.164012 0.481084 0.736158 -0.323662 0.594402 0.581521 -0.474621 0.660734 0.414315 -0.612822 0.672898 0.355795 -0.734504 0.577853 0.861196 -0.164011 0.481085 0.826026 -0.323661 0.461437 0.768418 -0.474623 0.429263 0.592434 -0.734501 0.330954 0.478704 -0.836262 0.267411 0.35175 -0.915239 0.196493 0.215031 -0.969191 0.120126 0.215033 -0.969192 0.120117 0.351749 -0.915237 0.196502 0.767498 -0.164009 0.619716 0.736157 -0.323658 0.594407 
0.684821 -0.474621 0.552951 0.61482 -0.612828 0.496425 0.426615 -0.836259 0.344486 0.313476 -0.915238 0.253127 0.191637 -0.969191 0.154738 0.191638 -0.969191 0.154737 0.313478 -0.915239 0.253118 0.651729 -0.164015 0.740505 0.625113 -0.323647 0.710272 0.581523 -0.474625 0.660729 0.52208 -0.612822 0.593196 0.448339 -0.734511 0.509398 0.16273 -0.969191 0.184899 0.266196 -0.915237 0.302458 0.16273 -0.969191 0.184898 0.129136 -0.969191 0.209744 0.129141 -0.969191 0.209739 0.0918408 -0.969191 0.228546 0.0918395 -0.969191 0.228547 0.517206 -0.164017 0.839998 0.496085 -0.323656 0.805696 0.461484 -0.474614 0.749516 0.414319 -0.612827 0.672891 0.352793 -0.323654 0.877944 0.328191 -0.474625 0.816714 0.294632 -0.612824 0.733238 0.204444 -0.836264 0.508789 0.115523 -0.836263 0.536021 0.115524 -0.836263 0.53602 0.0848875 -0.915238 0.39387 0.185432 -0.474615 0.860439 0.166487 -0.612827 0.77248 0.142969 -0.734505 0.663372 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0.000411619 -0.996917 0.0784624 -0.00065828 -0.994373 0.10593 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.0105742 0.999944 0 -0.105936 0.994373 0 -0.272463 0.962166 0 -0.431153 0.902279 0 -0.577428 0.816441 0 -0.70711 0.707104 0 -0.816437 0.577434 0 -0.902281 0.431148 0 -0.962164 0.272469 0 -0.994374 0.10593 -0.000233408 -0.998794 0.0490951 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.999944 0.0106181 0 -0.994374 0.10593 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.816437 0.577434 0.000597272 -0.972372 0.233438 -0.000820962 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000735759 -0.923876 0.382691 -0.000936494 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000828575 -0.852644 0.522492 -0.00100642 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000874746 -0.760402 0.649452 -0.0010293 -0.707109 0.707104 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.0105742 0.999944 0.000875029 -0.649448 0.760405 -0.0010062 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000828379 -0.522498 0.85264 -0.000936776 -0.431152 0.902279 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000735889 -0.382683 0.92388 -0.000820797 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000596959 -0.233445 0.97237 -0.000658543 -0.105936 0.994373 0 -0.0105742 0.999944 0 -0.105936 0.994373 0.000411846 -0.078456 0.996918 -0.000324139 -0.0487414 0.998811 0.00137425 -0.105936 0.994372 -9.15281e-05 -0.146635 0.989191 0.000102167 -0.243074 0.970008 0.00122956 -0.272463 0.962166 -0.000370004 -0.336571 0.941658 0.000761362 -0.431152 0.902279 0.000912291 -0.428302 0.903635 -0.000386243 -0.513812 0.857903 0.00106402 -0.577428 0.816441 
0.000234588 -0.595858 0.80309 -0.000179349 -0.671456 0.741045 -0.000172221 -0.740869 0.67165 0.000226575 -0.803289 0.595589 0.000975876 -0.816437 0.577434 -0.0003454 -0.857638 0.514253 0.000764549 -0.902281 0.431148 0.000601769 -0.9041 0.42732 -0.00030737 -0.941511 0.336982 0.000991985 -0.962164 0.272469 7.84816e-05 -0.970035 0.242966 -7.11067e-05 -0.989177 0.146728 0.0010211 -0.994373 0.10593 0.00112331 -0.707109 0.707104 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.994374 0.10593 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.962164 0.272469 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.902281 0.431148 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.816437 0.577434 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.70711 0.707104 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.577428 0.816441 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.431153 0.902279 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.272463 0.962166 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.105936 0.994373 0 -0.0105742 0.999944 0.996917 -0.0784619 0.000392867 0.994373 -0.105931 -0.000628461 0.972371 -0.233439 0.000569899 0.962165 -0.272468 -0.000783802 0.902281 -0.431148 0 0.816439 -0.577432 0 0.707107 
-0.707107 0 0.577432 -0.816439 0 0.522489 -0.852646 -0.000791427 0.577432 -0.816439 0.000960172 0.649452 -0.760402 -0.000835537 0.707106 -0.707106 0.00098207 0.816439 -0.577432 0 0.902281 -0.431148 0 0.962165 -0.272468 0 0.994374 -0.105931 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.923875 -0.382693 0.00070215 0.902281 -0.431148 -0.000894036 0.816439 -0.577432 0 0.707107 -0.707107 0 0.577432 -0.816439 0 0.431144 -0.902283 0 0.382693 -0.923875 -0.000702619 0.431144 -0.902283 0.000893435 0.852646 -0.522489 0.00079073 0.816439 -0.577432 -0.000960879 0.707107 -0.707107 0 0.577432 -0.816439 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.233439 -0.972371 -0.000570512 0.272471 -0.962164 0.000783276 0.760402 -0.649452 0.00083482 0.707106 -0.707106 -0.000982787 0.577432 -0.816439 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0784619 -0.996917 -0.000393413 0.105931 -0.994373 0.000627902 0.649452 -0.760402 0.000834764 0.577432 -0.816439 -0.000961002 0.431144 -0.902283 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0105304 -0.999945 0 0.105931 -0.994374 0 0.522489 -0.852646 0.000790561 0.431144 -0.902283 -0.000894352 0.272471 -0.962164 0 0.105931 -0.994374 0 0.0105304 -0.999945 0 0.105931 -0.994374 0 0.382693 -0.923875 0.00070168 0.272471 -0.962164 -0.000784254 0.105931 -0.994374 0 0.0107058 -0.999943 0 0.0106181 -0.999944 -1.69322e-07 0.233439 -0.972371 0.000569524 0.105931 -0.994373 -0.000628912 0.0107058 -0.999943 0 0.105931 -0.994374 0 0.0784619 -0.996917 0.0003924 0.760402 -0.649452 -0.00083548 0.816439 -0.577432 0.000960293 0.902281 -0.431148 0 0.962165 -0.272468 0 0.994374 -0.105931 0 0.999944 -0.010619 0 0.994374 -0.105931 0 0.852646 -0.522489 -0.00079126 0.902281 -0.431148 0.000893598 0.962165 -0.272468 0 0.994374 -0.105931 0 0.999944 -0.010619 0 0.994374 -0.105931 0 0.923875 -0.382693 -0.000702539 0.962165 -0.272468 0.000783526 0.994374 -0.105931 0 0.999944 -0.0106172 0 0.999944 -0.0106181 -1.79798e-09 0.972371 -0.233439 -0.000570136 0.994373 -0.105931 0.000628353 0.999944 -0.0106172 0 0.994374 -0.105931 0 0.996917 -0.0784619 -0.000392946 0.994374 -0.105931 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.962165 -0.272468 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.902281 -0.431148 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.816439 -0.577432 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.577432 -0.816439 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.431144 -0.902283 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.272471 -0.962164 0 0.105931 -0.994374 0 0.996917 0.000430028 0.0784624 0.994373 -0.000687785 0.10593 0.972371 0.000623951 0.233438 0.962164 -0.000857798 0.272469 0.902281 -1.19862e-07 0.431148 0.816437 0 0.577434 0.70711 0 0.707104 0.577428 -2.26976e-07 0.816441 0.431148 0 0.902281 0.272466 -2.67489e-07 0.962166 0.105936 -2.76442e-07 0.994373 0.0105742 0 0.999944 0.078456 -0.000613387 0.996917 0.105936 0.000980808 0.994372 0.233445 -0.000889086 0.97237 0.272465 0.00122262 0.962165 0.431148 0 0.902281 0.577428 -2.26976e-07 0.816441 0.70711 -1.9658e-07 0.707104 0.816437 0 0.577434 0.902281 -1.19862e-07 0.431148 0.962164 0 0.272469 0.994374 0 0.10593 0.999944 -2.9519e-09 0.0106181 0.994374 -2.94493e-08 0.10593 0.994374 -2.93089e-08 0.10593 0.962164 -7.57483e-08 0.272469 0.902281 0 0.431148 0.816437 0 0.577434 0.70711 
-1.9658e-07 0.707104 0.577428 0 0.816441 0.431148 -2.5084e-07 0.902281 0.272466 -2.67489e-07 0.962165 0.105936 0 0.994373 0.272466 0 0.962166 0.272466 0 0.962166 0.923876 0.000768597 0.382691 0.902281 -0.000978547 0.431148 0.816437 -1.60531e-07 0.577434 0.70711 0 0.707104 0.577428 0 0.816441 0.431148 -2.5084e-07 0.902281 0.272466 0 0.962166 0.105936 -2.76442e-07 0.994373 0.0106489 -2.77991e-07 0.999943 0.105936 -2.76442e-07 0.994373 0.852644 0.00086553 0.522492 0.816437 -0.00105164 0.577434 0.70711 -1.9658e-07 0.707104 0.577428 0 0.816441 0.431148 0 0.902281 0.272466 -2.67489e-07 0.962166 0.105936 0 0.994373 0.0106489 -2.77991e-07 0.999943 0.105936 -2.76442e-07 0.994373 0.760402 0.000913734 0.649452 0.707109 -0.00107559 0.707104 0.577428 -2.26976e-07 0.816441 0.431148 0 0.902281 0.272466 0 0.962166 0.105936 -2.76442e-07 0.994373 0.0104865 0 0.999945 0.105936 0 0.994373 0.649448 0.000913998 0.760405 0.577428 -0.00105148 0.816441 0.431148 -2.5084e-07 0.902281 0.272466 0 0.962165 0.105936 0 0.994373 0.0104865 -2.77991e-07 0.999945 0.0105928 -1.43272e-07 0.999944 0.522498 0.000865234 0.85264 0.431148 -0.000979056 0.902281 0.272466 -2.67489e-07 0.962166 0.105936 0 0.994373 0.010662 0 0.999943 0.105936 0 0.994373 0.382683 0.00076837 0.92388 0.272466 -0.000857926 0.962165 0.105936 -2.76442e-07 0.994373 0.010662 0 0.999943 0.105936 0 0.994373 0.233445 0.000623418 0.97237 0.105936 -0.000688306 0.994373 0.0105742 -2.77991e-07 0.999944 0.105936 -2.76442e-07 0.994373 0.078456 0.000430009 0.996918 0.382683 -0.0010957 0.923879 0.431148 0.00139532 0.90228 0.577428 0 0.816441 0.70711 -1.9658e-07 0.707104 0.816437 -1.60531e-07 0.577434 0.902281 0 0.431148 0.962164 -7.57483e-08 0.272469 0.994374 0 0.10593 0.999944 0 0.010619 0.994374 0 0.10593 0.522497 -0.00123376 0.85264 0.577428 0.0014986 0.816441 0.70711 0 0.707104 0.816437 -1.60531e-07 0.577434 0.902281 -1.19862e-07 0.431148 0.962164 0 0.272469 0.994374 -2.94493e-08 0.10593 0.999944 0 0.010619 0.994374 0 0.10593 0.649448 -0.00130323 0.760405 0.707109 0.00153301 0.707103 0.816437 0 0.577434 0.902281 -1.19862e-07 0.431148 0.962165 -7.57483e-08 0.272469 0.994374 0 0.10593 0.999944 -2.95164e-09 0.0106172 0.994374 -2.94493e-08 0.10593 0.760402 -0.00130281 0.649452 0.816436 0.00149892 0.577434 0.902281 0 0.431148 0.962165 -7.57483e-08 0.272469 0.994374 -2.94494e-08 0.10593 0.999944 0 0.0106172 0.999944 -1.52139e-09 0.0106183 0.852644 -0.00123405 0.522491 0.90228 0.00139478 0.431148 0.962164 0 0.272469 0.994374 -2.94494e-08 0.10593 0.999944 -2.95212e-09 0.0106189 0.994374 -2.94494e-08 0.10593 0.923876 -0.00109581 0.382691 0.962164 0.00122271 0.272469 0.994374 0 0.10593 0.999944 -2.95212e-09 0.0106189 0.994374 -2.94494e-08 0.10593 0.972371 -0.000889554 0.233438 0.994373 0.000980417 0.10593 0.999944 0 0.0106181 0.994374 0 0.10593 0.996917 -0.000613049 0.0784623 0.994374 0 0.10593 0.962165 0 0.272469 0.962165 0 0.272469 0.962164 -7.57483e-08 0.272469 0.962164 0 0.272469 0.962164 -7.50308e-08 0.272469 0.962164 0 0.272469 0.962165 -7.50309e-08 0.272469 0.962165 -7.50309e-08 0.272469 0.962164 0 0.272469 0.902281 -1.19862e-07 0.431148 0.902281 0 0.431148 0.902281 0 0.431148 0.902281 0 0.431148 0.902281 -1.20363e-07 0.431148 0.902281 0 0.431148 0.902281 -1.20363e-07 0.431148 0.902281 -1.20363e-07 0.431148 0.902281 0 0.431148 0.816437 -1.60531e-07 0.577434 0.816437 0 0.577434 0.816437 -1.61004e-07 0.577434 0.816437 -1.60531e-07 0.577434 0.816437 0 0.577434 0.816437 0 0.577434 0.816437 -1.61004e-07 0.577434 0.816437 -1.61004e-07 0.577434 0.816437 0 0.577434 0.70711 
-1.9658e-07 0.707104 0.70711 0 0.707104 0.70711 0 0.707104 0.70711 0 0.707104 0.70711 0 0.707104 0.70711 -1.9658e-07 0.707104 0.70711 -1.96955e-07 0.707104 0.70711 -1.96955e-07 0.707104 0.70711 0 0.707104 0.577428 -2.26976e-07 0.816441 0.577428 0 0.816441 0.577428 0 0.816441 0.577428 -2.26976e-07 0.816441 0.577428 -2.26656e-07 0.816441 0.577428 -2.26976e-07 0.816441 0.577428 0 0.816441 0.577428 -2.26656e-07 0.816441 0.577428 0 0.816441 0.431148 -2.5084e-07 0.902281 0.431148 0 0.902281 0.431148 0 0.902281 0.431148 -2.5084e-07 0.902281 0.431148 0 0.902281 0.431148 -2.50101e-07 0.902281 0.431148 -2.5084e-07 0.902281 0.431148 -2.5084e-07 0.902281 0.431148 0 0.902281 0.272466 -2.67489e-07 0.962166 0.272466 0 0.962165 0.272466 0 0.962166 0.272466 -2.67489e-07 0.962166 0.272466 0 0.962166 0.272466 -2.67489e-07 0.962166 0.272466 -2.67489e-07 0.962165 0.105936 -2.76442e-07 0.994373 0.105936 0 0.994373 -0.999944 -1.42972e-09 0.0106181 -0.999944 -2.85981e-09 0.0106195 -0.999944 0 0.0106172 -0.999944 -2.85968e-09 0.010619 -0.999944 0 0.0106176 -0.999944 -2.85956e-09 0.0106186 -0.998795 -0.000236105 0.0490764 -0.994373 0.00103396 0.10593 -0.962162 -7.33782e-08 0.272479 -0.902281 0 0.431148 -0.816442 -1.555e-07 0.577427 -0.70711 0 0.707104 -0.577428 -2.19866e-07 0.816441 -0.431153 0 0.902279 -0.272466 0 0.962166 -0.105935 -2.67783e-07 0.994373 -0.0105304 -2.69283e-07 0.999945 -0.0784569 0.000416545 0.996917 -0.233442 0.000604113 0.972371 -0.382678 0.000744446 0.923881 -0.522508 0.000838062 0.852634 -0.649443 0.000885556 0.76041 -0.760402 0.000885111 0.649452 -0.852644 0.000837958 0.522492 -0.923876 0.000744023 0.382691 -0.972369 0.000603995 0.233449 -0.996918 0.000416485 0.0784506 -0.999944 -2.85931e-09 0.0106176 -0.994374 -2.85268e-08 0.10593 -0.962162 -7.33782e-08 0.27248 -0.902281 -1.16107e-07 0.431148 -0.816442 -1.555e-07 0.577427 -0.70711 -1.90422e-07 0.707104 -0.577428 -2.19866e-07 0.816441 -0.431153 -2.42982e-07 0.902279 -0.272466 -2.59109e-07 0.962166 -0.105935 -2.67783e-07 0.994373 -0.105935 -2.67629e-07 0.994373 -0.105935 0 0.994373 -0.0106181 0 0.999944 -0.989174 -7.19391e-05 0.146747 -0.970035 7.94326e-05 0.242966 -0.941505 -0.000310976 0.337 -0.904101 0.000609151 0.427318 -0.902281 0.000774086 0.431148 -0.857646 -0.00034954 0.514241 -0.816442 0.000988539 0.577427 -0.70711 -1.90422e-07 0.707104 -0.577428 0 0.816441 -0.431153 -2.42982e-07 0.902279 -0.272466 0 0.962165 -0.105935 -2.67783e-07 0.994373 -0.0107058 0 0.999943 -0.0105304 -2.69283e-07 0.999945 -0.105935 -2.67629e-07 0.994373 -0.105935 0 0.994373 -0.272466 0 0.962166 -0.272466 0 0.962166 -0.272466 -2.58926e-07 0.962166 -0.272466 -2.59109e-07 0.962166 -0.962161 0.00100441 0.272479 -0.902281 -1.16107e-07 0.431148 -0.816442 0 0.577427 -0.70711 -1.90422e-07 0.707104 -0.577428 0 0.816441 -0.431153 -2.42982e-07 0.902279 -0.431153 0 0.902279 -0.80329 0.000229503 0.595587 -0.740868 -0.000174447 0.671651 -0.671444 -0.000182106 0.741055 -0.595859 0.00023738 0.803089 -0.577428 0.00107837 0.816441 -0.513825 -0.000391611 0.857895 -0.428306 0.000925487 0.903633 -0.33656 -0.00037556 0.941662 -0.24308 0.000103614 0.970006 -0.146636 -9.30655e-05 0.989191 -0.105935 0.00139669 0.994372 -0.0487351 -0.000329578 0.998812 -0.0106054 -2.69283e-07 0.999944 -0.105935 -2.67783e-07 0.994373 -0.272466 -2.59109e-07 0.962166 -0.431153 -2.42982e-07 0.902279 -0.577428 -2.19866e-07 0.816441 -0.70711 -1.90786e-07 0.707104 -0.707109 0.00113809 0.707104 -0.431152 0.000772529 0.902279 -0.431153 -2.42982e-07 0.902279 -0.272465 0.00124794 0.962165 -0.272466 -2.59109e-07 
[Mesh asset data omitted: a wrapped float array of unit vertex normals (x y z triplets) belonging to a newly added 3D model mesh in this diff; the numeric payload carries no readable content and is left out here.]
-0.112363 -0.731328 0.671461 -0.119579 -0.748159 0.652137 -0.122375 -0.693544 0.709108 -0.127135 -0.622237 0.769741 -0.142547 -0.688781 0.713899 -0.126211 -0.996774 0.0786816 -0.0158485 -0.991042 0.13092 -0.0263593 -0.982459 0.182782 -0.0369365 -0.993222 0.113434 -0.0253579 -0.969273 0.241183 -0.0483888 -0.986723 0.158466 -0.0355729 -0.953353 0.295672 -0.0607935 -0.976712 0.209397 -0.0467554 -0.937821 0.339929 -0.0702846 -0.964623 0.25701 -0.0587219 -0.920216 0.38321 -0.0797016 -0.952847 0.295741 -0.0679662 -0.939387 0.334048 -0.0772298 -0.845507 0.523116 -0.10709 -0.878663 0.467202 -0.0983567 -0.907686 0.408606 -0.0956377 -0.878666 0.465777 -0.104872 -0.924328 0.371679 -0.0864403 -0.815433 0.567234 -0.115395 -0.782954 0.609749 -0.123245 -0.748164 0.650534 -0.130601 -0.757815 0.636156 -0.144989 -0.746819 0.646538 -0.155724 -0.723751 0.675911 -0.139025 -0.829083 0.545797 -0.121351 -0.69356 0.705235 -0.147031 -0.757804 0.63897 -0.1321 -0.801397 0.584058 -0.128995 -0.997559 0.0681544 -0.0152304 -0.998229 0.0578153 -0.0140186 -0.998789 0.0476372 -0.0123089 -0.998229 0.057391 -0.0156725 -0.996625 0.0794653 -0.0205839 -0.995074 0.0957072 -0.0258377 -0.995074 0.0963365 -0.023385 -0.997558 0.0676224 -0.0174527 -0.99037 0.134505 -0.0328036 -0.983073 0.178038 -0.0432448 -0.974277 0.218706 -0.0543314 -0.965689 0.25195 -0.0629802 -0.955911 0.284784 -0.0716464 -0.944902 0.317337 -0.0803605 -0.932678 0.349536 -0.0890829 -0.919384 0.381016 -0.0977765 -0.848404 0.515057 -0.122178 -0.814804 0.563265 -0.137211 -0.9996 0.0276148 -0.00603081 -0.99972 0.0229536 -0.00580092 -0.999713 0.023165 -0.00613946 -0.997873 0.0628928 -0.0171506 -0.998835 0.0464081 -0.0131988 -0.997873 0.0626143 -0.0181372 -0.99771 0.0650061 -0.018676 -0.99771 0.0649986 -0.0187016 -0.999143 0.0396193 -0.0119768 -0.993396 0.111027 -0.0289294 -0.995853 0.0877257 -0.024108 -0.995853 0.0874603 -0.0250487 -0.996 0.0858631 -0.0247213 -0.996 0.0859197 -0.0245265 -0.998519 0.0520214 -0.0159372 -0.998519 0.0523475 -0.0148412 -0.999419 0.0322763 -0.0109749 -0.988405 0.146963 -0.0381871 -0.982358 0.180732 -0.0480588 -0.988916 0.14292 -0.0402303 -0.976475 0.208289 -0.0557874 -0.985151 0.165174 -0.0468612 -0.969709 0.235836 -0.0636066 -0.980925 0.186888 -0.0534777 -0.962139 0.263028 -0.0714437 -0.976141 0.208622 -0.0602115 -0.953745 0.289961 -0.0793311 -0.970799 0.230341 -0.0670269 -0.944522 0.316641 -0.0872742 -0.965002 0.251621 -0.0738741 -0.951741 0.294074 -0.0878042 -0.973441 0.218103 -0.0695941 -0.990056 0.132311 -0.0477726 -0.997482 0.0656574 -0.0268041 -0.99772 0.0643022 -0.0204713 -0.991847 0.122088 -0.0365136 -0.989518 0.138226 -0.0418054 -0.996083 0.0839625 -0.0277338 -0.98687 0.154462 -0.047203 -0.995083 0.0938874 -0.0315484 -0.983958 0.170452 -0.0526622 -0.994003 0.103457 -0.0354285 -0.980727 0.186502 -0.0582357 -0.992795 0.11315 -0.0394394 -0.951734 0.29695 -0.0776071 -0.967354 0.241856 -0.0757071 -0.987742 0.147051 -0.052366 -0.973425 0.222051 -0.0560075 -0.997473 0.0699127 -0.0126361 -0.990063 0.136658 -0.0331717 -0.996956 0.0721654 -0.0295047 -0.998211 0.0556833 -0.0217833 -0.998496 0.0512475 -0.0194684 -0.998778 0.0463502 -0.0171808 -0.99925 0.0377135 -0.00884276 -0.999251 0.0365063 -0.0128615 -0.999641 0.0254607 -0.00829857 -0.999889 0.0143541 -0.00396386 -0.999889 0.0142306 -0.00437461 -0.994209 0.101518 -0.035238 -0.927671 0.372797 -0.0211957 0.00101737 0.0438676 0.999037 0.00438038 0.0415158 0.999128 0.00581711 0.0379893 0.999261 0.00622586 0.0357089 0.999343 0.00456248 0.0341182 0.999407 0.0109681 0.233428 0.972312 0.0142979 
0.2232 0.974668 0.0065345 0.310665 0.950497 0.0182081 0.382615 0.923728 0.0113344 0.406398 0.913626 0.0392854 0.36727 0.929284 0.0505355 0.317328 0.946968 0.0553437 0.268324 0.961738 0.0540204 0.220077 0.973985 0.0481567 0.173028 0.983739 0.0425194 0.145609 0.988428 0.0362239 0.118427 0.992302 0.032309 0.109763 0.993433 0.0264589 0.0839368 0.99612 0.0159599 0.0408677 0.999037 0.0165217 0.522437 0.852518 0.0296796 0.492845 0.869611 0.0458123 0.447713 0.893003 0.0602129 0.388133 0.919634 0.0665914 0.329092 0.941947 0.0654179 0.270551 0.96048 0.0585957 0.213072 0.975278 0.0517143 0.179502 0.982397 0.0441497 0.14604 0.988293 0.0376728 0.126682 0.991228 0.0304154 0.0950683 0.995006 0.0185642 0.0466001 0.998741 0.015837 0.562002 0.826984 0.0530601 0.511005 0.857939 0.0697847 0.444341 0.893135 0.0771738 0.377706 0.922704 0.0759211 0.311129 0.94733 0.0681321 0.245412 0.967022 0.0599876 0.207002 0.9765 0.0513002 0.168556 0.984356 0.0431318 0.143548 0.988703 0.0344721 0.106174 0.99375 0.0211934 0.0517776 0.998434 0.0251765 0.760167 0.649239 0.0352787 0.742689 0.668706 0.0221366 0.79468 0.606624 0.028691 0.852288 0.522285 0.0372222 0.840283 0.540869 0.08829 0.780431 0.618977 0.116873 0.694294 0.710138 0.129648 0.601252 0.788472 0.128807 0.502977 0.854648 0.11702 0.40133 0.908427 0.101945 0.340781 0.934599 0.0879053 0.278629 0.956367 0.0716851 0.226698 0.971323 0.0564491 0.166532 0.984419 0.0388035 0.0802058 0.996023 0.00874194 0.923845 0.382666 0.0694625 0.879151 0.471454 0.0949561 0.822871 0.560238 0.125943 0.73665 0.664444 0.139899 0.641147 0.754559 0.139346 0.538539 0.830999 0.126968 0.431094 0.893329 0.110412 0.366704 0.923763 0.0953906 0.300166 0.949105 0.0779232 0.251226 0.964787 0.0664971 0.289039 0.955005 0.0652239 0.304206 0.950371 0.038737 0.0776768 0.996226 0.0215192 0.334997 0.941973 0.0242024 0.930736 0.364889 0.117437 0.875209 0.469273 0.149851 0.791104 0.593042 0.16352 0.694013 0.701147 0.161797 0.586666 0.793501 0.147222 0.471826 0.869313 0.127171 0.402567 0.906514 0.110016 0.330193 0.93748 0.0892703 0.282274 0.955171 0.0891289 0.33611 0.937596 0.0325279 0.971854 0.233326 -0.0306197 0.996451 0.0784138 0.0873577 0.973017 0.213555 0.0520279 0.959358 0.277354 0.119549 0.934542 0.335171 0.107266 0.886246 0.450625 0.147751 0.863131 0.482881 0.157727 0.772988 0.614502 0.151899 0.668017 0.728478 0.132695 0.552549 0.82285 0.108306 0.410836 0.905253 0.125091 0.450893 0.883769 0.13845 0.787624 0.6004 0.148588 0.671211 0.726222 0.141546 0.541551 0.828666 0.102948 0.428816 0.897507 0.00371433 0.0134358 0.999903 0.00498604 0.0171881 0.99984 0.00485491 0.0169481 0.999845 0.00496863 0.0197154 0.999793 0.00599393 0.022693 0.999725 0.0169597 0.0718757 0.997269 0.0289736 0.146176 0.988834 0.03689 0.241522 0.969694 0.041882 0.366995 0.92928 0.00540156 0.0247334 0.99968 0.00679816 0.0291567 0.999552 0.0173784 0.087911 0.995977 0.0262561 0.173428 0.984497 0.0282847 0.280182 0.95953 0.0263972 0.405561 0.913687 0.00596166 0.0293203 0.999552 0.01577 0.104395 0.994411 0.0201694 0.201477 0.979286 0.021453 0.309826 0.950551 0.0041894 0.041182 0.999143 0.00297645 0.0456305 0.998954 0.0147531 0.0874303 0.996061 0.0142239 0.0785738 0.996807 0.0360316 0.154745 0.987297 0.0556927 0.213178 0.975425 0.0692934 0.260154 0.963078 0.0792647 0.318464 0.944615 0.0900956 0.375473 0.922444 0.101055 0.470306 0.876698 0.105367 0.562358 0.820153 0.0983164 0.65062 0.753013 0.0769122 0.734292 0.674462 0.0458336 0.793539 0.606791 0.081467 0.733807 0.674456 0.0467381 0.839787 0.540901 0.0132206 0.0666641 0.997688 0.0318226 0.13152 
0.990803 0.0478866 0.178976 0.982687 0.0583398 0.21511 0.974846 0.066693 0.264012 0.962211 0.0759097 0.312092 0.947014 0.0862191 0.392894 0.915533 0.0906234 0.472945 0.876419 0.0856547 0.551721 0.829619 0.0685878 0.629019 0.774358 0.042211 0.686044 0.726334 0.0327196 0.625902 0.779215 0.0210998 0.649292 0.760246 0.0118033 0.054602 0.998438 0.0109847 0.0488736 0.998745 0.00999092 0.0426619 0.99904 0.00899389 0.037036 0.999273 0.0194863 0.073505 0.997104 0.0256562 0.0891873 0.995684 0.025705 0.0899429 0.995615 0.029087 0.110886 0.993407 0.0331414 0.131722 0.990732 0.0386324 0.16723 0.985161 0.0411277 0.203768 0.978155 0.0397189 0.241079 0.969692 0.040703 0.203852 0.978155 0.0509149 0.317268 0.946968 0.00597958 0.022508 0.999729 0.0124044 0.0448639 0.998916 0.014083 0.0479871 0.998749 0.011285 0.038549 0.999193 0.0127432 0.0475514 0.998788 0.0145126 0.0565262 0.998296 0.0170488 0.071855 0.997269 0.015065 0.0563836 0.998296 0.0279751 0.119655 0.992421 0.0283142 0.119577 0.992421 0.029833 0.146008 0.988833 0.00274814 0.00957195 0.99995 0.00552441 0.0191616 0.999801 0.00331746 0.0109475 0.999935 0.00345847 0.0112908 0.99993 0.00813534 0.0289272 0.999548 0.00858426 0.0287929 0.999549 0.00555616 0.0191522 0.999801 0.01821 0.0877482 0.995976 0.0367272 0.930285 0.364994 0.0470152 0.880593 0.471536 0.0192249 0.687226 0.726189 0.0443088 0.74219 0.668723 0.0674228 0.629143 0.77436 0.0395384 0.62551 0.779214 0.0602795 0.571587 0.818324 0.0634281 0.571253 0.81832 0.0793401 0.498784 0.863087 0.0784306 0.498927 0.863088 0.0877626 0.42516 0.900853 0.0864688 0.351124 0.932328 0.0777652 0.277453 0.957587 0.068305 0.234179 0.969791 0.0584966 0.190837 0.979877 0.0486729 0.160336 0.985862 0.0386197 0.1172 0.992357 0.0239474 0.0571389 0.998079 0.0363562 0.560895 0.827088 0.0326947 0.492668 0.869603 0.0100336 0.078453 0.996867 0.00198851 0.13502 0.990841 0.0121656 0.121424 0.992526 0.0149728 0.121129 0.992524 0.0158734 0.223108 0.974664 0.0099287 0.134566 0.990855 0.0766115 0.879674 0.469366 0.0823107 0.824222 0.560253 0.107646 0.649151 0.753005 0.0799899 0.781317 0.618988 0.0745038 0.683281 0.726345 0.0730996 0.68343 0.726347 0.0888538 0.551218 0.829617 0.0983001 0.601384 0.79289 0.108845 0.516257 0.849489 0.107638 0.428799 0.896965 0.0972654 0.340331 0.935262 0.0850714 0.28801 0.953841 0.073092 0.235089 0.969222 0.0600129 0.193695 0.979225 0.0472162 0.139226 0.989134 0.0297398 0.0677436 0.997259 0.0576515 0.510518 0.857932 0.0513069 0.447132 0.892995 0.0331209 0.279672 0.959524 0.0242618 0.201046 0.979281 0.107775 0.797846 0.59315 0.108356 0.739426 0.664461 0.119286 0.559617 0.820122 0.103678 0.696374 0.71015 0.098326 0.471432 0.876404 0.0922962 0.602328 0.792895 0.07067 0.444202 0.893135 0.0624045 0.387789 0.919633 0.0289015 0.173013 0.984495 0.017699 0.104093 0.99441 0.121038 0.702482 0.701334 0.118039 0.645461 0.754617 0.118238 0.466312 0.876683 0.111983 0.604734 0.788516 0.097047 0.390379 0.915525 0.0982428 0.518342 0.849512 0.0825387 0.426186 0.900862 0.0740144 0.378326 0.922709 0.0650853 0.329387 0.941949 0.039823 0.166953 0.98516 0.0530721 0.268769 0.961741 0.119317 0.596608 0.793613 0.114341 0.544356 0.831025 0.107113 0.371061 0.92241 0.107898 0.50784 0.854668 0.0874796 0.309098 0.946998 0.0938234 0.432009 0.896976 0.0782638 0.353029 0.932333 0.0699764 0.312509 0.947334 0.0613834 0.271487 0.960483 0.0354968 0.131114 0.990732 0.0500431 0.221001 0.973989 0.0251939 0.0938295 0.995269 0.0224216 0.0788295 0.996636 0.0190485 0.0640601 0.997764 0.0192585 0.0673522 0.997543 0.0177537 0.0591554 0.998091 0.0109287 
0.0289216 0.999522 0.109519 0.481664 0.869486 0.103417 0.437211 0.893393 0.0934974 0.314607 0.944606 0.0968665 0.406558 0.908475 0.0766737 0.261307 0.962206 0.0831008 0.343999 0.935286 0.0685166 0.279844 0.957597 0.0609492 0.24727 0.967028 0.053209 0.214464 0.975281 0.0315014 0.110232 0.993407 0.0431585 0.17432 0.983743 0.0134466 0.0473609 0.998787 0.0238998 0.0941638 0.99527 0.0969724 0.410771 0.906567 0.0911699 0.37192 0.923777 0.0853003 0.345295 0.93461 0.0804747 0.257003 0.963054 0.0796704 0.304601 0.949142 0.0658029 0.21016 0.97545 0.074553 0.282392 0.956398 0.0730568 0.291266 0.953848 0.0657679 0.213018 0.974832 0.0543011 0.177061 0.982701 0.0638847 0.237682 0.96924 0.0601661 0.236385 0.969795 0.0535092 0.208757 0.976503 0.0526652 0.192478 0.979887 0.0466933 0.180862 0.982399 0.0468657 0.169805 0.984363 0.0378992 0.14686 0.988431 0.0268047 0.0896336 0.995614 0.0409196 0.146948 0.988297 0.0255569 0.089217 0.995684 0.0339666 0.119063 0.992306 0.0209768 0.0792207 0.996636 0.0114149 0.0385124 0.999193 0.0136275 0.0481233 0.998748 0.0185514 0.0641998 0.997765 0.0594289 0.230357 0.97129 0.0516481 0.150121 0.987318 0.0327801 0.0729961 0.996793 0.0382837 0.171908 0.984369 0.0518529 0.196132 0.979206 0.0428726 0.128249 0.990815 0.0268062 0.0625883 0.997679 0.033979 0.143151 0.989117 0.0438029 0.161792 0.985852 0.0396091 0.1446 0.988697 0.029559 0.119884 0.992348 0.0353031 0.12739 0.991224 0.0272015 0.108331 0.993743 0.03089 0.110187 0.993431 0.0225611 0.0725928 0.997106 0.0247394 0.0967501 0.995001 0.0135336 0.0356739 0.999272 0.0221749 0.0852058 0.996117 0.0194774 0.0672868 0.997544 0.0132966 0.0445992 0.998917 0.0078747 0.0219385 0.999728 0.0160681 0.0596547 0.99809 0.00938072 0.0319486 0.999445 0.0090386 0.03205 0.999445 0.00321465 0.00943175 0.99995 0.00547287 0.015647 0.999863 0.00272717 0.00909676 0.999955 0.00119292 0.00402624 0.999991 0.0661425 0.254739 0.964745 0.0137648 0.0725417 0.99727 0.012554 0.06056 0.998086 0.00760861 0.0299182 0.999523 0.00441062 0.0159658 0.999863 -0.0784127 0.996431 0.0312383 -0.0801999 0.99656 0.0208787 -0.0755132 0.99656 0.0341432 -0.0686615 0.99656 0.0464084 -0.0598306 0.99656 0.0573451 -0.0492861 0.99656 0.0666253 -0.0373074 0.99656 0.0740011 -0.0242667 0.99656 0.0792413 -0.0105255 0.99656 0.0822026 -0.0990696 0.992014 0.0780646 -0.382654 0.923821 0.0114392 -0.397825 0.915716 0.0565715 -0.522357 0.852387 0.0240977 -0.543075 0.836908 0.0682261 -0.530649 0.836255 0.13816 -0.499656 0.836259 0.225863 -0.4543 0.836257 0.30706 -0.395882 0.836259 0.379406 -0.326055 0.836258 0.440864 -0.246863 0.836256 0.489627 -0.160567 0.836256 0.524303 -0.0696485 0.836259 0.543894 0.0646005 0.75882 0.648089 -0.09892 0.834753 0.541665 0.0698215 0.850558 0.521225 -0.0985741 0.912717 0.396522 -0.0511747 0.915242 0.399641 -0.117984 0.915243 0.38524 -0.181388 0.915242 0.359765 -0.239584 0.915243 0.323928 -0.290874 0.915241 0.278794 -0.333815 0.915242 0.225609 -0.367133 0.915243 0.165961 -0.389904 0.915241 0.101533 -0.389902 0.915243 0.101517 -0.53065 0.836254 0.138163 -0.649029 0.759933 0.0355281 -0.673218 0.735239 0.078748 -0.656711 0.734506 0.170972 -0.61836 0.7345 0.279537 -0.562232 0.734504 0.379998 -0.489926 0.734497 0.469561 -0.403529 0.734505 0.545588 -0.305508 0.734506 0.605942 -0.198706 0.734503 0.648861 -0.0861937 0.734502 0.67311 0.0602526 0.648257 0.759034 -0.0994875 0.733871 0.671964 -0.759626 0.648769 0.0454724 -0.784756 0.613559 0.0877688 -0.764726 0.612821 0.19911 -0.720063 0.612836 0.325487 -0.654698 0.612834 0.442499 -0.570497 0.612837 0.546775 -0.469892 
0.612834 0.635324 -0.355749 0.612828 0.70561 -0.231396 0.612833 0.755574 -0.100373 0.612837 0.783809 0.0567926 0.521665 0.851258 -0.0999945 0.612853 0.783845 -0.85141 0.521746 0.0536814 -0.874696 0.475257 0.0950692 -0.851789 0.474623 0.221783 -0.802054 0.474634 0.362535 -0.72924 0.474628 0.492887 -0.635459 0.474632 0.609029 -0.523391 0.474626 0.707667 -0.396259 0.474631 0.785942 -0.257741 0.474629 0.841604 -0.111799 0.474623 0.87306 0.0541224 0.382118 0.922527 -0.100595 0.474994 0.874221 -0.922213 0.381999 0.0599978 -0.940669 0.324122 0.100436 -0.915645 0.323653 0.238417 -0.862186 0.323638 0.389735 -0.783918 0.323645 0.529836 -0.683104 0.323645 0.654693 -0.562626 0.32364 0.760729 -0.425975 0.32365 0.844865 -0.277063 0.323649 0.904703 -0.120179 0.32365 0.938514 0.0523479 0.233122 0.971037 -0.101041 0.324092 0.940614 -0.970358 0.232965 0.0642907 -0.980947 0.164277 0.103712 -0.954624 0.16402 0.248579 -0.898892 0.164037 0.406307 -0.817285 0.164032 0.552393 -0.712171 0.164011 0.682578 -0.58659 0.164029 0.793099 -0.444103 0.164019 0.880835 -0.288862 0.164022 0.943216 -0.125295 0.16402 0.978467 0.0514565 0.078353 0.995597 -0.101342 0.164304 0.98119 -0.994715 0.0782776 0.0664466 -0.995453 0.0541892 0.0783353 -0.971927 0.0301342 0.233343 -0.965786 0.0633415 0.251486 -0.923672 0.0210316 0.382606 -0.908907 0.0714296 0.410835 -0.852563 0.0138208 0.522442 -0.760374 0.00862257 0.649428 -0.649433 0.00537446 0.760399 -0.522504 0.00404781 0.852627 -0.382674 0.00471523 0.923871 -0.233436 0.00730793 0.972345 -0.0784514 0.0118806 0.996847 -0.126616 0.0792305 0.988783 -0.826008 0.0776128 0.558289 -0.719526 0.0818394 0.689627 -0.592538 0.0840888 0.801141 -0.448592 0.0844303 0.88974 -0.291822 0.0828007 0.952882 0.0757513 0.921226 0.381581 -0.0985992 0.965611 0.240568 -0.0312856 0.969186 0.244335 -0.0721293 0.969186 0.235534 -0.110892 0.969186 0.21996 -0.146494 0.969187 0.198032 -0.177835 0.969186 0.170448 -0.204085 0.969186 0.137941 -0.224457 0.969185 0.101488 -0.238382 0.969187 0.0620588 -0.233447 0.972367 -0.00220691 -0.241512 0.969394 0.0441207 0.0826188 0.969045 0.232651 -0.954625 0.164032 0.248567 -0.915645 0.323661 0.238409 -0.851789 0.474627 0.221779 -0.764722 0.612832 0.199092 -0.656713 0.734502 0.170982 -0.238382 0.969185 0.0620762 -0.898887 0.164019 0.406325 -0.862188 0.323654 0.389717 -0.802054 0.474623 0.362549 -0.720064 0.612821 0.325513 -0.618359 0.734507 0.27952 -0.367133 0.915242 0.165968 -0.499657 0.836255 0.225878 -0.224461 0.969186 0.101466 -0.817287 0.164036 0.55239 -0.783916 0.323639 0.529842 -0.729241 0.474633 0.49288 -0.654699 0.612836 0.442493 -0.562231 0.734499 0.38001 -0.333813 0.915241 0.225616 -0.454302 0.836259 0.307051 -0.204088 0.969187 0.137933 -0.712182 0.164029 0.682562 -0.683104 0.323646 0.654692 -0.635457 0.474628 0.609034 -0.570495 0.612834 0.546782 -0.489932 0.734505 0.469542 -0.29088 0.915243 0.278781 -0.39588 0.836257 0.379413 -0.177834 0.969186 0.170449 -0.586577 0.16401 0.793113 -0.562631 0.323647 0.760723 -0.523394 0.47463 0.707661 -0.469895 0.612838 0.635318 -0.40352 0.734497 0.545606 -0.239579 0.915242 0.323935 -0.326056 0.836259 0.440861 -0.14648 0.969185 0.198048 -0.444109 0.164026 0.880831 -0.425969 0.323643 0.84487 -0.396253 0.474625 0.785948 -0.355755 0.612834 0.705601 -0.305507 0.734506 0.605943 -0.181391 0.915243 0.359761 -0.246867 0.836258 0.489622 -0.110898 0.969186 0.219955 -0.288858 0.164018 0.943218 -0.277065 0.323651 0.904702 -0.257742 0.47463 0.841603 -0.231388 0.612827 0.755581 -0.198711 0.734506 0.648857 -0.117979 0.915242 0.385244 -0.160566 0.836256 
0.524304 -0.0721339 0.969186 0.235531 -0.125295 0.16402 0.978467 -0.12018 0.323652 0.938513 -0.111802 0.474626 0.873058 -0.100369 0.612834 0.783811 -0.0861952 0.734503 0.673109 -0.0511709 0.915241 0.399643 -0.0696415 0.836256 0.543898 -0.0312874 0.969186 0.244334 0 0.0105742 0.999944 0.000680308 0.105935 0.994373 0.000848014 0.272466 0.962165 0.000968123 0.431152 0.902279 0.0010393 0.577428 0.816441 0.00106388 0.707109 0.707104 0.00104003 0.816442 0.577427 0.000968414 0.902277 0.431157 0.000848226 0.962164 0.272469 0.000680688 0.994373 0.10593 0 0.999944 0.0106181 -0.000425215 0.996918 0.0784506 0.000742057 0.233442 0.97237 -0.00101987 0.272466 0.962165 0.000914343 0.382678 0.923881 -0.00116432 0.431152 0.902278 0.00102926 0.522508 0.852634 0.00108735 0.649437 0.760415 0.00108673 0.760408 0.649445 0.00102867 0.852639 0.5225 0.000914075 0.92388 0.38268 0.000741667 0.972369 0.233449 0.000511389 0.996918 0.0784506 -0.000818638 0.994373 0.10593 -0.00124993 0.577428 0.816441 -0.00127948 0.707109 0.707103 -0.00125081 0.816442 0.577427 -0.00116467 0.902276 0.431157 -0.00102013 0.962164 0.272469 -0.000616688 0.972369 0.233449 -0.000760043 0.92388 0.382681 -0.000855331 0.852639 0.5225 -0.000903604 0.760408 0.649445 -0.000904117 0.649437 0.760415 -0.00085582 0.522508 0.852634 -0.000760266 0.382678 0.923881 -0.000617012 0.233442 0.97237 -0.000425529 0.0784569 0.996917 0.000511767 0.0784569 0.996917 -0.00081818 0.105935 0.994373 -0.0105742 0.999944 3.00208e-07 -0.10593 0.994373 -0.000742663 -0.272471 0.962164 -0.000925669 -0.431157 0.902276 -0.0010571 -0.577424 0.816444 -0.00113479 -0.707106 0.707106 -0.00116123 -0.816444 0.577424 -0.00113493 -0.902281 0.431148 -0.00105703 -0.962161 0.272481 -0.000926503 -0.994373 0.10593 -0.000743196 -0.999944 0.0106176 3.18768e-09 -0.996918 0.078451 0.000464388 -0.233448 0.972369 -0.000673554 -0.272471 0.962164 0.000926247 -0.382678 0.923881 -0.000829939 -0.431157 0.902276 0.00105765 -0.522508 0.852634 -0.00093405 -0.649439 0.760413 -0.000987149 -0.760412 0.64944 -0.00098718 -0.852639 0.5225 -0.000934368 -0.923877 0.382689 -0.000829549 -0.972369 0.233448 -0.000673553 -0.996918 0.078451 -0.000464341 -0.994373 0.10593 0.00074326 -0.577424 0.816444 0.00113528 -0.707106 0.707106 0.00116166 -0.816444 0.577424 0.00113528 -0.902281 0.431148 0.00105729 -0.962161 0.272481 0.000926666 -0.972369 0.233448 0.000673694 -0.923877 0.382689 0.000829778 -0.852639 0.5225 0.000934682 -0.760412 0.64944 0.00098757 -0.649439 0.760413 0.000987606 -0.522508 0.852634 0.000934562 -0.382678 0.923881 0.000830494 -0.233448 0.972369 0.000674138 -0.078451 0.996918 0.000464939 -0.10593 0.994373 0.00074326 -0.078451 0.996918 -0.000464341 -0.999037 0.0438575 0.00101713 -0.999129 0.0415044 0.00438006 -0.999261 0.0379855 0.00581096 -0.999342 0.0357376 0.00623079 -0.999406 0.034146 0.0045666 -0.972311 0.233434 0.0109662 -0.974666 0.22321 0.0142946 -0.950497 0.310665 0.00653213 -0.923724 0.382625 0.0182073 -0.91362 0.406412 0.0113326 -0.929292 0.36725 0.0392954 -0.946968 0.317331 0.0505317 -0.961732 0.268346 0.0553445 -0.973989 0.220062 0.0540219 -0.983732 0.173065 0.0481577 -0.988429 0.145602 0.0425298 -0.992298 0.118458 0.0362255 -0.993442 0.109685 0.0322935 -0.996148 0.0836266 0.0263912 -0.999038 0.0408432 0.0159537 -0.852523 0.522428 0.016519 -0.869611 0.492845 0.0296729 -0.892987 0.447744 0.0458117 -0.919653 0.388088 0.0602209 -0.941955 0.329071 0.0665867 -0.960481 0.270548 0.0654189 -0.975284 0.213047 0.0585909 -0.982385 0.179567 0.0517253 -0.988282 0.146114 0.0441578 -0.991225 0.126701 0.0376833 
-0.994991 0.0952205 0.0304356 -0.998755 0.0463135 0.0185318 -0.827 0.561979 0.0158355 -0.857938 0.511006 0.053055 -0.89313 0.44435 0.0697906 -0.922716 0.377676 0.0771736 -0.94733 0.31113 0.0759189 -0.96702 0.245419 0.0681305 -0.97649 0.207049 0.0599913 -0.984357 0.168549 0.0513015 -0.988676 0.143723 0.0431624 -0.993777 0.105934 0.0344406 -0.998418 0.0520659 0.0212239 -0.649234 0.760172 0.0251802 -0.668706 0.742689 0.0352856 -0.606602 0.794697 0.0221384 -0.522293 0.852283 0.0286904 -0.54089 0.840269 0.0372274 -0.618984 0.780425 0.0882916 -0.710146 0.694286 0.116876 -0.788465 0.601262 0.129646 -0.854646 0.502981 0.128802 -0.908418 0.401353 0.117018 -0.934608 0.340755 0.101955 -0.956356 0.278668 0.0879011 -0.971312 0.226746 0.0716885 -0.984412 0.16657 0.0564491 -0.996023 0.0801995 0.038804 -0.382664 0.923846 0.00874257 -0.471444 0.879156 0.069458 -0.560231 0.822876 0.0949545 -0.66442 0.736672 0.125936 -0.75455 0.641157 0.139899 -0.83099 0.538553 0.139341 -0.893328 0.431097 0.126967 -0.923786 0.366639 0.110429 -0.949089 0.300221 0.0953767 -0.964789 0.251217 0.077925 -0.955004 0.289044 0.066492 -0.950379 0.30418 0.0652215 -0.996224 0.0777053 0.0387384 -0.941971 0.335004 0.0215224 -0.364893 0.930735 0.024204 -0.46927 0.875212 0.117433 -0.593049 0.791098 0.149855 -0.701143 0.694018 0.163516 -0.793522 0.586636 0.161804 -0.869317 0.471822 0.147216 -0.906492 0.402616 0.127176 -0.937487 0.330175 0.110012 -0.95517 0.282279 0.0892715 -0.937603 0.336091 0.0891303 -0.233324 0.971855 0.0325291 -0.0784142 0.996451 -0.0306187 -0.213554 0.973018 0.0873569 -0.277353 0.959358 0.0520258 -0.335174 0.934541 0.11955 -0.450622 0.886247 0.107267 -0.482878 0.863133 0.14775 -0.614502 0.772987 0.157729 -0.728474 0.668022 0.151899 -0.822868 0.552522 0.132695 -0.905242 0.410863 0.1083 -0.883747 0.450935 0.125096 -0.600395 0.787628 0.138447 -0.726218 0.671214 0.14859 -0.828682 0.541527 0.141543 -0.897505 0.428819 0.10295 -0.999902 0.0135071 0.0037341 -0.99984 0.0171646 0.00497369 -0.999844 0.0169774 0.00487141 -0.999792 0.019788 0.0049869 -0.999725 0.0226898 0.00598607 -0.997268 0.071894 0.0169658 -0.98884 0.146139 0.0289744 -0.969697 0.24151 0.036891 -0.929287 0.366976 0.0418827 -0.99968 0.0247163 0.00539779 -0.999552 0.0291564 0.00679965 -0.995974 0.0879419 0.0173812 -0.984494 0.17344 0.0262567 -0.959531 0.280181 0.0282825 -0.913681 0.405574 0.0263963 -0.999552 0.0293196 0.00596533 -0.994412 0.104383 0.0157688 -0.979283 0.201488 0.0201671 -0.950551 0.309827 0.0214505 -0.999143 0.0411687 0.00418801 -0.998954 0.0456201 0.00297576 -0.996062 0.0874271 0.014756 -0.996819 0.0784203 0.0142178 -0.987298 0.154741 0.0360272 -0.97546 0.213021 0.0556782 -0.963098 0.260083 0.0692789 -0.944639 0.318394 0.0792597 -0.922433 0.3755 0.0900932 -0.876661 0.470376 0.101051 -0.820175 0.562325 0.105375 -0.753014 0.650618 0.0983144 -0.674455 0.734299 0.0769116 -0.606769 0.793556 0.0458281 -0.674449 0.733813 0.0814694 -0.540922 0.839773 0.0467469 -0.997688 0.0666575 0.0132229 -0.990793 0.131593 0.0318284 -0.982699 0.178918 0.0478743 -0.974871 0.215001 0.0583279 -0.962223 0.263969 0.0666825 -0.947003 0.312126 0.0759149 -0.915546 0.392863 0.0862162 -0.876405 0.472973 0.0906204 -0.829626 0.551709 0.0856603 -0.774377 0.628996 0.0685895 -0.726334 0.686044 0.0422069 -0.779221 0.625894 0.0327251 -0.760243 0.649296 0.0211 -0.998423 0.0548843 0.0118413 -0.998759 0.0485914 0.010942 -0.999041 0.0426347 0.00998904 -0.999274 0.0370341 0.00899645 -0.997088 0.073718 0.0195275 -0.995685 0.0891866 0.0256523 -0.995621 0.0898873 0.0256887 -0.993415 0.110821 
0.0290762 -0.990732 0.131723 0.0331392 -0.985156 0.167254 0.0386363 -0.978155 0.203769 0.0411276 -0.969696 0.241066 0.0397224 -0.978155 0.203852 0.0407065 -0.946967 0.31727 0.0509174 -0.999741 0.0219801 0.00585823 -0.998919 0.0447988 0.0123841 -0.998743 0.0480897 0.0141096 -0.999199 0.0384104 0.0112487 -0.998786 0.0475862 0.0127454 -0.9983 0.0564539 0.0145046 -0.997268 0.0718753 0.0170465 -0.9983 0.0563127 0.015051 -0.992424 0.119636 0.027972 -0.992424 0.119557 0.028311 -0.988839 0.145971 0.0298288 -0.999952 0.00943683 0.00271308 -0.999797 0.0193645 0.00557522 -0.999932 0.011148 0.00338317 -0.999929 0.0113863 0.0034811 -0.999554 0.0287403 0.00808311 -0.999554 0.0286071 0.00852876 -0.999797 0.0193508 0.00562138 -0.995973 0.0877784 0.0182169 -0.364998 0.930284 0.0367289 -0.471526 0.880598 0.0470123 -0.726189 0.687226 0.0192287 -0.668723 0.74219 0.04431 -0.774379 0.629119 0.0674284 -0.77922 0.625503 0.039535 -0.818331 0.571578 0.0602761 -0.818326 0.571243 0.0634344 -0.863067 0.498819 0.0793371 -0.863068 0.498961 0.078432 -0.900855 0.425156 0.0877684 -0.932325 0.351133 0.0864652 -0.957589 0.277447 0.077763 -0.969802 0.234133 0.0683025 -0.97987 0.190874 0.058495 -0.985887 0.160185 0.0486538 -0.992341 0.117332 0.0386362 -0.998077 0.0571805 0.0239497 -0.827104 0.560871 0.03636 -0.869603 0.492667 0.0327031 -0.996868 0.0784471 0.0100344 -0.990839 0.13503 0.00198704 -0.992523 0.121452 0.0121642 -0.99252 0.121157 0.0149696 -0.974662 0.223118 0.0158713 -0.990853 0.134576 0.00993049 -0.469363 0.879676 0.0766101 -0.560246 0.824226 0.0823107 -0.753006 0.649148 0.107649 -0.618995 0.78131 0.0799933 -0.726339 0.683287 0.0745016 -0.726341 0.683437 0.0730954 -0.829624 0.551207 0.0888512 -0.792904 0.601365 0.0983033 -0.849489 0.516258 0.108842 -0.896982 0.428762 0.107644 -0.935289 0.340259 0.0972583 -0.953825 0.288065 0.0850708 -0.969213 0.235123 0.0730946 -0.979184 0.193894 0.0600358 -0.989144 0.139158 0.0472121 -0.997248 0.0679059 0.0297474 -0.857931 0.510519 0.0576594 -0.89298 0.447163 0.0513033 -0.959525 0.279671 0.0331206 -0.979279 0.201057 0.0242626 -0.593157 0.79784 0.107777 -0.664437 0.739448 0.108351 -0.820143 0.559586 0.11929 -0.710158 0.696366 0.103682 -0.87639 0.471459 0.0983259 -0.792909 0.60231 0.0922981 -0.89313 0.444213 0.0706666 -0.919652 0.387746 0.0624013 -0.984493 0.173025 0.0289024 -0.994411 0.10408 0.0177012 -0.70133 0.702486 0.121034 -0.754608 0.645473 0.118034 -0.876646 0.466382 0.118232 -0.788508 0.604744 0.111978 -0.915538 0.390348 0.0970484 -0.849511 0.518342 0.0982457 -0.900864 0.426183 0.0825364 -0.922721 0.378296 0.0740133 -0.941957 0.329365 0.065086 -0.985156 0.166979 0.0398238 -0.961735 0.268791 0.0530731 -0.793635 0.596579 0.119318 -0.831016 0.54437 0.114338 -0.922399 0.371089 0.107112 -0.854666 0.507843 0.107898 -0.946987 0.309134 0.0874756 -0.896993 0.431976 0.0938151 -0.93233 0.353038 0.0782623 -0.947334 0.31251 0.0699746 -0.960483 0.271486 0.0613798 -0.990732 0.131113 0.0354984 -0.973992 0.220985 0.0500423 -0.995269 0.09383 0.0251928 -0.996636 0.0788238 0.0224178 -0.99776 0.0641265 0.0190659 -0.997547 0.067306 0.019245 -0.998084 0.0592711 0.0177808 -0.999512 0.0292458 0.0109912 -0.869488 0.481655 0.109538 -0.893392 0.43721 0.103429 -0.94463 0.314535 0.0935003 -0.908466 0.406578 0.096874 -0.962218 0.261259 0.0766795 -0.935313 0.343924 0.0831059 -0.957599 0.279839 0.0685146 -0.967026 0.247275 0.0609519 -0.975287 0.214436 0.053216 -0.993414 0.110166 0.0314939 -0.983736 0.174354 0.0431703 -0.998786 0.047395 0.013451 -0.99527 0.0941648 0.0238966 -0.906544 0.410826 0.0969563 
-0.9238 0.371864 0.0911614 -0.934619 0.345271 0.0852985 -0.963074 0.25693 0.08047 -0.949127 0.304648 0.0796741 -0.975485 0.210004 0.0657824 -0.956386 0.28243 0.074556 -0.953832 0.291321 0.0730598 -0.974857 0.21291 0.0657561 -0.982712 0.177 0.0542944 -0.969231 0.237716 0.0638928 -0.969806 0.23634 0.060161 -0.976492 0.208804 0.0535131 -0.97988 0.192514 0.0526693 -0.982387 0.180928 0.0467006 -0.984364 0.169797 0.0468706 -0.988432 0.146856 0.0378987 -0.99562 0.0895762 0.0267949 -0.988286 0.14702 0.0409383 -0.995684 0.0892167 0.0255518 -0.992302 0.119092 0.0339702 -0.996637 0.079212 0.0209848 -0.999199 0.0383704 0.011391 -0.998743 0.0482269 0.0136506 -0.99776 0.0642678 0.0185635 -0.971278 0.230406 0.0594291 -0.987319 0.150115 0.0516432 -0.996804 0.0728537 0.0327698 -0.984362 0.171947 0.0382839 -0.979164 0.196333 0.0518704 -0.990806 0.128321 0.0428735 -0.99768 0.0625764 0.0268057 -0.989128 0.143081 0.0339735 -0.985878 0.16164 0.043787 -0.98867 0.144778 0.0396338 -0.992331 0.120019 0.0295703 -0.991221 0.127412 0.0353035 -0.99377 0.108087 0.0271748 -0.99344 0.110113 0.0308625 -0.99709 0.0728083 0.0225978 -0.994986 0.0969024 0.0247612 -0.999272 0.0356699 0.013541 -0.996145 0.0848903 0.0221277 -0.997547 0.0672388 0.0194698 -0.99892 0.044532 0.0132838 -0.999741 0.021408 0.00776213 -0.998082 0.0597707 0.0160958 -0.99945 0.0318165 0.00933875 -0.99945 0.0319142 0.00900894 -0.999952 0.00929516 0.00318493 -0.999858 0.0159074 0.00552942 -0.999957 0.00891779 0.00267188 -0.99999 0.00420326 0.00124536 -0.964747 0.254733 0.0661373 -0.997259 0.0727002 0.0137824 -0.998083 0.0606 0.0125614 -0.999513 0.0302416 0.00767446 -0.999858 0.0162231 0.00447763 -0.032075 0.996405 0.0784102 -0.0513619 0.958937 0.278931 -0.0955931 0.908708 0.40634 -0.0803649 0.858319 0.506784 -0.0771925 0.816618 0.571994 -0.0734304 0.770055 0.633738 -0.0691241 0.718874 0.691695 -0.0642965 0.663384 0.745512 -0.0589821 0.603927 0.794854 -0.0532207 0.540891 0.839407 -0.0470441 0.474587 0.878951 -0.0410755 0.389991 0.919902 -0.03055 0.297793 0.954141 -0.0222521 0.214323 0.976509 -0.0136802 0.129216 0.991522 -0.00482791 0.0421352 0.9991 -0.00414697 0.0407652 0.99916 -0.00559816 0.0386759 0.999236 -0.00504246 0.0377044 0.999276 -0.00613058 0.0351702 0.999363 -0.0192734 0.0996459 0.994836 -0.0318617 0.147577 0.988537 -0.0427066 0.181469 0.982469 -0.0528596 0.207937 0.976713 -0.057095 0.217972 0.974284 -0.0562934 0.208236 0.976457 -0.0513466 0.187533 0.980915 -0.0416098 0.156028 0.986876 -0.0271607 0.10601 0.993994 -0.0117221 0.0590761 0.998185 -0.0410385 0.96662 0.252908 -0.00765279 0.94357 0.331086 -0.0311462 0.923432 0.382495 -0.0155855 0.896616 0.442534 -0.0247249 0.856652 0.515302 -0.0284488 0.852294 0.522289 -0.0135352 0.811318 0.584448 -0.0436511 0.772327 0.633724 -0.0504353 0.809843 0.584475 -0.0469204 0.818924 0.571981 -0.0525744 0.855425 0.515251 -0.0500344 0.860628 0.50677 -0.042767 0.912728 0.406323 -0.068855 0.941083 0.331092 -0.0251977 0.760167 0.649239 -0.0248939 0.759783 0.649701 -0.0402249 0.721081 0.691682 -0.0478369 0.758751 0.649622 -0.0113416 0.703763 0.710344 -0.0366747 0.665496 0.7455 -0.0448029 0.702409 0.710362 -0.0220609 0.522381 0.852427 -0.0144745 0.507039 0.861802 -0.0050343 0.418012 0.908428 -0.0190381 0.382609 0.923714 -0.00283029 0.319732 0.947504 -0.015352 0.298961 0.954142 -0.0221082 0.318945 0.947515 -0.0191896 0.391674 0.919904 -0.0301267 0.416979 0.908417 -0.0254193 0.476246 0.878945 -0.0292519 0.542727 0.8394 -0.0330085 0.605915 0.794844 -0.0413587 0.641265 0.766204 -0.0196746 0.642212 0.766274 -0.0239884 0.64925 
0.760197 -0.0130554 0.233422 0.972288 -0.0113527 0.229989 0.973127 -0.0110566 0.215184 0.976511 -0.0161997 0.229786 0.973106 0.000519536 0.139088 0.99028 -0.00678379 0.129746 0.991524 -0.0100594 0.138655 0.99029 -0.0126351 0.0784507 0.996838 -0.00108416 0.0467471 0.998906 -0.00285586 0.0437821 0.999037 -0.00370303 0.0457556 0.998946 -0.00613264 0.0351745 0.999362 -0.00608702 0.0295104 0.999546 -0.0193818 0.0887251 0.995868 -0.030833 0.129956 0.99104 -0.0398561 0.15746 0.986721 -0.0474642 0.176897 0.983084 -0.0491385 0.180417 0.982362 -0.0456887 0.165343 0.985177 -0.0377002 0.139472 0.989508 -0.0249704 0.095754 0.995092 -0.0111203 0.0535736 0.998502 -0.0065913 0.0290321 0.999557 -0.018728 0.0780692 0.996772 -0.028747 0.112645 0.993219 -0.0358109 0.133807 0.99036 -0.0407963 0.146255 0.988405 -0.0399184 0.143168 0.988893 -0.0336613 0.122868 0.991852 -0.0226876 0.0854646 0.996083 -0.0104859 0.0485432 0.998766 -0.0050852 0.020178 0.999783 -0.00483864 0.0169148 0.999845 -0.00451249 0.0163227 0.999857 -0.00344192 0.0111487 0.999932 -0.0110112 0.0375128 0.999236 -0.0135542 0.0463545 0.998833 -0.0113893 0.039631 0.99915 -0.00675484 0.026569 0.999624 -0.00105927 0.00357518 0.999993 -0.0662368 0.253256 0.965129 -0.0627811 0.30664 0.949753 -0.0867949 0.270922 0.95868 -0.103176 0.424583 0.899491 -0.107342 0.408158 0.906579 -0.111179 0.407403 0.906456 -0.115167 0.442767 0.88921 -0.113853 0.480305 0.869681 -0.107548 0.507675 0.85481 -0.096548 0.525158 0.84551 -0.0812117 0.532857 0.8423 -0.0631306 0.530293 0.845461 -0.0447319 0.508009 0.860189 -0.0646377 0.505853 0.860195 -0.055993 0.473024 0.879268 -0.0569461 0.442972 0.894725 -0.0389205 0.444927 0.894721 -0.139205 0.485262 0.863217 -0.119464 0.565076 0.816344 -0.123288 0.487486 0.864383 -0.122848 0.514853 0.84843 -0.116607 0.546847 0.829073 -0.105342 0.569216 0.815412 -0.0893507 0.582075 0.808211 -0.0700852 0.585233 0.807831 -0.0504047 0.56841 0.8212 -0.0719083 0.566081 0.821207 -0.138418 0.550045 0.823584 -0.130532 0.564874 0.814787 -0.125524 0.584834 0.801382 -0.113959 0.61156 0.782948 -0.0972995 0.629162 0.77116 -0.0768131 0.637511 0.766603 -0.0559099 0.625842 0.777944 -0.0786965 0.623376 0.777952 -0.175334 0.708964 0.683102 -0.125725 0.796106 0.591953 -0.136582 0.789979 0.597728 -0.157626 0.842477 0.515156 -0.104765 0.887175 0.449383 -0.0944049 0.833824 0.543899 -0.0758237 0.820269 0.56693 -0.100447 0.817612 0.56694 -0.0954719 0.776241 0.623165 -0.095883 0.774816 0.624874 -0.0712157 0.77748 0.624863 -0.107434 0.887432 0.448244 -0.0718216 0.875815 0.477272 -0.116323 0.871014 0.477289 -0.00244659 0.00852065 0.999961 -0.00509371 0.0170282 0.999842 -0.0082534 0.0277468 0.999581 -0.0078659 0.0278595 0.999581 -0.00345222 0.0112739 0.999931 -0.00325404 0.0107607 0.999937 -0.00497971 0.0170621 0.999842 -0.0052876 0.0198976 0.999788 -0.00622529 0.0196165 0.999788 -0.00826466 0.0281889 0.999568 -0.00835836 0.028161 0.999568 -0.00796973 0.0328077 0.99943 -0.0178752 0.0650752 0.99772 -0.0295011 0.106259 0.993901 -0.0312983 0.105736 0.993902 -0.0331284 0.115946 0.992703 -0.0319332 0.116283 0.992703 -0.0308171 0.110543 0.993393 -0.0222353 0.0789879 0.996628 -0.0135038 0.0474026 0.998785 -0.0102468 0.0377285 0.999236 -0.00972616 0.0432316 0.999018 -0.0150191 0.0416429 0.99902 -0.0203218 0.0752316 0.996959 -0.0240404 0.0741251 0.996959 -0.0121971 0.0641968 0.997863 -0.0312624 0.126496 0.991474 -0.0526035 0.20568 0.977205 -0.0726365 0.2753 0.95861 -0.0896301 0.344561 0.934475 -0.103853 0.41285 0.904859 -0.114651 0.480114 0.869682 -0.106478 0.412177 0.90486 -0.106806 
0.444369 0.889454 -0.104773 0.444855 0.889453 -0.113303 0.506416 0.854813 -0.0130841 0.0774791 0.996908 -0.0364964 0.151677 0.987756 -0.0620781 0.245758 0.967341 -0.0867657 0.327698 0.94079 -0.107334 0.408443 0.906451 -0.0950525 0.325363 0.9408 -0.0957733 0.371123 0.923631 -0.103236 0.369104 0.923636 -0.110855 0.443872 0.889207 -0.00251714 0.00840903 0.999961 -0.00254038 0.00849246 0.999961 -0.124407 0.4872 0.864384 -0.0157326 0.0894384 0.995868 -0.122742 0.930868 0.34412 -0.0534813 0.958963 0.278443 -0.0324549 0.971857 0.233326 -0.0333572 0.506234 0.861751 -0.0375297 0.575724 0.816783 -0.00905562 0.576884 0.816776 -0.0542306 0.895095 0.442565 -0.0612315 0.680031 0.730623 -0.0849768 0.677463 0.730632 -0.0833047 0.68691 0.721952 -0.103349 0.684175 0.721958 -0.105024 0.673947 0.731277 -0.112516 0.716185 0.688781 -0.114766 0.774386 0.622218 -0.130812 0.828879 0.54392 -0.0663399 0.730673 0.679497 -0.0907206 0.728039 0.679507 -0.0895342 0.733242 0.674047 -0.109479 0.730524 0.674055 -0.0103887 0.120596 0.992647 -0.0162506 0.119959 0.992646 -0.0298559 0.184134 0.982448 -0.0437187 0.2309 0.971995 -0.0585423 0.271294 0.960715 -0.0678195 0.294168 0.953345 -0.0726163 0.294644 0.952844 -0.0735029 0.284361 0.955896 -0.070128 0.263355 0.962147 -0.062269 0.231642 0.970806 -0.0490598 0.189115 0.980729 -0.0435564 0.122834 0.991471 -0.0170795 0.200147 0.979617 -0.0265598 0.199118 0.979616 -0.0411734 0.256399 0.965694 -0.0580812 0.303754 0.950979 -0.0697764 0.33284 0.940398 -0.0775379 0.338276 0.937847 -0.0815632 0.332994 0.939395 -0.0814197 0.317112 0.944887 -0.0768001 0.290641 0.953745 -0.0675172 0.25346 0.964987 -0.063879 0.202403 0.977217 -0.0237673 0.278353 0.960185 -0.0365815 0.276962 0.960184 -0.0547728 0.336749 0.94 -0.0688321 0.371933 0.925704 -0.0796552 0.382174 0.920651 -0.0869625 0.381652 0.920206 -0.0902471 0.370728 0.924347 -0.0891188 0.349431 0.932714 -0.0832994 0.317658 0.944539 -0.0808105 0.272993 0.958616 -0.0213937 0.185292 0.982451 -0.0303698 0.365274 0.930404 -0.0489156 0.363261 0.930402 -0.0643784 0.411445 0.909158 -0.0784174 0.426312 0.901171 -0.0891935 0.430448 0.898198 -0.0960924 0.424267 0.900424 -0.0986699 0.407877 0.90769 -0.0966112 0.381428 0.919336 -0.0952469 0.343042 0.934479 -0.0298122 0.257954 0.965697 -0.0384527 0.338983 0.940006 -0.048705 0.41359 0.90916 -0.0129722 0.111593 0.993669 -0.0182369 0.110873 0.993667 -0.0316444 0.165651 0.985677 -0.0441101 0.205978 0.977562 -0.0566637 0.239381 0.969271 -0.0634452 0.255861 0.964629 -0.0653911 0.251307 0.965696 -0.0632872 0.235867 0.969722 -0.0568692 0.209601 0.976132 -0.0453958 0.17257 0.983951 -0.0292579 0.11627 0.992787 -0.0242439 0.06058 0.997869 -0.0147014 0.100415 0.994837 -0.0243664 0.166876 0.985677 -0.0340533 0.232526 0.971994 -0.0442895 0.306073 0.950977 -0.055907 0.374104 0.9257 -0.0814748 0.527784 0.845462 -0.0731573 0.470677 0.879267 -0.0644651 0.428657 0.901165 -0.0729146 0.481664 0.873318 -0.0785145 0.432533 0.898193 -0.0876002 0.479198 0.873325 -0.0875963 0.479553 0.87313 -0.0983397 0.477457 0.873135 -0.0890844 0.425801 0.900421 -0.0893057 0.582606 0.807833 -0.0966118 0.634807 0.766607 -0.114986 0.773584 0.623174 -0.0261141 0.148701 0.988537 -0.0365511 0.207456 0.977561 -0.0477291 0.273407 0.960714 -0.0600548 0.334739 0.940395 -0.0693258 0.384191 0.920647 -0.096343 0.530317 0.842307 -0.104608 0.579516 0.80822 -0.112361 0.626636 0.77117 -0.119577 0.671508 0.731285 -0.122381 0.652128 0.748166 -0.127138 0.709113 0.693537 -0.142547 0.769739 0.622239 -0.126216 0.71389 0.688789 -0.015857 0.0786981 0.996772 -0.026362 
0.130936 0.99104 -0.0369168 0.182737 0.982468 -0.0253671 0.113453 0.993219 -0.0483896 0.241194 0.96927 -0.0355809 0.158483 0.98672 -0.0607941 0.295706 0.953342 -0.0467508 0.209399 0.976712 -0.0702706 0.339866 0.937845 -0.058717 0.256992 0.964628 -0.0797059 0.383241 0.920203 -0.0679718 0.295756 0.952842 -0.0772214 0.334033 0.939393 -0.107091 0.523101 0.845516 -0.0983621 0.46722 0.878653 -0.095635 0.408603 0.907688 -0.104873 0.465796 0.878656 -0.0864377 0.371639 0.924345 -0.115397 0.567255 0.815417 -0.123247 0.609749 0.782954 -0.130599 0.650526 0.748171 -0.144989 0.636153 0.757817 -0.155723 0.646537 0.74682 -0.139026 0.675908 0.723755 -0.121347 0.54581 0.829075 -0.147031 0.705242 0.693553 -0.132098 0.638968 0.757806 -0.128998 0.584074 0.801384 -0.0152396 0.0681728 0.997557 -0.0140369 0.057884 0.998225 -0.0123292 0.0477206 0.998785 -0.0156879 0.0574606 0.998225 -0.0205775 0.0794369 0.996628 -0.0258328 0.0956833 0.995077 -0.0233773 0.0963133 0.995076 -0.017454 0.0676427 0.997557 -0.0328191 0.134574 0.99036 -0.0432329 0.177983 0.983083 -0.0543335 0.21868 0.974283 -0.0629829 0.251925 0.965695 -0.0716614 0.284833 0.955895 -0.0803666 0.317382 0.944886 -0.0890703 0.349443 0.932714 -0.0977825 0.381128 0.919337 -0.122178 0.515014 0.84843 -0.137207 0.563281 0.814794 -0.00603071 0.0276146 0.9996 -0.00579913 0.022919 0.999721 -0.00613373 0.0231279 0.999714 -0.0171266 0.0627897 0.99788 -0.0132075 0.0464554 0.998833 -0.0181013 0.0625147 0.99788 -0.0186212 0.0648233 0.997723 -0.0186641 0.0648108 0.997723 -0.0119481 0.0394648 0.99915 -0.0289347 0.111053 0.993393 -0.0241431 0.0878636 0.99584 -0.0250849 0.0875979 0.99584 -0.0247514 0.0859711 0.99599 -0.0245452 0.0860311 0.99599 -0.0160035 0.0523211 0.998502 -0.0149023 0.0526489 0.998502 -0.0109099 0.0319236 0.999431 -0.0381921 0.14696 0.988405 -0.0480529 0.18071 0.982362 -0.0402605 0.143072 0.988893 -0.0557953 0.20837 0.976457 -0.0468305 0.16502 0.985178 -0.063588 0.235785 0.969723 -0.0534929 0.186928 0.980916 -0.0714385 0.263 0.962147 -0.0602176 0.208656 0.976134 -0.0793355 0.289955 0.953746 -0.0670206 0.230302 0.970809 -0.0872662 0.316585 0.944541 -0.0738773 0.251666 0.96499 -0.0878052 0.294085 0.951737 -0.0696092 0.218251 0.973407 -0.047798 0.132682 0.990005 -0.0267846 0.0654786 0.997494 -0.0204692 0.0643032 0.99772 -0.0365112 0.12204 0.991853 -0.0418098 0.138277 0.989511 -0.0277319 0.0839635 0.996083 -0.047193 0.154404 0.98688 -0.0315367 0.0937997 0.995091 -0.0526655 0.170457 0.983957 -0.0354393 0.103548 0.993993 -0.0582305 0.186449 0.980738 -0.0394462 0.113239 0.992785 -0.0776079 0.29696 0.951731 -0.0757055 0.241801 0.967368 -0.0523588 0.146956 0.987756 -0.0560122 0.222204 0.97339 -0.0126196 0.0697333 0.997486 -0.0332007 0.137023 0.990011 -0.0295344 0.0725313 0.996929 -0.0218249 0.0560437 0.99819 -0.0194439 0.0510733 0.998506 -0.0172078 0.0465205 0.998769 -0.00887073 0.0378884 0.999243 -0.0129006 0.0366771 0.999244 -0.00843308 0.026064 0.999625 -0.00388401 0.0140472 0.999894 -0.00429249 0.0139243 0.999894 -0.0352267 0.101413 0.994221 -0.0211948 0.372552 0.927769 1 0 0 1 0 0 0 1 0 -2.75533e-07 1 6.64013e-06 3.07978e-07 1 5.27938e-07 3.14902e-07 1 4.86053e-07 3.23663e-07 1 4.51781e-07 3.34284e-07 1 4.23293e-07 3.46885e-07 1 3.99334e-07 3.61683e-07 1 3.79015e-07 3.79014e-07 1 3.61684e-07 3.99333e-07 1 3.46885e-07 4.23295e-07 1 3.34283e-07 4.51779e-07 1 3.23663e-07 4.86057e-07 1 3.14901e-07 5.27937e-07 1 3.07978e-07 5.80139e-07 1 3.02994e-07 6.46878e-07 1 3.00225e-07 0 -1.37974e-07 1 3.2113e-09 -1.38466e-07 1 9.09363e-09 -1.39411e-07 1 1.42706e-08 
-1.40282e-07 1 0 -1.37777e-07 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -1.4328e-07 1 -5.41933e-09 -1.3586e-07 1 -3.17118e-09 -1.36737e-07 1 -8.77937e-09 -1.34592e-07 1 -1.35118e-08 -1.32819e-07 1 0 0 1 0 0 1 0 -1.86265e-07 1 3.24456e-08 0 1 0 0 1 0 0 1 -6.46878e-07 1 3.00225e-07 -5.8014e-07 1 3.02994e-07 -5.27935e-07 1 3.07978e-07 -4.86057e-07 1 3.14901e-07 -4.51779e-07 1 3.23663e-07 -4.23295e-07 1 3.34283e-07 -3.99334e-07 1 3.46885e-07 -3.79014e-07 1 3.61684e-07 -3.61683e-07 1 3.79015e-07 -3.46885e-07 1 3.99334e-07 -3.34284e-07 1 4.23293e-07 -3.23662e-07 1 4.51782e-07 -3.14901e-07 1 4.86053e-07 -3.07978e-07 1 5.27937e-07 2.75532e-07 1 6.6401e-06 0 1 0 0.998176 -7.90731e-09 0.0603762 0.983619 -2.36082e-08 0.180261 0.983619 -2.57428e-08 0.18026 0.954723 -4.32916e-08 0.297497 0.954723 -3.46333e-08 0.297498 0.911899 -4.77786e-08 0.410415 0.85578 0 0.517339 0.787183 0 0.61672 0.707112 0 0.707102 0.616716 0 0.787186 0.51734 0 0.85578 0.41041 0 0.911901 0.297507 0 0.95472 0.180252 -2.29017e-07 0.983621 0.06038 -2.32406e-07 0.998175 -0.0603801 -2.32406e-07 0.998175 -0.180252 0 0.983621 -0.297507 0 0.95472 -0.41041 0 0.911901 -0.51734 0 0.85578 -0.616716 0 0.787186 -0.707111 0 0.707102 -0.787182 0 0.61672 -0.85578 -6.02262e-08 0.517339 -0.911899 -4.77786e-08 0.410415 -0.954723 -4.32916e-08 0.297498 -0.983619 -2.36081e-08 0.18026 -0.998176 -7.91736e-09 0.0603762 -0.998176 7.90731e-09 -0.0603762 -0.983619 2.62312e-08 -0.18026 -0.954723 3.46333e-08 -0.297498 -0.911899 4.77786e-08 -0.410415 -0.85578 0 -0.517339 -0.787182 0 -0.61672 -0.707111 0 -0.707102 -0.616716 0 -0.787186 -0.51734 0 -0.85578 -0.41041 0 -0.911901 -0.297507 0 -0.95472 -0.180251 2.29017e-07 -0.983621 -0.0603801 2.32406e-07 -0.998175 0.06038 2.32406e-07 -0.998175 0.180251 0 -0.983621 0.297507 0 -0.95472 0.41041 0 -0.911901 0.51734 0 -0.85578 0.616716 0 -0.787186 0.707112 0 -0.707102 0.787183 0 -0.61672 0.85578 6.02262e-08 -0.517339 0.911899 4.77786e-08 -0.410415 0.954723 4.32916e-08 -0.297498 0.983619 2.36082e-08 -0.18026 0.998176 7.91736e-09 -0.0603762 0.998176 -8.27447e-09 0.0603762 0.911899 -4.77786e-08 0.410415 0.85578 -6.02262e-08 0.517339 0.787183 0 0.61672 0.707112 0 0.707102 0.616716 0 0.787186 0.51734 0 0.85578 0.41041 0 0.911901 0.297507 0 0.95472 0.180251 0 0.983621 0.06038 -2.32406e-07 0.998175 -0.0603801 -2.32406e-07 0.998175 -0.180251 -2.29017e-07 0.983621 -0.297507 0 0.95472 -0.41041 0 0.911901 -0.51734 0 0.85578 -0.616716 0 0.787186 -0.707111 0 0.707102 -0.787182 0 0.61672 -0.85578 0 0.517339 -0.911899 -4.77786e-08 0.410415 -0.954723 -3.46333e-08 0.297497 -0.983619 -2.62312e-08 0.18026 -0.998176 -7.90731e-09 0.0603762 -0.998176 7.91736e-09 -0.0603762 -0.983619 2.36081e-08 -0.18026 -0.954723 4.32916e-08 -0.297498 -0.911899 4.77786e-08 -0.410415 -0.85578 6.02262e-08 -0.517339 -0.787182 0 -0.61672 -0.707111 0 -0.707102 -0.616716 0 -0.787186 -0.51734 0 -0.85578 -0.41041 0 -0.911901 -0.297507 0 -0.95472 -0.180252 0 -0.983621 -0.0603801 2.32406e-07 -0.998175 0.06038 2.32406e-07 -0.998175 0.180252 2.29017e-07 -0.983621 0.297507 0 -0.95472 0.41041 0 -0.911901 0.51734 0 -0.85578 0.616716 0 -0.787186 0.707112 0 -0.707102 0.787183 0 -0.61672 0.85578 0 -0.517339 0.911899 4.77786e-08 -0.410415 0.954723 3.46333e-08 -0.297498 0.983619 2.62313e-08 -0.18026 0.998176 7.90731e-09 -0.0603762 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 
[Added mesh asset data elided: whitespace-separated per-vertex normal vectors (unit float triples) followed by triangle index lists for the new model geometry introduced by this change.]
243 471 1573 472 243 472 113 472 244 473 113 473 112 473 1578 474 112 474 111 474 1588 475 111 475 245 475 1588 476 1578 476 111 476 238 477 116 477 1534 477 1534 478 115 478 239 478 239 479 240 479 236 479 236 480 241 480 2019 480 2019 481 237 481 242 481 242 482 21 482 1551 482 1551 483 243 483 1573 483 1573 484 113 484 244 484 244 485 112 485 1578 485 111 486 246 486 245 486 245 487 246 487 1601 487 1601 488 246 488 247 488 251 489 247 489 252 489 1608 490 252 490 108 490 248 491 108 491 107 491 250 492 107 492 106 492 249 493 106 493 253 493 249 494 250 494 106 494 1601 495 247 495 251 495 251 496 252 496 1608 496 1608 497 108 497 248 497 248 498 107 498 250 498 106 499 105 499 253 499 253 500 105 500 1638 500 1638 501 105 501 254 501 256 502 254 502 29 502 257 503 29 503 103 503 255 504 103 504 258 504 255 505 257 505 103 505 1638 506 254 506 256 506 256 507 29 507 257 507 103 508 259 508 258 508 258 509 259 509 262 509 262 510 259 510 260 510 1674 511 260 511 261 511 1680 512 261 512 263 512 1689 513 263 513 264 513 1689 514 1680 514 263 514 262 515 260 515 1674 515 1674 516 261 516 1680 516 263 517 265 517 264 517 264 518 265 518 1692 518 1692 519 265 519 80 519 266 520 80 520 269 520 268 521 269 521 267 521 268 522 266 522 269 522 1692 523 80 523 266 523 269 524 100 524 267 524 267 525 100 525 270 525 270 526 100 526 79 526 1718 527 79 527 273 527 271 528 273 528 98 528 1730 529 98 529 272 529 1730 530 271 530 98 530 270 531 79 531 1718 531 1718 532 273 532 271 532 98 533 78 533 272 533 272 534 78 534 1743 534 1743 535 78 535 279 535 1754 536 279 536 274 536 1756 537 274 537 275 537 276 538 275 538 75 538 1767 539 75 539 280 539 277 540 280 540 278 540 277 541 1767 541 280 541 1743 542 279 542 1754 542 1754 543 274 543 1756 543 1756 544 275 544 276 544 276 545 75 545 1767 545 280 546 74 546 278 546 278 547 74 547 1782 547 1782 548 74 548 73 548 1789 549 73 549 72 549 1794 550 72 550 71 550 286 551 71 551 281 551 1807 552 281 552 95 552 2005 553 95 553 287 553 1823 554 287 554 66 554 288 555 66 555 65 555 1825 556 65 556 282 556 285 557 282 557 283 557 284 558 283 558 289 558 284 559 285 559 283 559 1782 560 73 560 1789 560 1789 561 72 561 1794 561 1794 562 71 562 286 562 286 563 281 563 1807 563 1807 564 95 564 2005 564 2005 565 287 565 1823 565 1823 566 66 566 288 566 288 567 65 567 1825 567 1825 568 282 568 285 568 283 569 290 569 289 569 289 570 290 570 1862 570 1862 571 290 571 64 571 291 572 64 572 92 572 1870 573 92 573 293 573 292 574 293 574 63 574 294 575 63 575 295 575 1893 576 295 576 61 576 300 577 61 577 297 577 296 578 297 578 301 578 298 579 301 579 89 579 1919 580 89 580 299 580 1932 581 299 581 87 581 1933 582 87 582 302 582 1938 583 302 583 1952 583 1938 584 1933 584 302 584 1862 585 64 585 291 585 291 586 92 586 1870 586 1870 587 293 587 292 587 292 588 63 588 294 588 294 589 295 589 1893 589 1893 590 61 590 300 590 300 591 297 591 296 591 296 592 301 592 298 592 298 593 89 593 1919 593 1919 594 299 594 1932 594 1932 595 87 595 1933 595 302 596 86 596 1952 596 1952 597 86 597 1953 597 1953 598 86 598 308 598 309 599 308 599 303 599 1965 600 303 600 305 600 304 601 305 601 60 601 306 602 60 602 59 602 307 603 59 603 310 603 307 604 306 604 59 604 1953 605 308 605 309 605 309 606 303 606 1965 606 1965 607 305 607 304 607 304 608 60 608 306 608 59 609 311 609 310 609 310 610 311 610 315 610 312 611 310 611 315 611 312 612 1990 612 310 612 312 613 1249 613 1990 613 1990 614 1249 614 1993 614 1993 615 1249 615 314 615 313 616 1993 616 314 616 315 617 311 617 1289 617 
1289 618 311 618 58 618 1251 619 58 619 317 619 318 620 317 620 56 620 316 621 56 621 55 621 316 622 318 622 56 622 1289 623 58 623 1251 623 1251 624 317 624 318 624 1207 625 215 625 211 625 211 626 1375 626 1208 626 1207 627 319 627 4 627 1352 628 682 628 320 628 1352 629 321 629 682 629 1352 630 331 630 321 630 1352 631 1290 631 331 631 331 632 1290 632 322 632 752 633 322 633 343 633 753 634 343 634 323 634 755 635 323 635 716 635 329 636 716 636 718 636 757 637 718 637 324 637 756 638 324 638 325 638 758 639 325 639 720 639 327 640 720 640 326 640 1273 641 326 641 1230 641 1273 642 327 642 326 642 1273 643 328 643 327 643 327 644 328 644 656 644 758 645 656 645 665 645 756 646 665 646 664 646 757 647 664 647 662 647 329 648 662 648 754 648 755 649 754 649 677 649 753 650 677 650 330 650 752 651 330 651 680 651 331 652 680 652 321 652 331 653 752 653 680 653 331 654 322 654 752 654 1290 655 332 655 322 655 322 656 332 656 761 656 344 657 761 657 760 657 767 658 760 658 775 658 766 659 775 659 348 659 341 660 348 660 349 660 338 661 349 661 784 661 785 662 784 662 333 662 795 663 333 663 334 663 336 664 334 664 351 664 1265 665 351 665 335 665 1265 666 336 666 351 666 1265 667 709 667 336 667 336 668 709 668 337 668 795 669 337 669 794 669 785 670 794 670 339 670 338 671 339 671 340 671 341 672 340 672 776 672 766 673 776 673 342 673 767 674 342 674 759 674 344 675 759 675 343 675 322 676 344 676 343 676 322 677 761 677 344 677 332 678 345 678 761 678 761 679 345 679 346 679 760 680 346 680 347 680 775 681 347 681 774 681 348 682 774 682 783 682 349 683 783 683 790 683 784 684 790 684 792 684 333 685 792 685 350 685 334 686 350 686 803 686 351 687 803 687 802 687 335 688 802 688 356 688 335 689 351 689 802 689 345 690 1291 690 346 690 346 691 1291 691 352 691 347 692 352 692 353 692 774 693 353 693 781 693 783 694 781 694 354 694 790 695 354 695 791 695 792 696 791 696 793 696 350 697 793 697 360 697 803 698 360 698 355 698 802 699 355 699 809 699 356 700 809 700 361 700 356 701 802 701 809 701 1291 702 357 702 352 702 352 703 357 703 780 703 353 704 780 704 779 704 781 705 779 705 358 705 354 706 358 706 359 706 791 707 359 707 789 707 793 708 789 708 365 708 360 709 365 709 807 709 355 710 807 710 810 710 809 711 810 711 367 711 361 712 367 712 366 712 361 713 809 713 367 713 357 714 368 714 780 714 780 715 368 715 778 715 779 716 778 716 362 716 358 717 362 717 363 717 359 718 363 718 364 718 789 719 364 719 798 719 365 720 798 720 369 720 807 721 369 721 808 721 810 722 808 722 813 722 367 723 813 723 818 723 366 724 818 724 1263 724 366 725 367 725 818 725 368 726 1294 726 778 726 778 727 1294 727 370 727 362 728 370 728 788 728 363 729 788 729 797 729 364 730 797 730 372 730 798 731 372 731 800 731 369 732 800 732 373 732 808 733 373 733 816 733 813 734 816 734 819 734 818 735 819 735 375 735 1263 736 375 736 1223 736 1263 737 818 737 375 737 370 738 1294 738 371 738 788 739 371 739 796 739 797 740 796 740 799 740 372 741 799 741 801 741 800 742 801 742 806 742 373 743 806 743 374 743 816 744 374 744 817 744 819 745 817 745 706 745 375 746 706 746 839 746 1223 747 839 747 1261 747 1223 748 375 748 839 748 1295 749 388 749 708 749 1295 750 387 750 388 750 1295 751 376 751 387 751 387 752 376 752 389 752 804 753 389 753 391 753 385 754 391 754 815 754 384 755 815 755 377 755 823 756 377 756 824 756 822 757 824 757 393 757 378 758 393 758 835 758 837 759 835 759 838 759 840 760 838 760 379 760 380 761 379 761 1259 761 380 762 840 762 379 762 380 763 381 763 840 763 840 764 381 764 382 
764 837 765 382 765 827 765 378 766 827 766 825 766 822 767 825 767 383 767 823 768 383 768 707 768 384 769 707 769 811 769 385 770 811 770 805 770 804 771 805 771 386 771 387 772 386 772 388 772 387 773 804 773 386 773 387 774 389 774 804 774 376 775 396 775 389 775 389 776 396 776 390 776 391 777 390 777 812 777 815 778 812 778 814 778 377 779 814 779 821 779 824 780 821 780 392 780 393 781 392 781 834 781 835 782 834 782 836 782 838 783 836 783 394 783 379 784 394 784 395 784 1259 785 395 785 1218 785 1259 786 379 786 395 786 396 787 1299 787 390 787 390 788 1299 788 397 788 812 789 397 789 400 789 814 790 400 790 398 790 821 791 398 791 826 791 392 792 826 792 399 792 834 793 399 793 842 793 836 794 842 794 841 794 394 795 841 795 846 795 395 796 846 796 404 796 1218 797 404 797 1212 797 1218 798 395 798 404 798 1299 799 405 799 397 799 397 800 405 800 401 800 400 801 401 801 402 801 398 802 402 802 406 802 826 803 406 803 832 803 399 804 832 804 403 804 842 805 403 805 843 805 841 806 843 806 851 806 846 807 851 807 855 807 404 808 855 808 409 808 1212 809 409 809 1257 809 1212 810 404 810 409 810 405 811 1303 811 401 811 401 812 1303 812 820 812 402 813 820 813 829 813 406 814 829 814 830 814 832 815 830 815 833 815 403 816 833 816 411 816 843 817 411 817 412 817 851 818 412 818 407 818 855 819 407 819 408 819 409 820 408 820 416 820 1257 821 416 821 415 821 1257 822 409 822 416 822 820 823 1303 823 704 823 829 824 704 824 703 824 830 825 703 825 831 825 833 826 831 826 410 826 411 827 410 827 413 827 412 828 413 828 702 828 407 829 702 829 701 829 408 830 701 830 414 830 416 831 414 831 870 831 415 832 870 832 1255 832 415 833 416 833 870 833 1302 834 705 834 417 834 1302 835 428 835 705 835 1302 836 429 836 428 836 428 837 429 837 418 837 426 838 418 838 845 838 848 839 845 839 854 839 425 840 854 840 852 840 857 841 852 841 861 841 419 842 861 842 860 842 868 843 860 843 872 843 867 844 872 844 871 844 422 845 871 845 421 845 420 846 421 846 1206 846 420 847 422 847 421 847 420 848 1254 848 422 848 422 849 1254 849 873 849 867 850 873 850 423 850 868 851 423 851 424 851 419 852 424 852 700 852 857 853 700 853 849 853 425 854 849 854 850 854 848 855 850 855 427 855 426 856 427 856 828 856 428 857 828 857 705 857 428 858 426 858 828 858 428 859 418 859 426 859 429 860 1308 860 418 860 418 861 1308 861 844 861 845 862 844 862 433 862 854 863 433 863 853 863 852 864 853 864 430 864 861 865 430 865 431 865 860 866 431 866 869 866 872 867 869 867 880 867 871 868 880 868 879 868 421 869 879 869 432 869 1206 870 432 870 437 870 1206 871 421 871 432 871 1308 872 1306 872 844 872 844 873 1306 873 847 873 433 874 847 874 440 874 853 875 440 875 856 875 430 876 856 876 859 876 431 877 859 877 866 877 869 878 866 878 442 878 880 879 442 879 434 879 879 880 434 880 435 880 432 881 435 881 436 881 437 882 436 882 1253 882 437 883 432 883 436 883 1306 884 438 884 847 884 847 885 438 885 439 885 440 886 439 886 441 886 856 887 441 887 446 887 859 888 446 888 865 888 866 889 865 889 878 889 442 890 878 890 877 890 434 891 877 891 443 891 435 892 443 892 448 892 436 893 448 893 444 893 1253 894 444 894 450 894 1253 895 436 895 444 895 438 896 445 896 439 896 439 897 445 897 451 897 441 898 451 898 858 898 446 899 858 899 863 899 865 900 863 900 447 900 878 901 447 901 453 901 877 902 453 902 454 902 443 903 454 903 455 903 448 904 455 904 449 904 444 905 449 905 456 905 450 906 456 906 1204 906 450 907 444 907 456 907 445 908 457 908 451 908 451 909 457 909 862 909 858 910 862 910 864 910 863 911 
864 911 452 911 447 912 452 912 876 912 453 913 876 913 887 913 454 914 887 914 890 914 455 915 890 915 893 915 449 916 893 916 462 916 456 917 462 917 33 917 1204 918 456 918 33 918 862 919 457 919 458 919 864 920 458 920 459 920 452 921 459 921 460 921 876 922 460 922 461 922 887 923 461 923 469 923 890 924 469 924 467 924 893 925 467 925 894 925 462 926 894 926 463 926 33 927 462 927 463 927 464 928 881 928 874 928 464 929 883 929 881 929 464 930 465 930 883 930 883 931 465 931 886 931 885 932 886 932 471 932 470 933 471 933 892 933 889 934 892 934 473 934 888 935 473 935 475 935 35 936 475 936 36 936 35 937 888 937 475 937 35 938 466 938 888 938 35 939 34 939 466 939 466 940 34 940 894 940 467 941 466 941 894 941 467 942 468 942 466 942 467 943 469 943 468 943 468 944 469 944 875 944 889 945 875 945 470 945 892 946 889 946 470 946 465 947 1316 947 886 947 886 948 1316 948 476 948 471 949 476 949 472 949 892 950 472 950 891 950 473 951 891 951 479 951 475 952 479 952 474 952 36 953 475 953 474 953 1316 954 1313 954 476 954 476 955 1313 955 477 955 472 956 477 956 896 956 891 957 896 957 482 957 479 958 482 958 481 958 478 959 479 959 481 959 478 960 474 960 479 960 1313 961 1314 961 477 961 477 962 1314 962 483 962 896 963 483 963 480 963 482 964 480 964 485 964 481 965 485 965 484 965 481 966 482 966 485 966 1314 967 486 967 483 967 483 968 486 968 895 968 480 969 895 969 746 969 485 970 746 970 484 970 485 971 480 971 746 971 486 972 487 972 895 972 895 973 487 973 747 973 746 974 747 974 488 974 37 975 488 975 38 975 37 976 746 976 488 976 37 977 484 977 746 977 747 978 487 978 494 978 488 979 494 979 489 979 38 980 489 980 40 980 38 981 488 981 489 981 490 982 491 982 1317 982 490 983 493 983 491 983 490 984 495 984 493 984 493 985 495 985 492 985 41 986 492 986 698 986 41 987 493 987 492 987 41 988 40 988 493 988 493 989 40 989 491 989 491 990 40 990 489 990 494 991 491 991 489 991 494 992 1317 992 491 992 494 993 487 993 1317 993 495 994 496 994 492 994 492 995 496 995 697 995 698 996 697 996 497 996 498 997 497 997 42 997 498 998 698 998 497 998 496 999 499 999 697 999 697 1000 499 1000 500 1000 497 1001 500 1001 897 1001 42 1002 897 1002 502 1002 43 1003 502 1003 501 1003 43 1004 42 1004 502 1004 499 1005 1320 1005 500 1005 500 1006 1320 1006 511 1006 897 1007 511 1007 898 1007 502 1008 898 1008 510 1008 503 1009 510 1009 696 1009 900 1010 696 1010 504 1010 904 1011 504 1011 903 1011 906 1012 903 1012 505 1012 508 1013 505 1013 912 1013 507 1014 912 1014 506 1014 1287 1015 506 1015 515 1015 1287 1016 507 1016 506 1016 1287 1017 1288 1017 507 1017 507 1018 1288 1018 742 1018 508 1019 742 1019 687 1019 906 1020 687 1020 685 1020 904 1021 685 1021 692 1021 900 1022 692 1022 693 1022 509 1023 693 1023 47 1023 48 1024 509 1024 47 1024 48 1025 503 1025 509 1025 48 1026 46 1026 503 1026 503 1027 46 1027 501 1027 502 1028 503 1028 501 1028 502 1029 510 1029 503 1029 1320 1030 1321 1030 511 1030 511 1031 1321 1031 516 1031 898 1032 516 1032 512 1032 510 1033 512 1033 513 1033 696 1034 513 1034 899 1034 504 1035 899 1035 902 1035 903 1036 902 1036 905 1036 505 1037 905 1037 911 1037 912 1038 911 1038 918 1038 506 1039 918 1039 514 1039 515 1040 514 1040 1286 1040 515 1041 506 1041 514 1041 516 1042 1321 1042 741 1042 512 1043 741 1043 517 1043 513 1044 517 1044 518 1044 899 1045 518 1045 738 1045 902 1046 738 1046 519 1046 905 1047 519 1047 520 1047 911 1048 520 1048 922 1048 918 1049 922 1049 521 1049 514 1050 521 1050 522 1050 1286 1051 522 1051 1285 1051 1286 1052 514 1052 522 1052 
1323 1053 740 1053 1322 1053 1323 1054 534 1054 740 1054 1323 1055 523 1055 534 1055 534 1056 523 1056 524 1056 535 1057 524 1057 910 1057 533 1058 910 1058 908 1058 909 1059 908 1059 538 1059 916 1060 538 1060 525 1060 925 1061 525 1061 526 1061 924 1062 526 1062 929 1062 527 1063 929 1063 935 1063 528 1064 935 1064 529 1064 530 1065 529 1065 1247 1065 530 1066 528 1066 529 1066 530 1067 531 1067 528 1067 528 1068 531 1068 932 1068 527 1069 932 1069 931 1069 924 1070 931 1070 921 1070 925 1071 921 1071 532 1071 916 1072 532 1072 915 1072 909 1073 915 1073 907 1073 533 1074 907 1074 739 1074 535 1075 739 1075 901 1075 534 1076 901 1076 740 1076 534 1077 535 1077 901 1077 534 1078 524 1078 535 1078 523 1079 536 1079 524 1079 524 1080 536 1080 537 1080 910 1081 537 1081 914 1081 908 1082 914 1082 917 1082 538 1083 917 1083 920 1083 525 1084 920 1084 539 1084 526 1085 539 1085 542 1085 929 1086 542 1086 930 1086 935 1087 930 1087 540 1087 529 1088 540 1088 543 1088 1247 1089 543 1089 1282 1089 1247 1090 529 1090 543 1090 536 1091 1327 1091 537 1091 537 1092 1327 1092 545 1092 914 1093 545 1093 913 1093 917 1094 913 1094 541 1094 920 1095 541 1095 923 1095 539 1096 923 1096 549 1096 542 1097 549 1097 928 1097 930 1098 928 1098 933 1098 540 1099 933 1099 940 1099 543 1100 940 1100 550 1100 1282 1101 550 1101 544 1101 1282 1102 543 1102 550 1102 1327 1103 1330 1103 545 1103 545 1104 1330 1104 546 1104 913 1105 546 1105 547 1105 541 1106 547 1106 548 1106 923 1107 548 1107 927 1107 549 1108 927 1108 553 1108 928 1109 553 1109 934 1109 933 1110 934 1110 938 1110 940 1111 938 1111 948 1111 550 1112 948 1112 551 1112 544 1113 551 1113 552 1113 544 1114 550 1114 551 1114 1330 1115 557 1115 546 1115 546 1116 557 1116 919 1116 547 1117 919 1117 748 1117 548 1118 748 1118 559 1118 927 1119 559 1119 554 1119 553 1120 554 1120 560 1120 934 1121 560 1121 939 1121 938 1122 939 1122 947 1122 948 1123 947 1123 555 1123 551 1124 555 1124 556 1124 552 1125 556 1125 1241 1125 552 1126 551 1126 556 1126 557 1127 1329 1127 919 1127 919 1128 1329 1128 558 1128 748 1129 558 1129 563 1129 559 1130 563 1130 926 1130 554 1131 926 1131 566 1131 560 1132 566 1132 942 1132 939 1133 942 1133 561 1133 947 1134 561 1134 953 1134 555 1135 953 1135 562 1135 556 1136 562 1136 956 1136 1241 1137 956 1137 567 1137 1241 1138 556 1138 956 1138 558 1139 1329 1139 564 1139 563 1140 564 1140 565 1140 926 1141 565 1141 936 1141 566 1142 936 1142 937 1142 942 1143 937 1143 735 1143 561 1144 735 1144 734 1144 953 1145 734 1145 954 1145 562 1146 954 1146 957 1146 956 1147 957 1147 971 1147 567 1148 971 1148 733 1148 567 1149 956 1149 971 1149 1332 1150 737 1150 736 1150 1332 1151 568 1151 737 1151 1332 1152 569 1152 568 1152 568 1153 569 1153 570 1153 579 1154 570 1154 571 1154 946 1155 571 1155 952 1155 572 1156 952 1156 573 1156 951 1157 573 1157 955 1157 961 1158 955 1158 962 1158 968 1159 962 1159 970 1159 974 1160 970 1160 969 1160 973 1161 969 1161 574 1161 575 1162 574 1162 1240 1162 575 1163 973 1163 574 1163 575 1164 732 1164 973 1164 973 1165 732 1165 576 1165 974 1166 576 1166 972 1166 968 1167 972 1167 960 1167 961 1168 960 1168 577 1168 951 1169 577 1169 578 1169 572 1170 578 1170 945 1170 946 1171 945 1171 941 1171 579 1172 941 1172 580 1172 568 1173 580 1173 737 1173 568 1174 579 1174 580 1174 568 1175 570 1175 579 1175 569 1176 581 1176 570 1176 570 1177 581 1177 944 1177 571 1178 944 1178 943 1178 952 1179 943 1179 950 1179 573 1180 950 1180 582 1180 955 1181 582 1181 583 1181 962 1182 583 1182 588 1182 970 1183 588 1183 
589 1183 969 1184 589 1184 584 1184 574 1185 584 1185 585 1185 1240 1186 585 1186 1278 1186 1240 1187 574 1187 585 1187 581 1188 1333 1188 944 1188 944 1189 1333 1189 586 1189 943 1190 586 1190 949 1190 950 1191 949 1191 587 1191 582 1192 587 1192 959 1192 583 1193 959 1193 593 1193 588 1194 593 1194 594 1194 589 1195 594 1195 977 1195 584 1196 977 1196 590 1196 585 1197 590 1197 596 1197 1278 1198 596 1198 597 1198 1278 1199 585 1199 596 1199 1333 1200 591 1200 586 1200 586 1201 591 1201 592 1201 949 1202 592 1202 958 1202 587 1203 958 1203 599 1203 959 1204 599 1204 964 1204 593 1205 964 1205 967 1205 594 1206 967 1206 976 1206 977 1207 976 1207 595 1207 590 1208 595 1208 984 1208 596 1209 984 1209 598 1209 597 1210 598 1210 1237 1210 597 1211 596 1211 598 1211 591 1212 1334 1212 592 1212 592 1213 1334 1213 605 1213 958 1214 605 1214 600 1214 599 1215 600 1215 966 1215 964 1216 966 1216 965 1216 967 1217 965 1217 601 1217 976 1218 601 1218 602 1218 595 1219 602 1219 603 1219 984 1220 603 1220 983 1220 598 1221 983 1221 604 1221 1237 1222 604 1222 1276 1222 1237 1223 598 1223 604 1223 1334 1224 731 1224 605 1224 605 1225 731 1225 606 1225 600 1226 606 1226 610 1226 966 1227 610 1227 607 1227 965 1228 607 1228 975 1228 601 1229 975 1229 611 1229 602 1230 611 1230 608 1230 603 1231 608 1231 986 1231 983 1232 986 1232 992 1232 604 1233 992 1233 991 1233 1276 1234 991 1234 1275 1234 1276 1235 604 1235 991 1235 606 1236 731 1236 609 1236 610 1237 609 1237 963 1237 607 1238 963 1238 730 1238 975 1239 730 1239 729 1239 611 1240 729 1240 727 1240 608 1241 727 1241 612 1241 986 1242 612 1242 726 1242 992 1243 726 1243 997 1243 991 1244 997 1244 725 1244 1275 1245 725 1245 724 1245 1275 1246 991 1246 725 1246 1337 1247 627 1247 613 1247 1337 1248 614 1248 627 1248 1337 1249 1336 1249 614 1249 614 1250 1336 1250 979 1250 615 1251 979 1251 978 1251 616 1252 978 1252 982 1252 625 1253 982 1253 617 1253 990 1254 617 1254 996 1254 995 1255 996 1255 631 1255 1002 1256 631 1256 1003 1256 618 1257 1003 1257 620 1257 619 1258 620 1258 621 1258 622 1259 621 1259 1274 1259 622 1260 619 1260 621 1260 622 1261 723 1261 619 1261 619 1262 723 1262 623 1262 618 1263 623 1263 1001 1263 1002 1264 1001 1264 994 1264 995 1265 994 1265 624 1265 990 1266 624 1266 985 1266 625 1267 985 1267 981 1267 616 1268 981 1268 728 1268 615 1269 728 1269 626 1269 614 1270 626 1270 627 1270 614 1271 615 1271 626 1271 614 1272 979 1272 615 1272 1336 1273 1338 1273 979 1273 979 1274 1338 1274 628 1274 978 1275 628 1275 980 1275 982 1276 980 1276 989 1276 617 1277 989 1277 629 1277 996 1278 629 1278 630 1278 631 1279 630 1279 635 1279 1003 1280 635 1280 1010 1280 620 1281 1010 1281 637 1281 621 1282 637 1282 639 1282 1274 1283 639 1283 638 1283 1274 1284 621 1284 639 1284 1338 1285 632 1285 628 1285 628 1286 632 1286 640 1286 980 1287 640 1287 633 1287 989 1288 633 1288 988 1288 629 1289 988 1289 1000 1289 630 1290 1000 1290 634 1290 635 1291 634 1291 636 1291 1010 1292 636 1292 1009 1292 637 1293 1009 1293 1022 1293 639 1294 1022 1294 1021 1294 638 1295 1021 1295 643 1295 638 1296 639 1296 1021 1296 632 1297 1339 1297 640 1297 640 1298 1339 1298 641 1298 633 1299 641 1299 993 1299 988 1300 993 1300 644 1300 1000 1301 644 1301 998 1301 634 1302 998 1302 642 1302 636 1303 642 1303 645 1303 1009 1304 645 1304 646 1304 1022 1305 646 1305 1025 1305 1021 1306 1025 1306 650 1306 643 1307 650 1307 649 1307 643 1308 1021 1308 650 1308 1339 1309 1341 1309 641 1309 641 1310 1341 1310 987 1310 993 1311 987 1311 651 1311 644 1312 651 1312 652 1312 
998 1313 652 1313 653 1313 642 1314 653 1314 1008 1314 645 1315 1008 1315 1015 1315 646 1316 1015 1316 647 1316 1025 1317 647 1317 648 1317 650 1318 648 1318 1028 1318 649 1319 1028 1319 1231 1319 649 1320 650 1320 1028 1320 987 1321 1341 1321 749 1321 651 1322 749 1322 999 1322 652 1323 999 1323 654 1323 653 1324 654 1324 1013 1324 1008 1325 1013 1325 1014 1325 1015 1326 1014 1326 1020 1326 647 1327 1020 1327 668 1327 648 1328 668 1328 655 1328 1028 1329 655 1329 722 1329 1231 1330 722 1330 656 1330 328 1331 1231 1331 656 1331 658 1332 750 1332 657 1332 658 1333 659 1333 750 1333 658 1334 1344 1334 659 1334 659 1335 1344 1335 660 1335 1005 1336 660 1336 1011 1336 1007 1337 1011 1337 1017 1337 1018 1338 1017 1338 1027 1338 1024 1339 1027 1339 669 1339 661 1340 669 1340 672 1340 663 1341 672 1341 662 1341 664 1342 663 1342 662 1342 664 1343 666 1343 663 1343 664 1344 665 1344 666 1344 666 1345 665 1345 722 1345 655 1346 666 1346 722 1346 655 1347 667 1347 666 1347 655 1348 668 1348 667 1348 667 1349 668 1349 751 1349 661 1350 751 1350 1024 1350 669 1351 661 1351 1024 1351 1344 1352 1346 1352 660 1352 660 1353 1346 1353 673 1353 1011 1354 673 1354 1016 1354 1017 1355 1016 1355 670 1355 1027 1356 670 1356 745 1356 669 1357 745 1357 671 1357 672 1358 671 1358 754 1358 662 1359 672 1359 754 1359 1346 1360 1347 1360 673 1360 673 1361 1347 1361 674 1361 1016 1362 674 1362 675 1362 670 1363 675 1363 676 1363 745 1364 676 1364 678 1364 671 1365 678 1365 677 1365 754 1366 671 1366 677 1366 1347 1367 1348 1367 674 1367 674 1368 1348 1368 1023 1368 675 1369 1023 1369 1026 1369 676 1370 1026 1370 679 1370 678 1371 679 1371 330 1371 677 1372 678 1372 330 1372 1348 1373 1350 1373 1023 1373 1023 1374 1350 1374 681 1374 1026 1375 681 1375 683 1375 679 1376 683 1376 680 1376 330 1377 679 1377 680 1377 1350 1378 320 1378 681 1378 681 1379 320 1379 682 1379 683 1380 682 1380 321 1380 680 1381 683 1381 321 1381 1252 1382 684 1382 694 1382 688 1383 694 1383 695 1383 686 1384 695 1384 691 1384 685 1385 691 1385 692 1385 685 1386 686 1386 691 1386 685 1387 687 1387 686 1387 686 1388 687 1388 744 1388 688 1389 744 1389 1250 1389 1252 1390 688 1390 1250 1390 1252 1391 694 1391 688 1391 684 1392 52 1392 694 1392 694 1393 52 1393 51 1393 695 1394 51 1394 689 1394 691 1395 689 1395 50 1395 690 1396 691 1396 50 1396 690 1397 692 1397 691 1397 690 1398 49 1398 692 1398 692 1399 49 1399 693 1399 694 1400 51 1400 695 1400 695 1401 689 1401 691 1401 900 1402 693 1402 509 1402 503 1403 900 1403 509 1403 503 1404 696 1404 900 1404 897 1405 42 1405 497 1405 697 1406 698 1406 492 1406 34 1407 699 1407 894 1407 894 1408 699 1408 463 1408 1254 1409 1255 1409 873 1409 873 1410 1255 1410 870 1410 423 1411 870 1411 414 1411 424 1412 414 1412 701 1412 700 1413 701 1413 702 1413 849 1414 702 1414 413 1414 850 1415 413 1415 410 1415 427 1416 410 1416 831 1416 828 1417 831 1417 703 1417 705 1418 703 1418 704 1418 417 1419 704 1419 1303 1419 417 1420 705 1420 704 1420 381 1421 1261 1421 382 1421 382 1422 1261 1422 839 1422 827 1423 839 1423 706 1423 825 1424 706 1424 817 1424 383 1425 817 1425 374 1425 707 1426 374 1426 806 1426 811 1427 806 1427 801 1427 805 1428 801 1428 799 1428 386 1429 799 1429 796 1429 388 1430 796 1430 371 1430 708 1431 371 1431 1294 1431 708 1432 388 1432 371 1432 709 1433 1228 1433 337 1433 337 1434 1228 1434 710 1434 794 1435 710 1435 786 1435 339 1436 786 1436 782 1436 340 1437 782 1437 711 1437 776 1438 711 1438 768 1438 342 1439 768 1439 762 1439 759 1440 762 1440 323 1440 343 1441 759 1441 323 1441 1228 
1442 714 1442 710 1442 710 1443 714 1443 787 1443 786 1444 787 1444 712 1444 782 1445 712 1445 769 1445 711 1446 769 1446 763 1446 768 1447 763 1447 713 1447 762 1448 713 1448 716 1448 323 1449 762 1449 716 1449 714 1450 1229 1450 787 1450 787 1451 1229 1451 717 1451 712 1452 717 1452 771 1452 769 1453 771 1453 770 1453 763 1454 770 1454 715 1454 713 1455 715 1455 718 1455 716 1456 713 1456 718 1456 1229 1457 1268 1457 717 1457 717 1458 1268 1458 777 1458 771 1459 777 1459 772 1459 770 1460 772 1460 764 1460 715 1461 764 1461 324 1461 718 1462 715 1462 324 1462 1268 1463 719 1463 777 1463 777 1464 719 1464 773 1464 772 1465 773 1465 765 1465 764 1466 765 1466 325 1466 324 1467 764 1467 325 1467 719 1468 1271 1468 773 1468 773 1469 1271 1469 721 1469 765 1470 721 1470 720 1470 325 1471 765 1471 720 1471 1271 1472 1230 1472 721 1472 721 1473 1230 1473 326 1473 720 1474 721 1474 326 1474 722 1475 1231 1475 1028 1475 723 1476 724 1476 623 1476 623 1477 724 1477 725 1477 1001 1478 725 1478 997 1478 994 1479 997 1479 726 1479 624 1480 726 1480 612 1480 985 1481 612 1481 727 1481 981 1482 727 1482 729 1482 728 1483 729 1483 730 1483 626 1484 730 1484 963 1484 627 1485 963 1485 609 1485 613 1486 609 1486 731 1486 613 1487 627 1487 609 1487 732 1488 733 1488 576 1488 576 1489 733 1489 971 1489 972 1490 971 1490 957 1490 960 1491 957 1491 954 1491 577 1492 954 1492 734 1492 578 1493 734 1493 735 1493 945 1494 735 1494 937 1494 941 1495 937 1495 936 1495 580 1496 936 1496 565 1496 737 1497 565 1497 564 1497 736 1498 564 1498 1329 1498 736 1499 737 1499 564 1499 531 1500 1285 1500 932 1500 932 1501 1285 1501 522 1501 931 1502 522 1502 521 1502 921 1503 521 1503 922 1503 532 1504 922 1504 520 1504 915 1505 520 1505 519 1505 907 1506 519 1506 738 1506 739 1507 738 1507 518 1507 901 1508 518 1508 517 1508 740 1509 517 1509 741 1509 1322 1510 741 1510 1321 1510 1322 1511 740 1511 741 1511 1288 1512 743 1512 742 1512 742 1513 743 1513 744 1513 687 1514 742 1514 744 1514 743 1515 1250 1515 744 1515 683 1516 681 1516 682 1516 665 1517 656 1517 722 1517 683 1518 679 1518 1026 1518 679 1519 678 1519 676 1519 678 1520 671 1520 745 1520 671 1521 672 1521 669 1521 672 1522 663 1522 661 1522 661 1523 663 1523 667 1523 751 1524 661 1524 667 1524 663 1525 666 1525 667 1525 362 1526 778 1526 370 1526 858 1527 451 1527 862 1527 746 1528 895 1528 747 1528 748 1529 919 1529 558 1529 600 1530 605 1530 606 1530 1341 1531 657 1531 749 1531 749 1532 657 1532 750 1532 999 1533 750 1533 1004 1533 654 1534 1004 1534 1006 1534 1013 1535 1006 1535 1012 1535 1014 1536 1012 1536 1019 1536 1020 1537 1019 1537 751 1537 668 1538 1020 1538 751 1538 753 1539 330 1539 752 1539 343 1540 753 1540 752 1540 755 1541 677 1541 753 1541 323 1542 755 1542 753 1542 329 1543 754 1543 755 1543 716 1544 329 1544 755 1544 757 1545 662 1545 329 1545 718 1546 757 1546 329 1546 756 1547 664 1547 757 1547 324 1548 756 1548 757 1548 758 1549 665 1549 756 1549 325 1550 758 1550 756 1550 327 1551 656 1551 758 1551 720 1552 327 1552 758 1552 767 1553 759 1553 344 1553 760 1554 767 1554 344 1554 346 1555 760 1555 761 1555 342 1556 762 1556 759 1556 768 1557 713 1557 762 1557 763 1558 715 1558 713 1558 770 1559 764 1559 715 1559 772 1560 765 1560 764 1560 773 1561 721 1561 765 1561 352 1562 347 1562 346 1562 766 1563 342 1563 767 1563 775 1564 766 1564 767 1564 347 1565 775 1565 760 1565 776 1566 768 1566 342 1566 711 1567 763 1567 768 1567 769 1568 770 1568 763 1568 771 1569 772 1569 770 1569 777 1570 773 1570 772 1570 780 1571 353 1571 352 1571 353 1572 774 
1572 347 1572 341 1573 776 1573 766 1573 348 1574 341 1574 766 1574 774 1575 348 1575 775 1575 340 1576 711 1576 776 1576 782 1577 769 1577 711 1577 712 1578 771 1578 769 1578 717 1579 777 1579 771 1579 778 1580 779 1580 780 1580 779 1581 781 1581 353 1581 781 1582 783 1582 774 1582 338 1583 340 1583 341 1583 349 1584 338 1584 341 1584 783 1585 349 1585 348 1585 339 1586 782 1586 340 1586 786 1587 712 1587 782 1587 787 1588 717 1588 712 1588 358 1589 779 1589 362 1589 354 1590 781 1590 358 1590 790 1591 783 1591 354 1591 784 1592 349 1592 790 1592 785 1593 339 1593 338 1593 784 1594 785 1594 338 1594 794 1595 786 1595 339 1595 710 1596 787 1596 786 1596 371 1597 788 1597 370 1597 788 1598 363 1598 362 1598 797 1599 788 1599 796 1599 363 1600 359 1600 358 1600 364 1601 363 1601 797 1601 359 1602 791 1602 354 1602 789 1603 359 1603 364 1603 791 1604 792 1604 790 1604 793 1605 791 1605 789 1605 792 1606 333 1606 784 1606 350 1607 792 1607 793 1607 794 1608 785 1608 795 1608 795 1609 785 1609 333 1609 334 1610 333 1610 350 1610 710 1611 794 1611 337 1611 334 1612 336 1612 795 1612 795 1613 336 1613 337 1613 386 1614 796 1614 388 1614 372 1615 797 1615 799 1615 798 1616 364 1616 372 1616 365 1617 789 1617 798 1617 360 1618 793 1618 365 1618 803 1619 350 1619 360 1619 351 1620 334 1620 803 1620 805 1621 799 1621 386 1621 800 1622 372 1622 801 1622 369 1623 798 1623 800 1623 807 1624 365 1624 369 1624 355 1625 360 1625 807 1625 802 1626 803 1626 355 1626 385 1627 805 1627 804 1627 391 1628 385 1628 804 1628 390 1629 391 1629 389 1629 811 1630 801 1630 805 1630 373 1631 800 1631 806 1631 808 1632 369 1632 373 1632 810 1633 807 1633 808 1633 809 1634 355 1634 810 1634 397 1635 812 1635 390 1635 384 1636 811 1636 385 1636 815 1637 384 1637 385 1637 812 1638 815 1638 391 1638 707 1639 806 1639 811 1639 816 1640 373 1640 374 1640 813 1641 808 1641 816 1641 367 1642 810 1642 813 1642 401 1643 400 1643 397 1643 400 1644 814 1644 812 1644 823 1645 707 1645 384 1645 377 1646 823 1646 384 1646 814 1647 377 1647 815 1647 383 1648 374 1648 707 1648 819 1649 816 1649 817 1649 818 1650 813 1650 819 1650 820 1651 402 1651 401 1651 402 1652 398 1652 400 1652 398 1653 821 1653 814 1653 822 1654 383 1654 823 1654 824 1655 822 1655 823 1655 821 1656 824 1656 377 1656 825 1657 817 1657 383 1657 375 1658 819 1658 706 1658 704 1659 829 1659 820 1659 829 1660 406 1660 402 1660 406 1661 826 1661 398 1661 826 1662 392 1662 821 1662 378 1663 825 1663 822 1663 393 1664 378 1664 822 1664 392 1665 393 1665 824 1665 827 1666 706 1666 825 1666 828 1667 703 1667 705 1667 703 1668 830 1668 829 1668 830 1669 832 1669 406 1669 833 1670 830 1670 831 1670 832 1671 399 1671 826 1671 403 1672 832 1672 833 1672 399 1673 834 1673 392 1673 842 1674 399 1674 403 1674 834 1675 835 1675 393 1675 836 1676 834 1676 842 1676 827 1677 378 1677 837 1677 837 1678 378 1678 835 1678 838 1679 835 1679 836 1679 839 1680 827 1680 382 1680 838 1681 840 1681 837 1681 837 1682 840 1682 382 1682 427 1683 831 1683 828 1683 411 1684 833 1684 410 1684 843 1685 403 1685 411 1685 841 1686 842 1686 843 1686 394 1687 836 1687 841 1687 379 1688 838 1688 394 1688 848 1689 427 1689 426 1689 845 1690 848 1690 426 1690 844 1691 845 1691 418 1691 850 1692 410 1692 427 1692 412 1693 411 1693 413 1693 851 1694 843 1694 412 1694 846 1695 841 1695 851 1695 395 1696 394 1696 846 1696 847 1697 433 1697 844 1697 425 1698 850 1698 848 1698 854 1699 425 1699 848 1699 433 1700 854 1700 845 1700 849 1701 413 1701 850 1701 407 1702 412 1702 702 1702 855 1703 851 1703 407 1703 404 
1704 846 1704 855 1704 439 1705 440 1705 847 1705 440 1706 853 1706 433 1706 857 1707 849 1707 425 1707 852 1708 857 1708 425 1708 853 1709 852 1709 854 1709 700 1710 702 1710 849 1710 408 1711 407 1711 701 1711 409 1712 855 1712 408 1712 451 1713 441 1713 439 1713 441 1714 856 1714 440 1714 856 1715 430 1715 853 1715 419 1716 700 1716 857 1716 861 1717 419 1717 857 1717 430 1718 861 1718 852 1718 424 1719 701 1719 700 1719 416 1720 408 1720 414 1720 446 1721 441 1721 858 1721 859 1722 856 1722 446 1722 431 1723 430 1723 859 1723 860 1724 861 1724 431 1724 868 1725 424 1725 419 1725 860 1726 868 1726 419 1726 423 1727 414 1727 424 1727 458 1728 864 1728 862 1728 864 1729 863 1729 858 1729 863 1730 865 1730 446 1730 459 1731 452 1731 864 1731 865 1732 866 1732 859 1732 452 1733 447 1733 863 1733 866 1734 869 1734 431 1734 447 1735 878 1735 865 1735 869 1736 872 1736 860 1736 878 1737 442 1737 866 1737 423 1738 868 1738 867 1738 867 1739 868 1739 872 1739 442 1740 880 1740 869 1740 870 1741 423 1741 873 1741 880 1742 871 1742 872 1742 871 1743 422 1743 867 1743 867 1744 422 1744 873 1744 874 1745 881 1745 458 1745 457 1746 874 1746 458 1746 459 1747 881 1747 882 1747 460 1748 882 1748 884 1748 461 1749 884 1749 875 1749 469 1750 461 1750 875 1750 882 1751 460 1751 459 1751 460 1752 876 1752 452 1752 876 1753 453 1753 447 1753 453 1754 877 1754 878 1754 877 1755 434 1755 442 1755 434 1756 879 1756 880 1756 879 1757 421 1757 871 1757 881 1758 459 1758 458 1758 881 1759 883 1759 882 1759 882 1760 883 1760 885 1760 884 1761 885 1761 470 1761 875 1762 884 1762 470 1762 886 1763 885 1763 883 1763 885 1764 884 1764 882 1764 884 1765 461 1765 460 1765 461 1766 887 1766 876 1766 887 1767 454 1767 453 1767 454 1768 443 1768 877 1768 443 1769 435 1769 434 1769 435 1770 432 1770 879 1770 476 1771 471 1771 886 1771 471 1772 470 1772 885 1772 890 1773 887 1773 469 1773 455 1774 454 1774 890 1774 448 1775 443 1775 455 1775 436 1776 435 1776 448 1776 477 1777 472 1777 476 1777 472 1778 892 1778 471 1778 468 1779 875 1779 889 1779 888 1780 889 1780 473 1780 888 1781 468 1781 889 1781 888 1782 466 1782 468 1782 893 1783 890 1783 467 1783 449 1784 455 1784 893 1784 444 1785 448 1785 449 1785 483 1786 896 1786 477 1786 896 1787 891 1787 472 1787 891 1788 473 1788 892 1788 462 1789 893 1789 894 1789 456 1790 449 1790 462 1790 895 1791 480 1791 483 1791 480 1792 482 1792 896 1792 482 1793 479 1793 891 1793 479 1794 475 1794 473 1794 494 1795 488 1795 747 1795 500 1796 497 1796 697 1796 511 1797 897 1797 500 1797 516 1798 898 1798 511 1798 898 1799 502 1799 897 1799 741 1800 512 1800 516 1800 512 1801 510 1801 898 1801 901 1802 517 1802 740 1802 517 1803 513 1803 512 1803 513 1804 696 1804 510 1804 899 1805 513 1805 518 1805 504 1806 696 1806 899 1806 904 1807 692 1807 900 1807 504 1808 904 1808 900 1808 688 1809 695 1809 686 1809 744 1810 688 1810 686 1810 739 1811 518 1811 901 1811 902 1812 899 1812 738 1812 903 1813 504 1813 902 1813 906 1814 685 1814 904 1814 903 1815 906 1815 904 1815 533 1816 739 1816 535 1816 910 1817 533 1817 535 1817 537 1818 910 1818 524 1818 907 1819 738 1819 739 1819 905 1820 902 1820 519 1820 505 1821 903 1821 905 1821 508 1822 687 1822 906 1822 505 1823 508 1823 906 1823 545 1824 914 1824 537 1824 909 1825 907 1825 533 1825 908 1826 909 1826 533 1826 914 1827 908 1827 910 1827 915 1828 519 1828 907 1828 911 1829 905 1829 520 1829 912 1830 505 1830 911 1830 507 1831 742 1831 508 1831 912 1832 507 1832 508 1832 546 1833 913 1833 545 1833 913 1834 917 1834 914 1834 916 1835 915 1835 909 
1835 538 1836 916 1836 909 1836 917 1837 538 1837 908 1837 532 1838 520 1838 915 1838 918 1839 911 1839 922 1839 506 1840 912 1840 918 1840 919 1841 547 1841 546 1841 547 1842 541 1842 913 1842 541 1843 920 1843 917 1843 925 1844 532 1844 916 1844 525 1845 925 1845 916 1845 920 1846 525 1846 538 1846 921 1847 922 1847 532 1847 514 1848 918 1848 521 1848 548 1849 547 1849 748 1849 923 1850 541 1850 548 1850 539 1851 920 1851 923 1851 526 1852 525 1852 539 1852 924 1853 921 1853 925 1853 526 1854 924 1854 925 1854 931 1855 521 1855 921 1855 564 1856 563 1856 558 1856 563 1857 559 1857 748 1857 926 1858 563 1858 565 1858 559 1859 927 1859 548 1859 554 1860 559 1860 926 1860 927 1861 549 1861 923 1861 553 1862 927 1862 554 1862 549 1863 542 1863 539 1863 928 1864 549 1864 553 1864 542 1865 929 1865 526 1865 930 1866 542 1866 928 1866 931 1867 924 1867 527 1867 527 1868 924 1868 929 1868 935 1869 929 1869 930 1869 522 1870 931 1870 932 1870 935 1871 528 1871 527 1871 527 1872 528 1872 932 1872 580 1873 565 1873 737 1873 566 1874 926 1874 936 1874 560 1875 554 1875 566 1875 934 1876 553 1876 560 1876 933 1877 928 1877 934 1877 540 1878 930 1878 933 1878 529 1879 935 1879 540 1879 941 1880 936 1880 580 1880 942 1881 566 1881 937 1881 939 1882 560 1882 942 1882 938 1883 934 1883 939 1883 940 1884 933 1884 938 1884 543 1885 540 1885 940 1885 946 1886 941 1886 579 1886 571 1887 946 1887 579 1887 944 1888 571 1888 570 1888 945 1889 937 1889 941 1889 561 1890 942 1890 735 1890 947 1891 939 1891 561 1891 948 1892 938 1892 947 1892 550 1893 940 1893 948 1893 586 1894 943 1894 944 1894 572 1895 945 1895 946 1895 952 1896 572 1896 946 1896 943 1897 952 1897 571 1897 578 1898 735 1898 945 1898 953 1899 561 1899 734 1899 555 1900 947 1900 953 1900 551 1901 948 1901 555 1901 592 1902 949 1902 586 1902 949 1903 950 1903 943 1903 951 1904 578 1904 572 1904 573 1905 951 1905 572 1905 950 1906 573 1906 952 1906 577 1907 734 1907 578 1907 562 1908 953 1908 954 1908 556 1909 555 1909 562 1909 605 1910 958 1910 592 1910 958 1911 587 1911 949 1911 587 1912 582 1912 950 1912 961 1913 577 1913 951 1913 955 1914 961 1914 951 1914 582 1915 955 1915 573 1915 960 1916 954 1916 577 1916 956 1917 562 1917 957 1917 599 1918 958 1918 600 1918 959 1919 587 1919 599 1919 583 1920 582 1920 959 1920 962 1921 955 1921 583 1921 968 1922 960 1922 961 1922 962 1923 968 1923 961 1923 972 1924 957 1924 960 1924 609 1925 610 1925 606 1925 610 1926 966 1926 600 1926 607 1927 610 1927 963 1927 966 1928 964 1928 599 1928 965 1929 966 1929 607 1929 964 1930 593 1930 959 1930 967 1931 964 1931 965 1931 593 1932 588 1932 583 1932 594 1933 593 1933 967 1933 588 1934 970 1934 962 1934 589 1935 588 1935 594 1935 972 1936 968 1936 974 1936 974 1937 968 1937 970 1937 969 1938 970 1938 589 1938 971 1939 972 1939 576 1939 969 1940 973 1940 974 1940 974 1941 973 1941 576 1941 626 1942 963 1942 627 1942 975 1943 607 1943 730 1943 601 1944 965 1944 975 1944 976 1945 967 1945 601 1945 977 1946 594 1946 976 1946 584 1947 589 1947 977 1947 574 1948 969 1948 584 1948 728 1949 730 1949 626 1949 611 1950 975 1950 729 1950 602 1951 601 1951 611 1951 595 1952 976 1952 602 1952 590 1953 977 1953 595 1953 585 1954 584 1954 590 1954 616 1955 728 1955 615 1955 978 1956 616 1956 615 1956 628 1957 978 1957 979 1957 981 1958 729 1958 728 1958 608 1959 611 1959 727 1959 603 1960 602 1960 608 1960 984 1961 595 1961 603 1961 596 1962 590 1962 984 1962 640 1963 980 1963 628 1963 625 1964 981 1964 616 1964 982 1965 625 1965 616 1965 980 1966 982 1966 978 1966 985 1967 727 
1967 981 1967 986 1968 608 1968 612 1968 983 1969 603 1969 986 1969 598 1970 984 1970 983 1970 641 1971 633 1971 640 1971 633 1972 989 1972 980 1972 990 1973 985 1973 625 1973 617 1974 990 1974 625 1974 989 1975 617 1975 982 1975 624 1976 612 1976 985 1976 992 1977 986 1977 726 1977 604 1978 983 1978 992 1978 987 1979 993 1979 641 1979 993 1980 988 1980 633 1980 988 1981 629 1981 989 1981 995 1982 624 1982 990 1982 996 1983 995 1983 990 1983 629 1984 996 1984 617 1984 994 1985 726 1985 624 1985 991 1986 992 1986 997 1986 749 1987 651 1987 987 1987 651 1988 644 1988 993 1988 644 1989 1000 1989 988 1989 1000 1990 630 1990 629 1990 1002 1991 994 1991 995 1991 631 1992 1002 1992 995 1992 630 1993 631 1993 996 1993 1001 1994 997 1994 994 1994 750 1995 999 1995 749 1995 999 1996 652 1996 651 1996 652 1997 998 1997 644 1997 1004 1998 654 1998 999 1998 998 1999 634 1999 1000 1999 654 2000 653 2000 652 2000 634 2001 635 2001 630 2001 653 2002 642 2002 998 2002 635 2003 1003 2003 631 2003 642 2004 636 2004 634 2004 1001 2005 1002 2005 618 2005 618 2006 1002 2006 1003 2006 636 2007 1010 2007 635 2007 725 2008 1001 2008 623 2008 1010 2009 620 2009 1003 2009 620 2010 619 2010 618 2010 618 2011 619 2011 623 2011 750 2012 659 2012 1004 2012 1004 2013 659 2013 1005 2013 1006 2014 1005 2014 1007 2014 1012 2015 1007 2015 1018 2015 1019 2016 1018 2016 1024 2016 751 2017 1019 2017 1024 2017 660 2018 1005 2018 659 2018 1005 2019 1006 2019 1004 2019 1006 2020 1013 2020 654 2020 1013 2021 1008 2021 653 2021 1008 2022 645 2022 642 2022 645 2023 1009 2023 636 2023 1009 2024 637 2024 1010 2024 637 2025 621 2025 620 2025 673 2026 1011 2026 660 2026 1011 2027 1007 2027 1005 2027 1007 2028 1012 2028 1006 2028 1012 2029 1014 2029 1013 2029 1014 2030 1015 2030 1008 2030 1015 2031 646 2031 645 2031 646 2032 1022 2032 1009 2032 1022 2033 639 2033 637 2033 674 2034 1016 2034 673 2034 1016 2035 1017 2035 1011 2035 1017 2036 1018 2036 1007 2036 1018 2037 1019 2037 1012 2037 1019 2038 1020 2038 1014 2038 1020 2039 647 2039 1015 2039 647 2040 1025 2040 646 2040 1025 2041 1021 2041 1022 2041 1023 2042 675 2042 674 2042 675 2043 670 2043 1016 2043 670 2044 1027 2044 1017 2044 1027 2045 1024 2045 1018 2045 648 2046 647 2046 668 2046 650 2047 1025 2047 648 2047 681 2048 1026 2048 1023 2048 1026 2049 676 2049 675 2049 676 2050 745 2050 670 2050 745 2051 669 2051 1027 2051 1028 2052 648 2052 655 2052 1029 2053 1034 2053 1167 2053 1029 2054 1032 2054 1034 2054 1029 2055 208 2055 1032 2055 1032 2056 208 2056 1030 2056 1031 2057 1030 2057 1175 2057 1031 2058 1032 2058 1030 2058 1031 2059 1033 2059 1032 2059 1032 2060 1033 2060 1034 2060 1034 2061 1033 2061 1168 2061 1165 2062 1168 2062 1184 2062 1164 2063 1184 2063 1193 2063 1162 2064 1193 2064 1194 2064 1035 2065 1194 2065 1195 2065 1036 2066 1195 2066 1197 2066 1158 2067 1197 2067 1037 2067 1159 2068 1037 2068 1186 2068 1157 2069 1186 2069 1038 2069 1156 2070 1038 2070 1039 2070 1040 2071 1039 2071 1187 2071 1041 2072 1187 2072 1189 2072 1154 2073 1189 2073 1042 2073 1153 2074 1042 2074 1043 2074 1151 2075 1043 2075 1200 2075 1044 2076 1200 2076 1202 2076 1150 2077 1202 2077 1190 2077 1045 2078 1190 2078 1191 2078 1148 2079 1191 2079 1046 2079 1047 2080 1046 2080 1192 2080 1048 2081 1192 2081 1144 2081 1142 2082 1144 2082 1049 2082 1140 2083 1049 2083 1201 2083 1138 2084 1201 2084 1137 2084 1136 2085 1137 2085 1134 2085 1135 2086 1134 2086 1050 2086 1133 2087 1050 2087 1188 2087 1132 2088 1188 2088 1131 2088 1130 2089 1131 2089 1199 2089 1128 2090 1199 2090 1198 2090 1051 2091 1198 
2091 1127 2091 1126 2092 1127 2092 1125 2092 1122 2093 1125 2093 1196 2093 1123 2094 1196 2094 1121 2094 1120 2095 1121 2095 1185 2095 1052 2096 1185 2096 1054 2096 1053 2097 1054 2097 1183 2097 1118 2098 1183 2098 1169 2098 1055 2099 1169 2099 1056 2099 1117 2100 1056 2100 1174 2100 1057 2101 1174 2101 1113 2101 1112 2102 1113 2102 1110 2102 1111 2103 1110 2103 1108 2103 1109 2104 1108 2104 1058 2104 1106 2105 1058 2105 1177 2105 1104 2106 1177 2106 1060 2106 1059 2107 1060 2107 1173 2107 1102 2108 1173 2108 1061 2108 1101 2109 1061 2109 1181 2109 1099 2110 1181 2110 1062 2110 1100 2111 1062 2111 1063 2111 1064 2112 1063 2112 1065 2112 1066 2113 1065 2113 1067 2113 1095 2114 1067 2114 1068 2114 1093 2115 1068 2115 1094 2115 1069 2116 1094 2116 1070 2116 1090 2117 1070 2117 1182 2117 1071 2118 1182 2118 1072 2118 1087 2119 1072 2119 1180 2119 1073 2120 1180 2120 1179 2120 1085 2121 1179 2121 1178 2121 1084 2122 1178 2122 1172 2122 1083 2123 1172 2123 1074 2123 1081 2124 1074 2124 1176 2124 1075 2125 1176 2125 1171 2125 1078 2126 1171 2126 1170 2126 1077 2127 1170 2127 1175 2127 1030 2128 1077 2128 1175 2128 1030 2129 207 2129 1077 2129 1030 2130 208 2130 207 2130 207 2131 1076 2131 1077 2131 1077 2132 1076 2132 1078 2132 1170 2133 1077 2133 1078 2133 1076 2134 1079 2134 1078 2134 1078 2135 1079 2135 1075 2135 1171 2136 1078 2136 1075 2136 1079 2137 1080 2137 1075 2137 1075 2138 1080 2138 1081 2138 1176 2139 1075 2139 1081 2139 1080 2140 203 2140 1081 2140 1081 2141 203 2141 1083 2141 1074 2142 1081 2142 1083 2142 203 2143 1082 2143 1083 2143 1083 2144 1082 2144 1084 2144 1172 2145 1083 2145 1084 2145 1082 2146 200 2146 1084 2146 1084 2147 200 2147 1085 2147 1178 2148 1084 2148 1085 2148 200 2149 199 2149 1085 2149 1085 2150 199 2150 1073 2150 1179 2151 1085 2151 1073 2151 199 2152 1086 2152 1073 2152 1073 2153 1086 2153 1087 2153 1180 2154 1073 2154 1087 2154 1086 2155 1088 2155 1087 2155 1087 2156 1088 2156 1071 2156 1072 2157 1087 2157 1071 2157 1088 2158 1089 2158 1071 2158 1071 2159 1089 2159 1090 2159 1182 2160 1071 2160 1090 2160 1089 2161 1091 2161 1090 2161 1090 2162 1091 2162 1069 2162 1070 2163 1090 2163 1069 2163 1091 2164 1092 2164 1069 2164 1069 2165 1092 2165 1093 2165 1094 2166 1069 2166 1093 2166 1092 2167 192 2167 1093 2167 1093 2168 192 2168 1095 2168 1068 2169 1093 2169 1095 2169 192 2170 1096 2170 1095 2170 1095 2171 1096 2171 1066 2171 1067 2172 1095 2172 1066 2172 1096 2173 1097 2173 1066 2173 1066 2174 1097 2174 1064 2174 1065 2175 1066 2175 1064 2175 1097 2176 188 2176 1064 2176 1064 2177 188 2177 1100 2177 1063 2178 1064 2178 1100 2178 188 2179 1098 2179 1100 2179 1100 2180 1098 2180 1099 2180 1062 2181 1100 2181 1099 2181 1098 2182 184 2182 1099 2182 1099 2183 184 2183 1101 2183 1181 2184 1099 2184 1101 2184 184 2185 183 2185 1101 2185 1101 2186 183 2186 1102 2186 1061 2187 1101 2187 1102 2187 183 2188 1103 2188 1102 2188 1102 2189 1103 2189 1059 2189 1173 2190 1102 2190 1059 2190 1103 2191 182 2191 1059 2191 1059 2192 182 2192 1104 2192 1060 2193 1059 2193 1104 2193 182 2194 1105 2194 1104 2194 1104 2195 1105 2195 1106 2195 1177 2196 1104 2196 1106 2196 1105 2197 1107 2197 1106 2197 1106 2198 1107 2198 1109 2198 1058 2199 1106 2199 1109 2199 1107 2200 178 2200 1109 2200 1109 2201 178 2201 1111 2201 1108 2202 1109 2202 1111 2202 178 2203 176 2203 1111 2203 1111 2204 176 2204 1112 2204 1110 2205 1111 2205 1112 2205 176 2206 1114 2206 1112 2206 1112 2207 1114 2207 1057 2207 1113 2208 1112 2208 1057 2208 1114 2209 1115 2209 1057 2209 1057 2210 1115 2210 1117 2210 
1174 2211 1057 2211 1117 2211 1115 2212 1116 2212 1117 2212 1117 2213 1116 2213 1055 2213 1056 2214 1117 2214 1055 2214 1116 2215 173 2215 1055 2215 1055 2216 173 2216 1118 2216 1169 2217 1055 2217 1118 2217 173 2218 171 2218 1118 2218 1118 2219 171 2219 1053 2219 1183 2220 1118 2220 1053 2220 171 2221 169 2221 1053 2221 1053 2222 169 2222 1052 2222 1054 2223 1053 2223 1052 2223 169 2224 1119 2224 1052 2224 1052 2225 1119 2225 1120 2225 1185 2226 1052 2226 1120 2226 1119 2227 166 2227 1120 2227 1120 2228 166 2228 1123 2228 1121 2229 1120 2229 1123 2229 166 2230 165 2230 1123 2230 1123 2231 165 2231 1122 2231 1196 2232 1123 2232 1122 2232 165 2233 1124 2233 1122 2233 1122 2234 1124 2234 1126 2234 1125 2235 1122 2235 1126 2235 1124 2236 164 2236 1126 2236 1126 2237 164 2237 1051 2237 1127 2238 1126 2238 1051 2238 164 2239 163 2239 1051 2239 1051 2240 163 2240 1128 2240 1198 2241 1051 2241 1128 2241 163 2242 1129 2242 1128 2242 1128 2243 1129 2243 1130 2243 1199 2244 1128 2244 1130 2244 1129 2245 159 2245 1130 2245 1130 2246 159 2246 1132 2246 1131 2247 1130 2247 1132 2247 159 2248 158 2248 1132 2248 1132 2249 158 2249 1133 2249 1188 2250 1132 2250 1133 2250 158 2251 157 2251 1133 2251 1133 2252 157 2252 1135 2252 1050 2253 1133 2253 1135 2253 157 2254 156 2254 1135 2254 1135 2255 156 2255 1136 2255 1134 2256 1135 2256 1136 2256 156 2257 155 2257 1136 2257 1136 2258 155 2258 1138 2258 1137 2259 1136 2259 1138 2259 155 2260 1139 2260 1138 2260 1138 2261 1139 2261 1140 2261 1201 2262 1138 2262 1140 2262 1139 2263 1141 2263 1140 2263 1140 2264 1141 2264 1142 2264 1049 2265 1140 2265 1142 2265 1141 2266 1143 2266 1142 2266 1142 2267 1143 2267 1048 2267 1144 2268 1142 2268 1048 2268 1143 2269 1145 2269 1048 2269 1048 2270 1145 2270 1047 2270 1192 2271 1048 2271 1047 2271 1145 2272 1146 2272 1047 2272 1047 2273 1146 2273 1148 2273 1046 2274 1047 2274 1148 2274 1146 2275 1147 2275 1148 2275 1148 2276 1147 2276 1045 2276 1191 2277 1148 2277 1045 2277 1147 2278 1149 2278 1045 2278 1045 2279 1149 2279 1150 2279 1190 2280 1045 2280 1150 2280 1149 2281 147 2281 1150 2281 1150 2282 147 2282 1044 2282 1202 2283 1150 2283 1044 2283 147 2284 1152 2284 1044 2284 1044 2285 1152 2285 1151 2285 1200 2286 1044 2286 1151 2286 1152 2287 144 2287 1151 2287 1151 2288 144 2288 1153 2288 1043 2289 1151 2289 1153 2289 144 2290 142 2290 1153 2290 1153 2291 142 2291 1154 2291 1042 2292 1153 2292 1154 2292 142 2293 139 2293 1154 2293 1154 2294 139 2294 1041 2294 1189 2295 1154 2295 1041 2295 139 2296 136 2296 1041 2296 1041 2297 136 2297 1040 2297 1187 2298 1041 2298 1040 2298 136 2299 1155 2299 1040 2299 1040 2300 1155 2300 1156 2300 1039 2301 1040 2301 1156 2301 1155 2302 135 2302 1156 2302 1156 2303 135 2303 1157 2303 1038 2304 1156 2304 1157 2304 135 2305 134 2305 1157 2305 1157 2306 134 2306 1159 2306 1186 2307 1157 2307 1159 2307 134 2308 131 2308 1159 2308 1159 2309 131 2309 1158 2309 1037 2310 1159 2310 1158 2310 131 2311 1160 2311 1158 2311 1158 2312 1160 2312 1036 2312 1197 2313 1158 2313 1036 2313 1160 2314 1161 2314 1036 2314 1036 2315 1161 2315 1035 2315 1195 2316 1036 2316 1035 2316 1161 2317 130 2317 1035 2317 1035 2318 130 2318 1162 2318 1194 2319 1035 2319 1162 2319 130 2320 1163 2320 1162 2320 1162 2321 1163 2321 1164 2321 1193 2322 1162 2322 1164 2322 1163 2323 1166 2323 1164 2323 1164 2324 1166 2324 1165 2324 1184 2325 1164 2325 1165 2325 1166 2326 1167 2326 1165 2326 1165 2327 1167 2327 1034 2327 1168 2328 1165 2328 1034 2328 1168 2329 1183 2329 1184 2329 1168 2330 1169 2330 1183 2330 1168 2331 1033 
(Raw numeric index data from a mesh/model geometry asset added in this diff; no readable diff context — file path, hunk headers, or '+' markers — survives for this span, only the integer payload.)
1809 4714 1817 4714 1804 4715 1803 4715 1796 4715 2377 4716 1804 4716 2376 4716 1803 4717 1798 4717 1797 4717 1799 4718 1803 4718 2377 4718 1798 4719 2373 4719 2369 4719 2380 4720 1798 4720 1799 4720 2373 4721 1780 4721 2372 4721 1786 4722 2373 4722 2380 4722 2381 4723 1816 4723 2370 4723 2374 4724 1817 4724 2375 4724 2378 4725 2376 4725 2374 4725 1805 4726 2377 4726 2378 4726 2379 4727 1799 4727 1805 4727 1791 4728 2380 4728 2379 4728 1828 4729 2381 4729 1841 4729 1827 4730 1828 4730 1841 4730 1846 4731 1827 4731 1826 4731 1840 4732 2375 4732 2381 4732 1818 4733 2374 4733 2382 4733 1810 4734 2378 4734 1818 4734 1806 4735 1805 4735 1810 4735 2383 4736 2379 4736 1806 4736 1847 4737 1842 4737 1846 4737 2384 4738 1840 4738 1828 4738 2385 4739 2384 4739 1828 4739 1842 4740 2385 4740 1827 4740 1839 4741 2382 4741 1840 4741 1819 4742 1818 4742 2389 4742 2386 4743 1810 4743 1819 4743 2387 4744 1806 4744 2386 4744 1858 4745 1851 4745 1847 4745 1851 4746 2388 4746 1842 4746 1829 4747 1839 4747 2384 4747 1830 4748 1829 4748 2384 4748 2388 4749 1830 4749 2385 4749 2002 4750 2389 4750 1839 4750 1821 4751 1819 4751 1820 4751 1811 4752 2386 4752 1821 4752 1859 4753 2390 4753 1858 4753 2390 4754 2391 4754 1851 4754 2391 4755 1848 4755 2388 4755 2394 4756 2002 4756 1829 4756 1831 4757 2394 4757 1829 4757 1848 4758 1831 4758 1830 4758 1838 4759 1820 4759 2002 4759 2392 4760 1821 4760 2001 4760 2395 4761 2393 4761 1859 4761 2393 4762 1852 4762 2390 4762 1852 4763 2399 4763 2391 4763 2399 4764 1849 4764 1848 4764 1833 4765 1838 4765 2394 4765 1832 4766 1833 4766 2394 4766 1849 4767 1832 4767 1831 4767 2401 4768 2001 4768 1838 4768 1871 4769 2396 4769 2395 4769 2396 4770 2397 4770 2393 4770 2397 4771 2398 4771 1852 4771 2398 4772 2404 4772 2399 4772 2404 4773 2405 4773 1849 4773 2400 4774 2401 4774 1833 4774 1843 4775 2400 4775 1833 4775 2405 4776 1843 4776 1832 4776 1837 4777 1822 4777 2401 4777 1875 4778 1872 4778 1871 4778 1872 4779 1863 4779 2396 4779 1863 4780 1864 4780 2397 4780 1864 4781 2402 4781 2398 4781 2402 4782 2403 4782 2404 4782 2403 4783 1844 4783 2405 4783 2407 4784 1837 4784 2400 4784 2406 4785 2407 4785 2400 4785 1844 4786 2406 4786 1843 4786 1892 4787 1876 4787 1875 4787 1876 4788 1873 4788 1872 4788 1873 4789 2408 4789 1863 4789 2408 4790 1860 4790 1864 4790 1860 4791 2414 4791 2402 4791 2414 4792 2409 4792 2403 4792 2409 4793 2410 4793 1844 4793 2410 4794 1836 4794 2406 4794 2415 4795 2411 4795 1892 4795 2411 4796 1877 4796 1876 4796 1877 4797 2417 4797 1873 4797 2417 4798 2412 4798 2408 4798 2412 4799 2413 4799 1860 4799 2413 4800 1853 4800 2414 4800 1853 4801 1850 4801 2409 4801 1850 4802 1845 4802 2410 4802 1901 4803 1894 4803 2415 4803 1894 4804 2416 4804 2411 4804 2416 4805 1879 4805 1877 4805 1879 4806 1878 4806 2417 4806 1878 4807 1865 4807 2412 4807 1865 4808 1866 4808 2413 4808 1866 4809 1854 4809 1853 4809 1854 4810 1855 4810 1850 4810 1908 4811 2418 4811 1901 4811 2418 4812 1895 4812 1894 4812 1895 4813 2419 4813 2416 4813 2419 4814 1883 4814 1879 4814 1883 4815 2420 4815 1878 4815 2420 4816 2423 4816 1865 4816 2423 4817 2421 4817 1866 4817 2421 4818 1856 4818 1854 4818 1918 4819 2424 4819 1908 4819 2424 4820 1902 4820 2418 4820 1902 4821 1896 4821 1895 4821 1896 4822 1884 4822 2419 4822 1884 4823 1885 4823 1883 4823 1885 4824 2422 4824 2420 4824 2422 4825 1867 4825 2423 4825 1867 4826 1861 4826 2421 4826 1928 4827 1912 4827 1918 4827 1912 4828 1909 4828 2424 4828 1909 4829 1903 4829 1902 4829 1903 4830 2425 4830 1896 4830 2425 4831 1887 4831 1884 4831 1887 4832 1886 4832 1885 
4832 1886 4833 1874 4833 2422 4833 1874 4834 1869 4834 1867 4834 2426 4835 2428 4835 1928 4835 2428 4836 1913 4836 1912 4836 1913 4837 1910 4837 1909 4837 1910 4838 2429 4838 1903 4838 2429 4839 1904 4839 2425 4839 1904 4840 1889 4840 1887 4840 1889 4841 1888 4841 1886 4841 1888 4842 1880 4842 1874 4842 2431 4843 1929 4843 2426 4843 1929 4844 2427 4844 2428 4844 2427 4845 1920 4845 1913 4845 1920 4846 2433 4846 1910 4846 2433 4847 1905 4847 2429 4847 1905 4848 1897 4848 1904 4848 1897 4849 1898 4849 1889 4849 1898 4850 1882 4850 1888 4850 2430 4851 1939 4851 2431 4851 1939 4852 2432 4852 1929 4852 2432 4853 1921 4853 2427 4853 1921 4854 1922 4854 1920 4854 1922 4855 1914 4855 2433 4855 1914 4856 2437 4856 1905 4856 2437 4857 1899 4857 1897 4857 1899 4858 1891 4858 1898 4858 1944 4859 1940 4859 2430 4859 1940 4860 2434 4860 1939 4860 2434 4861 2435 4861 2432 4861 2435 4862 1923 4862 1921 4862 1923 4863 2439 4863 1922 4863 2439 4864 2436 4864 1914 4864 2436 4865 2438 4865 2437 4865 2438 4866 1900 4866 1899 4866 1954 4867 2440 4867 1944 4867 2440 4868 2442 4868 1940 4868 2442 4869 2443 4869 2434 4869 2443 4870 1934 4870 2435 4870 1934 4871 1924 4871 1923 4871 1924 4872 1916 4872 2439 4872 1916 4873 1915 4873 2436 4873 1915 4874 1907 4874 2438 4874 2448 4875 1955 4875 1954 4875 1955 4876 1945 4876 2440 4876 1945 4877 2441 4877 2442 4877 2441 4878 2444 4878 2443 4878 2444 4879 2445 4879 1934 4879 2445 4880 2446 4880 1924 4880 2446 4881 1925 4881 1916 4881 1925 4882 2447 4882 1915 4882 1973 4883 1966 4883 2448 4883 1966 4884 2451 4884 1955 4884 2451 4885 1956 4885 1945 4885 1956 4886 1941 4886 2441 4886 1941 4887 2449 4887 2444 4887 2449 4888 2450 4888 2445 4888 2450 4889 1926 4889 2446 4889 1926 4890 1917 4890 1925 4890 1976 4891 1967 4891 1973 4891 1967 4892 1961 4892 1966 4892 1961 4893 2452 4893 2451 4893 2452 4894 2455 4894 1956 4894 2455 4895 1942 4895 1941 4895 1942 4896 1935 4896 2449 4896 1935 4897 1930 4897 2450 4897 1930 4898 1927 4898 1926 4898 2048 4899 2457 4899 1976 4899 2457 4900 2453 4900 1967 4900 2453 4901 2454 4901 1961 4901 2454 4902 1958 4902 2452 4902 1958 4903 1957 4903 2455 4903 1957 4904 1946 4904 1942 4904 1946 4905 1943 4905 1935 4905 1943 4906 1937 4906 1930 4906 1982 4907 2459 4907 2048 4907 2459 4908 2456 4908 2457 4908 2456 4909 2458 4909 2453 4909 2465 4910 2461 4910 2459 4910 2458 4911 2460 4911 2454 4911 2461 4912 1977 4912 2456 4912 2460 4913 1959 4913 1958 4913 1977 4914 1969 4914 2458 4914 1959 4915 1947 4915 1957 4915 1969 4916 1968 4916 2460 4916 1947 4917 2462 4917 1946 4917 1968 4918 2463 4918 1959 4918 2462 4919 1936 4919 1943 4919 2463 4920 2468 4920 1947 4920 2468 4921 1948 4921 2462 4921 1982 4922 2466 4922 2465 4922 2465 4923 2466 4923 2464 4923 2470 4924 2465 4924 2464 4924 1989 4925 2464 4925 2466 4925 2467 4926 1977 4926 1998 4926 1974 4927 1969 4927 2467 4927 1962 4928 1968 4928 1974 4928 2474 4929 2463 4929 1962 4929 1949 4930 2468 4930 2474 4930 1991 4931 2469 4931 1989 4931 2472 4932 2470 4932 1999 4932 2471 4933 1999 4933 1983 4933 2471 4934 2472 4934 1999 4934 2471 4935 2473 4935 2472 4935 1978 4936 2467 4936 1979 4936 1970 4937 1974 4937 1978 4937 1971 4938 1962 4938 1970 4938 1960 4939 2474 4939 1971 4939 1994 4940 1995 4940 1991 4940 1980 4941 1978 4941 2475 4941 1972 4942 1970 4942 1980 4942 1963 4943 1971 4943 1972 4943 2526 4944 2746 4944 2542 4944 2525 4945 2542 4945 2476 4945 2520 4946 2476 4946 2479 4946 2511 4947 2479 4947 2477 4947 2506 4948 2477 4948 2486 4948 2568 4949 2486 4949 2480 4949 2496 4950 2480 4950 2478 4950 2570 4951 
2478 4951 2483 4951 2528 4952 2483 4952 2529 4952 2530 4953 2529 4953 3057 4953 3076 4954 2476 4954 2541 4954 3076 4955 2479 4955 2476 4955 3076 4956 3074 4956 2479 4956 2479 4957 3074 4957 2477 4957 2477 4958 3074 4958 3070 4958 2486 4959 3070 4959 3069 4959 2480 4960 3069 4960 2481 4960 2478 4961 2481 4961 2482 4961 2483 4962 2482 4962 2484 4962 2529 4963 2484 4963 2485 4963 3057 4964 2529 4964 2485 4964 2477 4965 3070 4965 2486 4965 2486 4966 3069 4966 2480 4966 2480 4967 2481 4967 2478 4967 2478 4968 2482 4968 2483 4968 2483 4969 2484 4969 2529 4969 3057 4970 2617 4970 2535 4970 3057 4971 2535 4971 2546 4971 3057 4972 2546 4972 2487 4972 3057 4973 2487 4973 2488 4973 3057 4974 2488 4974 2533 4974 3057 4975 2533 4975 2489 4975 3057 4976 2489 4976 2531 4976 3057 4977 2531 4977 2530 4977 2611 4978 2534 4978 2490 4978 2611 4979 2491 4979 2534 4979 2611 4980 2610 4980 2491 4980 2491 4981 2610 4981 2492 4981 2539 4982 2492 4982 2493 4982 2494 4983 2493 4983 2495 4983 2556 4984 2495 4984 2554 4984 2555 4985 2554 4985 2553 4985 2566 4986 2553 4986 2499 4986 2565 4987 2499 4987 2564 4987 2570 4988 2564 4988 2496 4988 2478 4989 2570 4989 2496 4989 2610 4990 2500 4990 2492 4990 2492 4991 2500 4991 2502 4991 2493 4992 2502 4992 2497 4992 2495 4993 2497 4993 2551 4993 2554 4994 2551 4994 2498 4994 2553 4995 2498 4995 2504 4995 2499 4996 2504 4996 2563 4996 2564 4997 2563 4997 2569 4997 2496 4998 2569 4998 2568 4998 2480 4999 2496 4999 2568 4999 2500 5000 2501 5000 2502 5000 2502 5001 2501 5001 2545 5001 2497 5002 2545 5002 2549 5002 2551 5003 2549 5003 2503 5003 2498 5004 2503 5004 2509 5004 2504 5005 2509 5005 2560 5005 2563 5006 2560 5006 2559 5006 2569 5007 2559 5007 2505 5007 2568 5008 2505 5008 2506 5008 2486 5009 2568 5009 2506 5009 2501 5010 2507 5010 2545 5010 2545 5011 2507 5011 2512 5011 2549 5012 2512 5012 2508 5012 2503 5013 2508 5013 2548 5013 2509 5014 2548 5014 2552 5014 2560 5015 2552 5015 2558 5015 2559 5016 2558 5016 2510 5016 2505 5017 2510 5017 2562 5017 2506 5018 2562 5018 2511 5018 2477 5019 2506 5019 2511 5019 2507 5020 2593 5020 2512 5020 2512 5021 2593 5021 2544 5021 2508 5022 2544 5022 2513 5022 2548 5023 2513 5023 2515 5023 2552 5024 2515 5024 2550 5024 2558 5025 2550 5025 2518 5025 2510 5026 2518 5026 2557 5026 2562 5027 2557 5027 2514 5027 2511 5028 2514 5028 2520 5028 2479 5029 2511 5029 2520 5029 2593 5030 2591 5030 2544 5030 2544 5031 2591 5031 2543 5031 2513 5032 2543 5032 2527 5032 2515 5033 2527 5033 2516 5033 2550 5034 2516 5034 2517 5034 2518 5035 2517 5035 2519 5035 2557 5036 2519 5036 2561 5036 2514 5037 2561 5037 2567 5037 2520 5038 2567 5038 2525 5038 2476 5039 2520 5039 2525 5039 2591 5040 2572 5040 2543 5040 2543 5041 2572 5041 2774 5041 2527 5042 2774 5042 2521 5042 2516 5043 2521 5043 2522 5043 2523 5044 2516 5044 2522 5044 2523 5045 2517 5045 2516 5045 2523 5046 2764 5046 2517 5046 2517 5047 2764 5047 2519 5047 2519 5048 2764 5048 2524 5048 2561 5049 2524 5049 2760 5049 2567 5050 2760 5050 2715 5050 2525 5051 2715 5051 2526 5051 2542 5052 2525 5052 2526 5052 2543 5053 2774 5053 2527 5053 2527 5054 2521 5054 2516 5054 2519 5055 2524 5055 2561 5055 2561 5056 2760 5056 2567 5056 2567 5057 2715 5057 2525 5057 2528 5058 2529 5058 2530 5058 2536 5059 2530 5059 2531 5059 2532 5060 2531 5060 2489 5060 2537 5061 2489 5061 2533 5061 2538 5062 2533 5062 2488 5062 2540 5063 2488 5063 2487 5063 2547 5064 2487 5064 2546 5064 2534 5065 2546 5065 2535 5065 2490 5066 2535 5066 2617 5066 2490 5067 2534 5067 2535 5067 2570 5068 2483 5068 2528 5068 2565 5069 2528 5069 2536 
5069 2566 5070 2536 5070 2532 5070 2555 5071 2532 5071 2537 5071 2556 5072 2537 5072 2538 5072 2494 5073 2538 5073 2540 5073 2539 5074 2540 5074 2547 5074 2491 5075 2547 5075 2534 5075 2491 5076 2539 5076 2547 5076 2491 5077 2492 5077 2539 5077 2746 5078 2541 5078 2542 5078 2542 5079 2541 5079 2476 5079 2513 5080 2544 5080 2543 5080 2508 5081 2512 5081 2544 5081 2549 5082 2545 5082 2512 5082 2497 5083 2502 5083 2545 5083 2493 5084 2492 5084 2502 5084 2546 5085 2534 5085 2547 5085 2515 5086 2513 5086 2527 5086 2548 5087 2508 5087 2513 5087 2503 5088 2549 5088 2508 5088 2551 5089 2497 5089 2549 5089 2495 5090 2493 5090 2497 5090 2540 5091 2539 5091 2494 5091 2494 5092 2539 5092 2493 5092 2487 5093 2547 5093 2540 5093 2550 5094 2515 5094 2516 5094 2552 5095 2548 5095 2515 5095 2509 5096 2503 5096 2548 5096 2498 5097 2551 5097 2503 5097 2554 5098 2495 5098 2551 5098 2538 5099 2494 5099 2556 5099 2556 5100 2494 5100 2495 5100 2488 5101 2540 5101 2538 5101 2518 5102 2550 5102 2517 5102 2558 5103 2552 5103 2550 5103 2560 5104 2509 5104 2552 5104 2504 5105 2498 5105 2509 5105 2553 5106 2554 5106 2498 5106 2537 5107 2556 5107 2555 5107 2555 5108 2556 5108 2554 5108 2533 5109 2538 5109 2537 5109 2557 5110 2518 5110 2519 5110 2510 5111 2558 5111 2518 5111 2559 5112 2560 5112 2558 5112 2563 5113 2504 5113 2560 5113 2499 5114 2553 5114 2504 5114 2532 5115 2555 5115 2566 5115 2566 5116 2555 5116 2553 5116 2489 5117 2537 5117 2532 5117 2514 5118 2557 5118 2561 5118 2562 5119 2510 5119 2557 5119 2505 5120 2559 5120 2510 5120 2569 5121 2563 5121 2559 5121 2564 5122 2499 5122 2563 5122 2536 5123 2566 5123 2565 5123 2565 5124 2566 5124 2499 5124 2531 5125 2532 5125 2536 5125 2520 5126 2514 5126 2567 5126 2511 5127 2562 5127 2514 5127 2506 5128 2505 5128 2562 5128 2568 5129 2569 5129 2505 5129 2496 5130 2564 5130 2569 5130 2528 5131 2565 5131 2570 5131 2570 5132 2565 5132 2564 5132 2530 5133 2536 5133 2528 5133 2572 5134 2571 5134 2584 5134 2572 5135 2634 5135 2571 5135 2572 5136 2637 5136 2634 5136 2572 5137 2573 5137 2637 5137 2572 5138 2636 5138 2573 5138 2572 5139 2574 5139 2636 5139 2572 5140 2591 5140 2574 5140 2574 5141 2591 5141 2592 5141 2632 5142 2592 5142 2645 5142 2646 5143 2645 5143 2575 5143 2655 5144 2575 5144 2666 5144 2668 5145 2666 5145 2576 5145 2667 5146 2576 5146 2577 5146 2688 5147 2577 5147 2595 5147 2689 5148 2595 5148 2596 5148 2702 5149 2596 5149 2701 5149 2705 5150 2701 5150 3332 5150 2704 5151 3332 5151 2618 5151 2703 5152 2618 5152 2621 5152 2690 5153 2621 5153 2631 5153 2691 5154 2631 5154 2578 5154 2677 5155 2578 5155 2579 5155 2590 5156 2579 5156 2580 5156 2660 5157 2580 5157 2624 5157 2652 5158 2624 5158 2627 5158 2642 5159 2627 5159 2581 5159 2641 5160 2581 5160 2630 5160 2584 5161 2630 5161 2629 5161 2584 5162 2641 5162 2630 5162 2584 5163 2582 5163 2641 5163 2584 5164 2643 5164 2582 5164 2584 5165 2583 5165 2643 5165 2584 5166 2638 5166 2583 5166 2584 5167 2571 5167 2638 5167 2638 5168 2571 5168 2586 5168 2585 5169 2586 5169 2650 5169 2587 5170 2650 5170 2589 5170 2588 5171 2589 5171 2665 5171 2590 5172 2665 5172 2677 5172 2579 5173 2590 5173 2677 5173 2591 5174 2593 5174 2592 5174 2592 5175 2593 5175 2644 5175 2645 5176 2644 5176 2653 5176 2575 5177 2653 5177 2661 5177 2666 5178 2661 5178 2594 5178 2576 5179 2594 5179 2687 5179 2577 5180 2687 5180 2686 5180 2595 5181 2686 5181 2695 5181 2596 5182 2695 5182 2599 5182 2701 5183 2599 5183 3332 5183 2701 5184 2596 5184 2599 5184 2593 5185 2507 5185 2644 5185 2644 5186 2507 5186 2600 5186 2653 5187 2600 5187 2602 5187 2661 5188 
2602 5188 2603 5188 2594 5189 2603 5189 2604 5189 2687 5190 2604 5190 2597 5190 2686 5191 2597 5191 2607 5191 2695 5192 2607 5192 2598 5192 2599 5193 2598 5193 3332 5193 2599 5194 2695 5194 2598 5194 2507 5195 2501 5195 2600 5195 2600 5196 2501 5196 2601 5196 2602 5197 2601 5197 2608 5197 2603 5198 2608 5198 2605 5198 2604 5199 2605 5199 2685 5199 2597 5200 2685 5200 2606 5200 2607 5201 2606 5201 2699 5201 2598 5202 2699 5202 3332 5202 2598 5203 2607 5203 2699 5203 2501 5204 2500 5204 2601 5204 2601 5205 2500 5205 2673 5205 2608 5206 2673 5206 2684 5206 2605 5207 2684 5207 2683 5207 2685 5208 2683 5208 2694 5208 2606 5209 2694 5209 2700 5209 2699 5210 2700 5210 3057 5210 3332 5211 2699 5211 3057 5211 2500 5212 2610 5212 2673 5212 2673 5213 2610 5213 2681 5213 2684 5214 2681 5214 2682 5214 2683 5215 2682 5215 2693 5215 2694 5216 2693 5216 2609 5216 2700 5217 2609 5217 3057 5217 2700 5218 2694 5218 2609 5218 2610 5219 2611 5219 2681 5219 2681 5220 2611 5220 2692 5220 2682 5221 2692 5221 2612 5221 2693 5222 2612 5222 2614 5222 2609 5223 2614 5223 3057 5223 2609 5224 2693 5224 2614 5224 2611 5225 2490 5225 2692 5225 2692 5226 2490 5226 2613 5226 2612 5227 2613 5227 2615 5227 2614 5228 2615 5228 3057 5228 2614 5229 2612 5229 2615 5229 2490 5230 2617 5230 2613 5230 2613 5231 2617 5231 2616 5231 2615 5232 2616 5232 3057 5232 2615 5233 2613 5233 2616 5233 2617 5234 3057 5234 2616 5234 3332 5235 2619 5235 2618 5235 2618 5236 2619 5236 2621 5236 2621 5237 2619 5237 4129 5237 2620 5238 2621 5238 4129 5238 2620 5239 2631 5239 2621 5239 2620 5240 4128 5240 2631 5240 2631 5241 4128 5241 2622 5241 2578 5242 2622 5242 2623 5242 2579 5243 2623 5243 4125 5243 4124 5244 2579 5244 4125 5244 4124 5245 2580 5245 2579 5245 4124 5246 4123 5246 2580 5246 2580 5247 4123 5247 2625 5247 2624 5248 2625 5248 4173 5248 2626 5249 2624 5249 4173 5249 2626 5250 2627 5250 2624 5250 2626 5251 2628 5251 2627 5251 2627 5252 2628 5252 2581 5252 2581 5253 2628 5253 4121 5253 2629 5254 2581 5254 4121 5254 2629 5255 2630 5255 2581 5255 2631 5256 2622 5256 2578 5256 2578 5257 2623 5257 2579 5257 2580 5258 2625 5258 2624 5258 2592 5259 2632 5259 2574 5259 2574 5260 2632 5260 2636 5260 2636 5261 2632 5261 2647 5261 2573 5262 2647 5262 2635 5262 2637 5263 2635 5263 2649 5263 2634 5264 2649 5264 2633 5264 2571 5265 2633 5265 2586 5265 2571 5266 2634 5266 2633 5266 2635 5267 2637 5267 2573 5267 2573 5268 2636 5268 2647 5268 2649 5269 2634 5269 2637 5269 2586 5270 2585 5270 2638 5270 2638 5271 2585 5271 2583 5271 2583 5272 2585 5272 2639 5272 2643 5273 2639 5273 2640 5273 2582 5274 2640 5274 2642 5274 2641 5275 2642 5275 2581 5275 2641 5276 2582 5276 2642 5276 2640 5277 2582 5277 2643 5277 2643 5278 2583 5278 2639 5278 2644 5279 2645 5279 2592 5279 2645 5280 2646 5280 2632 5280 2632 5281 2646 5281 2647 5281 2647 5282 2646 5282 2654 5282 2635 5283 2654 5283 2659 5283 2649 5284 2659 5284 2648 5284 2633 5285 2648 5285 2657 5285 2586 5286 2657 5286 2650 5286 2586 5287 2633 5287 2657 5287 2659 5288 2649 5288 2635 5288 2635 5289 2647 5289 2654 5289 2648 5290 2633 5290 2649 5290 2650 5291 2587 5291 2585 5291 2585 5292 2587 5292 2639 5292 2639 5293 2587 5293 2651 5293 2640 5294 2651 5294 2652 5294 2642 5295 2652 5295 2627 5295 2642 5296 2640 5296 2652 5296 2640 5297 2639 5297 2651 5297 2600 5298 2653 5298 2644 5298 2653 5299 2575 5299 2645 5299 2575 5300 2655 5300 2646 5300 2646 5301 2655 5301 2654 5301 2654 5302 2655 5302 2662 5302 2659 5303 2662 5303 2658 5303 2648 5304 2658 5304 2664 5304 2657 5305 2664 5305 2656 5305 2650 5306 2656 5306 2589 
5306 2650 5307 2657 5307 2656 5307 2658 5308 2648 5308 2659 5308 2659 5309 2654 5309 2662 5309 2664 5310 2657 5310 2648 5310 2589 5311 2588 5311 2587 5311 2587 5312 2588 5312 2651 5312 2651 5313 2588 5313 2660 5313 2652 5314 2660 5314 2624 5314 2652 5315 2651 5315 2660 5315 2601 5316 2602 5316 2600 5316 2602 5317 2661 5317 2653 5317 2661 5318 2666 5318 2575 5318 2666 5319 2668 5319 2655 5319 2655 5320 2668 5320 2662 5320 2662 5321 2668 5321 2663 5321 2658 5322 2663 5322 2671 5322 2664 5323 2671 5323 2672 5323 2656 5324 2672 5324 2670 5324 2589 5325 2670 5325 2665 5325 2589 5326 2656 5326 2670 5326 2671 5327 2664 5327 2658 5327 2658 5328 2662 5328 2663 5328 2672 5329 2656 5329 2664 5329 2665 5330 2590 5330 2588 5330 2588 5331 2590 5331 2660 5331 2660 5332 2590 5332 2580 5332 2673 5333 2608 5333 2601 5333 2608 5334 2603 5334 2602 5334 2603 5335 2594 5335 2661 5335 2594 5336 2576 5336 2666 5336 2576 5337 2667 5337 2668 5337 2668 5338 2667 5338 2663 5338 2663 5339 2667 5339 2669 5339 2671 5340 2669 5340 2674 5340 2672 5341 2674 5341 2676 5341 2670 5342 2676 5342 2678 5342 2665 5343 2678 5343 2677 5343 2665 5344 2670 5344 2678 5344 2674 5345 2672 5345 2671 5345 2671 5346 2663 5346 2669 5346 2676 5347 2670 5347 2672 5347 2681 5348 2684 5348 2673 5348 2684 5349 2605 5349 2608 5349 2605 5350 2604 5350 2603 5350 2604 5351 2687 5351 2594 5351 2687 5352 2577 5352 2576 5352 2577 5353 2688 5353 2667 5353 2667 5354 2688 5354 2669 5354 2669 5355 2688 5355 2675 5355 2674 5356 2675 5356 2679 5356 2676 5357 2679 5357 2680 5357 2678 5358 2680 5358 2691 5358 2677 5359 2691 5359 2578 5359 2677 5360 2678 5360 2691 5360 2679 5361 2676 5361 2674 5361 2674 5362 2669 5362 2675 5362 2680 5363 2678 5363 2676 5363 2692 5364 2682 5364 2681 5364 2682 5365 2683 5365 2684 5365 2683 5366 2685 5366 2605 5366 2685 5367 2597 5367 2604 5367 2597 5368 2686 5368 2687 5368 2686 5369 2595 5369 2577 5369 2595 5370 2689 5370 2688 5370 2688 5371 2689 5371 2675 5371 2675 5372 2689 5372 2698 5372 2679 5373 2698 5373 2697 5373 2680 5374 2697 5374 2690 5374 2691 5375 2690 5375 2631 5375 2691 5376 2680 5376 2690 5376 2697 5377 2680 5377 2679 5377 2679 5378 2675 5378 2698 5378 2613 5379 2612 5379 2692 5379 2612 5380 2693 5380 2682 5380 2693 5381 2694 5381 2683 5381 2694 5382 2606 5382 2685 5382 2606 5383 2607 5383 2597 5383 2607 5384 2695 5384 2686 5384 2695 5385 2596 5385 2595 5385 2596 5386 2702 5386 2689 5386 2689 5387 2702 5387 2698 5387 2698 5388 2702 5388 2696 5388 2697 5389 2696 5389 2703 5389 2690 5390 2703 5390 2621 5390 2690 5391 2697 5391 2703 5391 2697 5392 2698 5392 2696 5392 2699 5393 2606 5393 2700 5393 2701 5394 2705 5394 2702 5394 2702 5395 2705 5395 2696 5395 2696 5396 2705 5396 2704 5396 2703 5397 2704 5397 2618 5397 2703 5398 2696 5398 2704 5398 3332 5399 2704 5399 2705 5399 5595 5400 2721 5400 3326 5400 5595 5401 2707 5401 2721 5401 5595 5402 2706 5402 2707 5402 2707 5403 2706 5403 2708 5403 2722 5404 2708 5404 2783 5404 2723 5405 2783 5405 2710 5405 2709 5406 2710 5406 2712 5406 2711 5407 2712 5407 2796 5407 2724 5408 2796 5408 2799 5408 2713 5409 2799 5409 2726 5409 2804 5410 2726 5410 2805 5410 2725 5411 2805 5411 2746 5411 2526 5412 2725 5412 2746 5412 2526 5413 2714 5413 2725 5413 2526 5414 2715 5414 2714 5414 2714 5415 2715 5415 2801 5415 2800 5416 2801 5416 2802 5416 2716 5417 2802 5417 2755 5417 2792 5418 2755 5418 2757 5418 2793 5419 2757 5419 2717 5419 2781 5420 2717 5420 2785 5420 2782 5421 2785 5421 2718 5421 2719 5422 2718 5422 2720 5422 2721 5423 2720 5423 3326 5423 2721 5424 2719 5424 2720 5424 2721 5425 
2707 5425 2719 5425 2719 5426 2707 5426 2722 5426 2782 5427 2722 5427 2723 5427 2781 5428 2723 5428 2709 5428 2793 5429 2709 5429 2711 5429 2792 5430 2711 5430 2724 5430 2716 5431 2724 5431 2713 5431 2800 5432 2713 5432 2804 5432 2714 5433 2804 5433 2725 5433 2714 5434 2800 5434 2804 5434 2714 5435 2801 5435 2800 5435 2706 5436 5594 5436 2708 5436 2708 5437 5594 5437 2729 5437 2783 5438 2729 5438 2788 5438 2710 5439 2788 5439 2791 5439 2712 5440 2791 5440 2730 5440 2796 5441 2730 5441 2798 5441 2799 5442 2798 5442 2727 5442 2726 5443 2727 5443 2728 5443 2805 5444 2728 5444 2746 5444 2805 5445 2726 5445 2728 5445 5594 5446 2733 5446 2729 5446 2729 5447 2733 5447 2734 5447 2788 5448 2734 5448 2737 5448 2791 5449 2737 5449 2731 5449 2730 5450 2731 5450 2739 5450 2798 5451 2739 5451 2732 5451 2727 5452 2732 5452 2741 5452 2728 5453 2741 5453 2746 5453 2728 5454 2727 5454 2741 5454 2733 5455 2735 5455 2734 5455 2734 5456 2735 5456 2736 5456 2737 5457 2736 5457 2797 5457 2731 5458 2797 5458 2738 5458 2739 5459 2738 5459 2740 5459 2732 5460 2740 5460 2742 5460 2741 5461 2742 5461 2746 5461 2741 5462 2732 5462 2742 5462 2735 5463 2747 5463 2736 5463 2736 5464 2747 5464 2748 5464 2797 5465 2748 5465 2743 5465 2738 5466 2743 5466 2803 5466 2740 5467 2803 5467 2744 5467 2742 5468 2744 5468 2745 5468 2746 5469 2742 5469 2745 5469 2747 5470 5593 5470 2748 5470 2748 5471 5593 5471 2751 5471 2743 5472 2751 5472 2749 5472 2803 5473 2749 5473 2750 5473 2744 5474 2750 5474 2745 5474 2744 5475 2803 5475 2750 5475 5593 5476 5592 5476 2751 5476 2751 5477 5592 5477 2753 5477 2749 5478 2753 5478 2752 5478 2750 5479 2752 5479 2745 5479 2750 5480 2749 5480 2752 5480 5592 5481 5591 5481 2753 5481 2753 5482 5591 5482 2754 5482 2752 5483 2754 5483 2745 5483 2752 5484 2753 5484 2754 5484 5591 5485 2745 5485 2754 5485 2715 5486 2760 5486 2801 5486 2801 5487 2760 5487 2761 5487 2802 5488 2761 5488 2756 5488 2755 5489 2756 5489 2794 5489 2757 5490 2794 5490 2789 5490 2717 5491 2789 5491 2784 5491 2785 5492 2784 5492 2758 5492 2718 5493 2758 5493 2759 5493 2720 5494 2759 5494 3326 5494 2720 5495 2718 5495 2759 5495 2760 5496 2524 5496 2761 5496 2761 5497 2524 5497 2763 5497 2756 5498 2763 5498 2795 5498 2794 5499 2795 5499 2786 5499 2789 5500 2786 5500 2762 5500 2784 5501 2762 5501 2778 5501 2758 5502 2778 5502 2766 5502 2759 5503 2766 5503 3326 5503 2759 5504 2758 5504 2766 5504 2524 5505 2764 5505 2763 5505 2763 5506 2764 5506 2767 5506 2795 5507 2767 5507 2790 5507 2786 5508 2790 5508 2765 5508 2762 5509 2765 5509 2768 5509 2778 5510 2768 5510 2777 5510 2766 5511 2777 5511 3326 5511 2766 5512 2778 5512 2777 5512 2764 5513 2523 5513 2767 5513 2767 5514 2523 5514 2769 5514 2790 5515 2769 5515 2787 5515 2765 5516 2787 5516 2779 5516 2768 5517 2779 5517 2776 5517 2777 5518 2776 5518 2572 5518 3326 5519 2777 5519 2572 5519 2523 5520 2522 5520 2769 5520 2769 5521 2522 5521 2780 5521 2787 5522 2780 5522 2770 5522 2779 5523 2770 5523 2772 5523 2776 5524 2772 5524 2572 5524 2776 5525 2779 5525 2772 5525 2522 5526 2521 5526 2780 5526 2780 5527 2521 5527 2773 5527 2770 5528 2773 5528 2771 5528 2772 5529 2771 5529 2572 5529 2772 5530 2770 5530 2771 5530 2521 5531 2774 5531 2773 5531 2773 5532 2774 5532 2775 5532 2771 5533 2775 5533 2572 5533 2771 5534 2773 5534 2775 5534 2774 5535 2572 5535 2775 5535 2768 5536 2776 5536 2777 5536 2782 5537 2718 5537 2719 5537 2722 5538 2782 5538 2719 5538 2708 5539 2722 5539 2707 5539 2785 5540 2758 5540 2718 5540 2784 5541 2778 5541 2758 5541 2762 5542 2768 5542 2778 5542 2765 5543 2779 5543 2768 
5543 2787 5544 2770 5544 2779 5544 2780 5545 2773 5545 2770 5545 2729 5546 2783 5546 2708 5546 2781 5547 2785 5547 2782 5547 2723 5548 2781 5548 2782 5548 2783 5549 2723 5549 2722 5549 2717 5550 2784 5550 2785 5550 2789 5551 2762 5551 2784 5551 2786 5552 2765 5552 2762 5552 2790 5553 2787 5553 2765 5553 2769 5554 2780 5554 2787 5554 2734 5555 2788 5555 2729 5555 2788 5556 2710 5556 2783 5556 2793 5557 2717 5557 2781 5557 2709 5558 2793 5558 2781 5558 2710 5559 2709 5559 2723 5559 2757 5560 2789 5560 2717 5560 2794 5561 2786 5561 2789 5561 2795 5562 2790 5562 2786 5562 2767 5563 2769 5563 2790 5563 2736 5564 2737 5564 2734 5564 2737 5565 2791 5565 2788 5565 2791 5566 2712 5566 2710 5566 2792 5567 2757 5567 2793 5567 2711 5568 2792 5568 2793 5568 2712 5569 2711 5569 2709 5569 2755 5570 2794 5570 2757 5570 2756 5571 2795 5571 2794 5571 2763 5572 2767 5572 2795 5572 2748 5573 2797 5573 2736 5573 2797 5574 2731 5574 2737 5574 2731 5575 2730 5575 2791 5575 2730 5576 2796 5576 2712 5576 2716 5577 2755 5577 2792 5577 2724 5578 2716 5578 2792 5578 2796 5579 2724 5579 2711 5579 2802 5580 2756 5580 2755 5580 2761 5581 2763 5581 2756 5581 2751 5582 2743 5582 2748 5582 2743 5583 2738 5583 2797 5583 2738 5584 2739 5584 2731 5584 2739 5585 2798 5585 2730 5585 2798 5586 2799 5586 2796 5586 2800 5587 2802 5587 2716 5587 2713 5588 2800 5588 2716 5588 2799 5589 2713 5589 2724 5589 2801 5590 2761 5590 2802 5590 2753 5591 2749 5591 2751 5591 2749 5592 2803 5592 2743 5592 2803 5593 2740 5593 2738 5593 2740 5594 2732 5594 2739 5594 2732 5595 2727 5595 2798 5595 2727 5596 2726 5596 2799 5596 2726 5597 2804 5597 2713 5597 2742 5598 2740 5598 2744 5598 2725 5599 2804 5599 2805 5599 2806 5600 3157 5600 2872 5600 2866 5601 2872 5601 2873 5601 2862 5602 2873 5602 2807 5602 2808 5603 2807 5603 2809 5603 2857 5604 2809 5604 2877 5604 2852 5605 2877 5605 2879 5605 2854 5606 2879 5606 2810 5606 2811 5607 2810 5607 2820 5607 2847 5608 2820 5608 2818 5608 3164 5609 2874 5609 3165 5609 3164 5610 2881 5610 2874 5610 3164 5611 2812 5611 2881 5611 2881 5612 2812 5612 2888 5612 2889 5613 2888 5613 2813 5613 2890 5614 2813 5614 2824 5614 2882 5615 2824 5615 2826 5615 2883 5616 2826 5616 2814 5616 2821 5617 2814 5617 2815 5617 2816 5618 2815 5618 2817 5618 2928 5619 2816 5619 2817 5619 2928 5620 2898 5620 2816 5620 2928 5621 2922 5621 2898 5621 2898 5622 2922 5622 2818 5622 2820 5623 2898 5623 2818 5623 2820 5624 2819 5624 2898 5624 2820 5625 2810 5625 2819 5625 2819 5626 2810 5626 2878 5626 2821 5627 2878 5627 2883 5627 2814 5628 2821 5628 2883 5628 2812 5629 2822 5629 2888 5629 2888 5630 2822 5630 2823 5630 2813 5631 2823 5631 2825 5631 2824 5632 2825 5632 2830 5632 2826 5633 2830 5633 2827 5633 2814 5634 2827 5634 2828 5634 2815 5635 2828 5635 2943 5635 2817 5636 2815 5636 2943 5636 2822 5637 2829 5637 2823 5637 2823 5638 2829 5638 2831 5638 2825 5639 2831 5639 2897 5639 2830 5640 2897 5640 2899 5640 2827 5641 2899 5641 2835 5641 2828 5642 2835 5642 2944 5642 2943 5643 2828 5643 2944 5643 2829 5644 3147 5644 2831 5644 2831 5645 3147 5645 2832 5645 2897 5646 2832 5646 2833 5646 2899 5647 2833 5647 2839 5647 2835 5648 2839 5648 2834 5648 2944 5649 2835 5649 2834 5649 3147 5650 3146 5650 2832 5650 2832 5651 3146 5651 2836 5651 2840 5652 2836 5652 2837 5652 2841 5653 2837 5653 2957 5653 2954 5654 2841 5654 2957 5654 2954 5655 2838 5655 2841 5655 2954 5656 2953 5656 2838 5656 2838 5657 2953 5657 2839 5657 2833 5658 2838 5658 2839 5658 2833 5659 2840 5659 2838 5659 2833 5660 2832 5660 2840 5660 2840 5661 2832 5661 2836 5661 2840 5662 
2837 5662 2841 5662 2838 5663 2840 5663 2841 5663 2953 5664 2834 5664 2839 5664 2922 5665 2902 5665 2818 5665 2818 5666 2902 5666 2844 5666 2844 5667 3068 5667 2870 5667 2844 5668 2870 5668 2842 5668 2844 5669 2842 5669 2880 5669 2844 5670 2880 5670 2843 5670 2844 5671 2843 5671 2845 5671 2844 5672 2845 5672 2896 5672 2844 5673 2896 5673 2846 5673 2844 5674 2846 5674 2847 5674 2844 5675 2847 5675 2818 5675 3059 5676 2848 5676 3064 5676 3059 5677 2849 5677 2848 5677 3059 5678 2855 5678 2849 5678 2849 5679 2855 5679 2850 5679 2869 5680 2850 5680 2868 5680 2885 5681 2868 5681 2884 5681 2892 5682 2884 5682 2851 5682 2893 5683 2851 5683 2852 5683 2854 5684 2852 5684 2879 5684 2854 5685 2893 5685 2852 5685 2854 5686 2853 5686 2893 5686 2854 5687 2811 5687 2853 5687 2854 5688 2810 5688 2811 5688 2855 5689 2856 5689 2850 5689 2850 5690 2856 5690 2858 5690 2868 5691 2858 5691 2859 5691 2884 5692 2859 5692 2860 5692 2851 5693 2860 5693 2857 5693 2852 5694 2857 5694 2877 5694 2852 5695 2851 5695 2857 5695 2856 5696 3047 5696 2858 5696 2858 5697 3047 5697 2867 5697 2859 5698 2867 5698 2863 5698 2860 5699 2863 5699 2808 5699 2857 5700 2808 5700 2809 5700 2857 5701 2860 5701 2808 5701 3047 5702 2864 5702 2867 5702 2867 5703 2864 5703 2861 5703 2863 5704 2861 5704 2862 5704 2808 5705 2862 5705 2807 5705 2808 5706 2863 5706 2862 5706 2864 5707 2865 5707 2861 5707 2861 5708 2865 5708 2866 5708 2862 5709 2866 5709 2873 5709 2862 5710 2861 5710 2866 5710 2865 5711 2806 5711 2866 5711 2866 5712 2806 5712 2872 5712 2863 5713 2867 5713 2861 5713 2859 5714 2858 5714 2867 5714 2868 5715 2850 5715 2858 5715 2850 5716 2869 5716 2849 5716 2849 5717 2869 5717 2871 5717 2848 5718 2871 5718 2842 5718 2870 5719 2848 5719 2842 5719 2870 5720 3064 5720 2848 5720 2870 5721 3068 5721 3064 5721 2848 5722 2849 5722 2871 5722 3165 5723 2874 5723 2872 5723 3157 5724 3165 5724 2872 5724 2873 5725 2874 5725 2875 5725 2807 5726 2875 5726 2876 5726 2809 5727 2876 5727 2891 5727 2877 5728 2891 5728 2894 5728 2879 5729 2894 5729 2878 5729 2810 5730 2879 5730 2878 5730 2875 5731 2807 5731 2873 5731 2860 5732 2859 5732 2863 5732 2884 5733 2868 5733 2859 5733 2868 5734 2885 5734 2869 5734 2869 5735 2885 5735 2887 5735 2871 5736 2887 5736 2880 5736 2842 5737 2871 5737 2880 5737 2871 5738 2869 5738 2887 5738 2872 5739 2874 5739 2873 5739 2874 5740 2881 5740 2875 5740 2875 5741 2881 5741 2889 5741 2876 5742 2889 5742 2890 5742 2891 5743 2890 5743 2882 5743 2894 5744 2882 5744 2883 5744 2878 5745 2894 5745 2883 5745 2888 5746 2889 5746 2881 5746 2889 5747 2876 5747 2875 5747 2876 5748 2809 5748 2807 5748 2851 5749 2884 5749 2860 5749 2884 5750 2892 5750 2885 5750 2885 5751 2892 5751 2886 5751 2887 5752 2886 5752 2843 5752 2880 5753 2887 5753 2843 5753 2887 5754 2885 5754 2886 5754 2823 5755 2813 5755 2888 5755 2813 5756 2890 5756 2889 5756 2890 5757 2891 5757 2876 5757 2891 5758 2877 5758 2809 5758 2851 5759 2893 5759 2892 5759 2892 5760 2893 5760 2895 5760 2886 5761 2895 5761 2845 5761 2843 5762 2886 5762 2845 5762 2886 5763 2892 5763 2895 5763 2831 5764 2825 5764 2823 5764 2825 5765 2824 5765 2813 5765 2824 5766 2882 5766 2890 5766 2882 5767 2894 5767 2891 5767 2894 5768 2879 5768 2877 5768 2896 5769 2845 5769 2895 5769 2853 5770 2895 5770 2893 5770 2853 5771 2896 5771 2895 5771 2853 5772 2846 5772 2896 5772 2853 5773 2811 5773 2846 5773 2846 5774 2811 5774 2847 5774 2847 5775 2811 5775 2820 5775 2832 5776 2897 5776 2831 5776 2897 5777 2830 5777 2825 5777 2830 5778 2826 5778 2824 5778 2826 5779 2883 5779 2882 5779 2899 5780 2897 5780 2833 
5780 2827 5781 2830 5781 2899 5781 2814 5782 2826 5782 2827 5782 2819 5783 2878 5783 2821 5783 2816 5784 2821 5784 2815 5784 2816 5785 2819 5785 2821 5785 2816 5786 2898 5786 2819 5786 2835 5787 2899 5787 2839 5787 2828 5788 2827 5788 2835 5788 2815 5789 2814 5789 2828 5789 2844 5790 2970 5790 2911 5790 2844 5791 2900 5791 2970 5791 2844 5792 2969 5792 2900 5792 2844 5793 2901 5793 2969 5793 2844 5794 2971 5794 2901 5794 2844 5795 2968 5795 2971 5795 2844 5796 2967 5796 2968 5796 2844 5797 2902 5797 2967 5797 2967 5798 2902 5798 2966 5798 2903 5799 2966 5799 2923 5799 2974 5800 2923 5800 2904 5800 2991 5801 2904 5801 2998 5801 2992 5802 2998 5802 3010 5802 2999 5803 3010 5803 2905 5803 3011 5804 2905 5804 2906 5804 3029 5805 2906 5805 3027 5805 3028 5806 3027 5806 2926 5806 3032 5807 2926 5807 2925 5807 3035 5808 2925 5808 3033 5808 3031 5809 3033 5809 2907 5809 3018 5810 2907 5810 3020 5810 3019 5811 3020 5811 3013 5811 3002 5812 3013 5812 3003 5812 3001 5813 3003 5813 2908 5813 2921 5814 2908 5814 2909 5814 2989 5815 2909 5815 2910 5815 2973 5816 2910 5816 2965 5816 2913 5817 2965 5817 2964 5817 2911 5818 2964 5818 2912 5818 2911 5819 2913 5819 2964 5819 2911 5820 2914 5820 2913 5820 2911 5821 2916 5821 2914 5821 2911 5822 2915 5822 2916 5822 2911 5823 2970 5823 2915 5823 2915 5824 2970 5824 2917 5824 2918 5825 2917 5825 2919 5825 2920 5826 2919 5826 2985 5826 2921 5827 2985 5827 3001 5827 2908 5828 2921 5828 3001 5828 2902 5829 2922 5829 2966 5829 2966 5830 2922 5830 2927 5830 2923 5831 2927 5831 2929 5831 2904 5832 2929 5832 2990 5832 2998 5833 2990 5833 2997 5833 3010 5834 2997 5834 3017 5834 2905 5835 3017 5835 2924 5835 2906 5836 2924 5836 3026 5836 3027 5837 3026 5837 2932 5837 2926 5838 2932 5838 2925 5838 2926 5839 3027 5839 2932 5839 2922 5840 2928 5840 2927 5840 2927 5841 2928 5841 2933 5841 2929 5842 2933 5842 2930 5842 2990 5843 2930 5843 2936 5843 2997 5844 2936 5844 2931 5844 3017 5845 2931 5845 2937 5845 2924 5846 2937 5846 3025 5846 3026 5847 3025 5847 2941 5847 2932 5848 2941 5848 2925 5848 2932 5849 3026 5849 2941 5849 2928 5850 2817 5850 2933 5850 2933 5851 2817 5851 2934 5851 2930 5852 2934 5852 2935 5852 2936 5853 2935 5853 3016 5853 2931 5854 3016 5854 2938 5854 2937 5855 2938 5855 2939 5855 3025 5856 2939 5856 2940 5856 2941 5857 2940 5857 2925 5857 2941 5858 3025 5858 2940 5858 2817 5859 2943 5859 2934 5859 2934 5860 2943 5860 2945 5860 2935 5861 2945 5861 3015 5861 3016 5862 3015 5862 2947 5862 2938 5863 2947 5863 3024 5863 2939 5864 3024 5864 2942 5864 2940 5865 2942 5865 2957 5865 2925 5866 2940 5866 2957 5866 2943 5867 2944 5867 2945 5867 2945 5868 2944 5868 3014 5868 3015 5869 3014 5869 2946 5869 2947 5870 2946 5870 2951 5870 3024 5871 2951 5871 2948 5871 2942 5872 2948 5872 2957 5872 2942 5873 3024 5873 2948 5873 2944 5874 2834 5874 3014 5874 3014 5875 2834 5875 2952 5875 2946 5876 2952 5876 2949 5876 2951 5877 2949 5877 2950 5877 2948 5878 2950 5878 2957 5878 2948 5879 2951 5879 2950 5879 2834 5880 2953 5880 2952 5880 2952 5881 2953 5881 2956 5881 2949 5882 2956 5882 2955 5882 2950 5883 2955 5883 2957 5883 2950 5884 2949 5884 2955 5884 2953 5885 2954 5885 2956 5885 2956 5886 2954 5886 2958 5886 2955 5887 2958 5887 2957 5887 2955 5888 2956 5888 2958 5888 2954 5889 2957 5889 2958 5889 2925 5890 3239 5890 3033 5890 3033 5891 3239 5891 2907 5891 2907 5892 3239 5892 3237 5892 3236 5893 2907 5893 3237 5893 3236 5894 3020 5894 2907 5894 3236 5895 3243 5895 3020 5895 3020 5896 3243 5896 3013 5896 3013 5897 3243 5897 3235 5897 2959 5898 3013 5898 3235 5898 2959 5899 
3003 5899 3013 5899 2959 5900 2960 5900 3003 5900 3003 5901 2960 5901 2961 5901 2908 5902 2961 5902 3234 5902 3222 5903 2908 5903 3234 5903 3222 5904 2909 5904 2908 5904 3222 5905 2962 5905 2909 5905 2909 5906 2962 5906 2910 5906 2910 5907 2962 5907 3219 5907 2963 5908 2910 5908 3219 5908 2963 5909 2965 5909 2910 5909 2963 5910 3218 5910 2965 5910 2965 5911 3218 5911 2912 5911 2964 5912 2965 5912 2912 5912 3003 5913 2961 5913 2908 5913 2966 5914 2903 5914 2967 5914 2967 5915 2903 5915 2968 5915 2968 5916 2903 5916 2982 5916 2971 5917 2982 5917 2981 5917 2901 5918 2981 5918 2976 5918 2969 5919 2976 5919 2978 5919 2900 5920 2978 5920 2980 5920 2970 5921 2980 5921 2917 5921 2970 5922 2900 5922 2980 5922 2981 5923 2901 5923 2971 5923 2971 5924 2968 5924 2982 5924 2976 5925 2969 5925 2901 5925 2978 5926 2900 5926 2969 5926 2917 5927 2918 5927 2915 5927 2915 5928 2918 5928 2916 5928 2916 5929 2918 5929 2972 5929 2914 5930 2972 5930 2973 5930 2913 5931 2973 5931 2965 5931 2913 5932 2914 5932 2973 5932 2914 5933 2916 5933 2972 5933 2927 5934 2923 5934 2966 5934 2923 5935 2974 5935 2903 5935 2903 5936 2974 5936 2982 5936 2982 5937 2974 5937 2975 5937 2981 5938 2975 5938 2977 5938 2976 5939 2977 5939 2979 5939 2978 5940 2979 5940 2983 5940 2980 5941 2983 5941 2988 5941 2917 5942 2988 5942 2919 5942 2917 5943 2980 5943 2988 5943 2977 5944 2976 5944 2981 5944 2981 5945 2982 5945 2975 5945 2979 5946 2978 5946 2976 5946 2983 5947 2980 5947 2978 5947 2919 5948 2920 5948 2918 5948 2918 5949 2920 5949 2972 5949 2972 5950 2920 5950 2989 5950 2973 5951 2989 5951 2910 5951 2973 5952 2972 5952 2989 5952 2933 5953 2929 5953 2927 5953 2929 5954 2904 5954 2923 5954 2904 5955 2991 5955 2974 5955 2974 5956 2991 5956 2975 5956 2975 5957 2991 5957 2987 5957 2977 5958 2987 5958 2986 5958 2979 5959 2986 5959 2996 5959 2983 5960 2996 5960 2995 5960 2988 5961 2995 5961 2984 5961 2919 5962 2984 5962 2985 5962 2919 5963 2988 5963 2984 5963 2986 5964 2979 5964 2977 5964 2977 5965 2975 5965 2987 5965 2996 5966 2983 5966 2979 5966 2995 5967 2988 5967 2983 5967 2985 5968 2921 5968 2920 5968 2920 5969 2921 5969 2989 5969 2989 5970 2921 5970 2909 5970 2934 5971 2930 5971 2933 5971 2930 5972 2990 5972 2929 5972 2990 5973 2998 5973 2904 5973 2998 5974 2992 5974 2991 5974 2991 5975 2992 5975 2987 5975 2987 5976 2992 5976 3006 5976 2986 5977 3006 5977 2993 5977 2996 5978 2993 5978 2994 5978 2995 5979 2994 5979 3008 5979 2984 5980 3008 5980 3004 5980 2985 5981 3004 5981 3001 5981 2985 5982 2984 5982 3004 5982 2993 5983 2996 5983 2986 5983 2986 5984 2987 5984 3006 5984 2994 5985 2995 5985 2996 5985 3008 5986 2984 5986 2995 5986 2945 5987 2935 5987 2934 5987 2935 5988 2936 5988 2930 5988 2936 5989 2997 5989 2990 5989 2997 5990 3010 5990 2998 5990 3010 5991 2999 5991 2992 5991 2992 5992 2999 5992 3006 5992 3006 5993 2999 5993 3000 5993 2993 5994 3000 5994 3005 5994 2994 5995 3005 5995 3007 5995 3008 5996 3007 5996 3009 5996 3004 5997 3009 5997 3002 5997 3001 5998 3002 5998 3003 5998 3001 5999 3004 5999 3002 5999 3005 6000 2994 6000 2993 6000 2993 6001 3006 6001 3000 6001 3007 6002 3008 6002 2994 6002 3009 6003 3004 6003 3008 6003 3014 6004 3015 6004 2945 6004 3015 6005 3016 6005 2935 6005 3016 6006 2931 6006 2936 6006 2931 6007 3017 6007 2997 6007 3017 6008 2905 6008 3010 6008 2905 6009 3011 6009 2999 6009 2999 6010 3011 6010 3000 6010 3000 6011 3011 6011 3023 6011 3005 6012 3023 6012 3022 6012 3007 6013 3022 6013 3012 6013 3009 6014 3012 6014 3019 6014 3002 6015 3019 6015 3013 6015 3002 6016 3009 6016 3019 6016 3022 6017 3007 6017 3005 
6017 3005 6018 3000 6018 3023 6018 3012 6019 3009 6019 3007 6019 2952 6020 2946 6020 3014 6020 2946 6021 2947 6021 3015 6021 2947 6022 2938 6022 3016 6022 2938 6023 2937 6023 2931 6023 2937 6024 2924 6024 3017 6024 2924 6025 2906 6025 2905 6025 2906 6026 3029 6026 3011 6026 3011 6027 3029 6027 3023 6027 3023 6028 3029 6028 3030 6028 3022 6029 3030 6029 3021 6029 3012 6030 3021 6030 3018 6030 3019 6031 3018 6031 3020 6031 3019 6032 3012 6032 3018 6032 3021 6033 3012 6033 3022 6033 3022 6034 3023 6034 3030 6034 2956 6035 2949 6035 2952 6035 2949 6036 2951 6036 2946 6036 2951 6037 3024 6037 2947 6037 3024 6038 2939 6038 2938 6038 2939 6039 3025 6039 2937 6039 3025 6040 3026 6040 2924 6040 3026 6041 3027 6041 2906 6041 3027 6042 3028 6042 3029 6042 3029 6043 3028 6043 3030 6043 3030 6044 3028 6044 3034 6044 3021 6045 3034 6045 3031 6045 3018 6046 3031 6046 2907 6046 3018 6047 3021 6047 3031 6047 3021 6048 3030 6048 3034 6048 2940 6049 2939 6049 2942 6049 2926 6050 3032 6050 3028 6050 3028 6051 3032 6051 3034 6051 3034 6052 3032 6052 3035 6052 3031 6053 3035 6053 3033 6053 3031 6054 3034 6054 3035 6054 2925 6055 3035 6055 3032 6055 2806 6056 3072 6056 3157 6056 2806 6057 3039 6057 3072 6057 2806 6058 2865 6058 3039 6058 3039 6059 2865 6059 3042 6059 3040 6060 3042 6060 3037 6060 3036 6061 3037 6061 3044 6061 3090 6062 3044 6062 3091 6062 3038 6063 3091 6063 3045 6063 2481 6064 3045 6064 2482 6064 2481 6065 3038 6065 3045 6065 2481 6066 3069 6066 3038 6066 3038 6067 3069 6067 3086 6067 3090 6068 3086 6068 3071 6068 3036 6069 3071 6069 3082 6069 3040 6070 3082 6070 3041 6070 3039 6071 3041 6071 3072 6071 3039 6072 3040 6072 3041 6072 3039 6073 3042 6073 3040 6073 2865 6074 2864 6074 3042 6074 3042 6075 2864 6075 3043 6075 3037 6076 3043 6076 3085 6076 3044 6077 3085 6077 3089 6077 3091 6078 3089 6078 3092 6078 3045 6079 3092 6079 3046 6079 2482 6080 3046 6080 2484 6080 2482 6081 3045 6081 3046 6081 2864 6082 3047 6082 3043 6082 3043 6083 3047 6083 3049 6083 3085 6084 3049 6084 3050 6084 3089 6085 3050 6085 3052 6085 3092 6086 3052 6086 3053 6086 3046 6087 3053 6087 3048 6087 2484 6088 3048 6088 2485 6088 2484 6089 3046 6089 3048 6089 3047 6090 2856 6090 3049 6090 3049 6091 2856 6091 3088 6091 3050 6092 3088 6092 3051 6092 3052 6093 3051 6093 3054 6093 3053 6094 3054 6094 3094 6094 3048 6095 3094 6095 3058 6095 2485 6096 3058 6096 3057 6096 2485 6097 3048 6097 3058 6097 2856 6098 2855 6098 3088 6098 3088 6099 2855 6099 3055 6099 3051 6100 3055 6100 3056 6100 3054 6101 3056 6101 3093 6101 3094 6102 3093 6102 3062 6102 3058 6103 3062 6103 3057 6103 3058 6104 3094 6104 3062 6104 2855 6105 3059 6105 3055 6105 3055 6106 3059 6106 3060 6106 3056 6107 3060 6107 3061 6107 3093 6108 3061 6108 3063 6108 3062 6109 3063 6109 3057 6109 3062 6110 3093 6110 3063 6110 3059 6111 3064 6111 3060 6111 3060 6112 3064 6112 3065 6112 3061 6113 3065 6113 3067 6113 3063 6114 3067 6114 2844 6114 3057 6115 3063 6115 2844 6115 3064 6116 3068 6116 3065 6116 3065 6117 3068 6117 3066 6117 3067 6118 3066 6118 2844 6118 3067 6119 3065 6119 3066 6119 3068 6120 2844 6120 3066 6120 3069 6121 3070 6121 3086 6121 3086 6122 3070 6122 3087 6122 3071 6123 3087 6123 3083 6123 3082 6124 3083 6124 3075 6124 3041 6125 3075 6125 3073 6125 3072 6126 3073 6126 3157 6126 3072 6127 3041 6127 3073 6127 3070 6128 3074 6128 3087 6128 3087 6129 3074 6129 3084 6129 3083 6130 3084 6130 3081 6130 3075 6131 3081 6131 3078 6131 3073 6132 3078 6132 3157 6132 3073 6133 3075 6133 3078 6133 3074 6134 3076 6134 3084 6134 3084 6135 3076 6135 3077 6135 3081 6136 
3077 6136 3079 6136 3078 6137 3079 6137 2746 6137 3157 6138 3078 6138 2746 6138 3076 6139 2541 6139 3077 6139 3077 6140 2541 6140 3080 6140 3079 6141 3080 6141 2746 6141 3079 6142 3077 6142 3080 6142 2541 6143 2746 6143 3080 6143 3081 6144 3079 6144 3078 6144 3082 6145 3075 6145 3041 6145 3083 6146 3081 6146 3075 6146 3084 6147 3077 6147 3081 6147 3036 6148 3082 6148 3040 6148 3037 6149 3036 6149 3040 6149 3043 6150 3037 6150 3042 6150 3071 6151 3083 6151 3082 6151 3087 6152 3084 6152 3083 6152 3049 6153 3085 6153 3043 6153 3090 6154 3071 6154 3036 6154 3044 6155 3090 6155 3036 6155 3085 6156 3044 6156 3037 6156 3086 6157 3087 6157 3071 6157 3088 6158 3050 6158 3049 6158 3050 6159 3089 6159 3085 6159 3038 6160 3086 6160 3090 6160 3091 6161 3038 6161 3090 6161 3089 6162 3091 6162 3044 6162 3055 6163 3051 6163 3088 6163 3051 6164 3052 6164 3050 6164 3052 6165 3092 6165 3089 6165 3092 6166 3045 6166 3091 6166 3060 6167 3056 6167 3055 6167 3056 6168 3054 6168 3051 6168 3054 6169 3053 6169 3052 6169 3053 6170 3046 6170 3092 6170 3065 6171 3061 6171 3060 6171 3061 6172 3093 6172 3056 6172 3093 6173 3094 6173 3054 6173 3094 6174 3048 6174 3053 6174 3063 6175 3061 6175 3067 6175 3095 6176 3096 6176 3153 6176 3095 6177 3098 6177 3096 6177 3095 6178 3097 6178 3098 6178 3098 6179 3097 6179 3099 6179 3100 6180 3099 6180 3178 6180 3180 6181 3178 6181 3101 6181 3179 6182 3101 6182 3181 6182 3187 6183 3181 6183 3185 6183 3102 6184 3185 6184 3191 6184 3196 6185 3191 6185 3103 6185 3113 6186 3103 6186 3104 6186 3105 6187 3104 6187 2957 6187 2837 6188 3105 6188 2957 6188 2837 6189 3106 6189 3105 6189 2837 6190 2836 6190 3106 6190 3106 6191 2836 6191 3140 6191 3190 6192 3140 6192 3141 6192 3112 6193 3141 6193 3186 6193 3107 6194 3186 6194 3108 6194 3182 6195 3108 6195 3174 6195 3109 6196 3174 6196 3168 6196 3110 6197 3168 6197 3145 6197 3167 6198 3145 6198 3111 6198 3096 6199 3111 6199 3153 6199 3096 6200 3167 6200 3111 6200 3096 6201 3098 6201 3167 6201 3167 6202 3098 6202 3100 6202 3110 6203 3100 6203 3180 6203 3109 6204 3180 6204 3179 6204 3182 6205 3179 6205 3187 6205 3107 6206 3187 6206 3102 6206 3112 6207 3102 6207 3196 6207 3190 6208 3196 6208 3113 6208 3106 6209 3113 6209 3105 6209 3106 6210 3190 6210 3113 6210 3106 6211 3140 6211 3190 6211 3097 6212 5614 6212 3099 6212 3099 6213 5614 6213 3114 6213 3178 6214 3114 6214 3115 6214 3101 6215 3115 6215 3116 6215 3181 6216 3116 6216 3118 6216 3185 6217 3118 6217 3189 6217 3191 6218 3189 6218 3120 6218 3103 6219 3120 6219 3119 6219 3104 6220 3119 6220 2957 6220 3104 6221 3103 6221 3119 6221 5614 6222 3117 6222 3114 6222 3114 6223 3117 6223 3122 6223 3115 6224 3122 6224 3184 6224 3116 6225 3184 6225 3124 6225 3118 6226 3124 6226 3194 6226 3189 6227 3194 6227 3195 6227 3120 6228 3195 6228 3121 6228 3119 6229 3121 6229 2957 6229 3119 6230 3120 6230 3121 6230 3117 6231 3123 6231 3122 6231 3122 6232 3123 6232 3127 6232 3184 6233 3127 6233 3128 6233 3124 6234 3128 6234 3129 6234 3194 6235 3129 6235 3125 6235 3195 6236 3125 6236 3126 6236 3121 6237 3126 6237 2957 6237 3121 6238 3195 6238 3126 6238 3123 6239 3131 6239 3127 6239 3127 6240 3131 6240 3132 6240 3128 6241 3132 6241 3193 6241 3129 6242 3193 6242 3133 6242 3125 6243 3133 6243 3130 6243 3126 6244 3130 6244 5351 6244 2957 6245 3126 6245 5351 6245 3131 6246 5613 6246 3132 6246 3132 6247 5613 6247 3192 6247 3193 6248 3192 6248 3134 6248 3133 6249 3134 6249 3136 6249 3130 6250 3136 6250 5351 6250 3130 6251 3133 6251 3136 6251 5613 6252 5611 6252 3192 6252 3192 6253 5611 6253 3137 6253 3134 6254 3137 6254 3135 
6254 3136 6255 3135 6255 5351 6255 3136 6256 3134 6256 3135 6256 5611 6257 3138 6257 3137 6257 3137 6258 3138 6258 3139 6258 3135 6259 3139 6259 5351 6259 3135 6260 3137 6260 3139 6260 3138 6261 5351 6261 3139 6261 2836 6262 3146 6262 3140 6262 3140 6263 3146 6263 3148 6263 3141 6264 3148 6264 3188 6264 3186 6265 3188 6265 3183 6265 3108 6266 3183 6266 3142 6266 3174 6267 3142 6267 3169 6267 3168 6268 3169 6268 3143 6268 3145 6269 3143 6269 3144 6269 3111 6270 3144 6270 3153 6270 3111 6271 3145 6271 3144 6271 3146 6272 3147 6272 3148 6272 3148 6273 3147 6273 3149 6273 3188 6274 3149 6274 3150 6274 3183 6275 3150 6275 3151 6275 3142 6276 3151 6276 3155 6276 3169 6277 3155 6277 3170 6277 3143 6278 3170 6278 3152 6278 3144 6279 3152 6279 3153 6279 3144 6280 3143 6280 3152 6280 3147 6281 2829 6281 3149 6281 3149 6282 2829 6282 3154 6282 3150 6283 3154 6283 3175 6283 3151 6284 3175 6284 3176 6284 3155 6285 3176 6285 3172 6285 3170 6286 3172 6286 3156 6286 3152 6287 3156 6287 3153 6287 3152 6288 3170 6288 3156 6288 2829 6289 2822 6289 3154 6289 3154 6290 2822 6290 3177 6290 3175 6291 3177 6291 3173 6291 3176 6292 3173 6292 3171 6292 3172 6293 3171 6293 3159 6293 3156 6294 3159 6294 3157 6294 3153 6295 3156 6295 3157 6295 2822 6296 2812 6296 3177 6296 3177 6297 2812 6297 3158 6297 3173 6298 3158 6298 3162 6298 3171 6299 3162 6299 3160 6299 3159 6300 3160 6300 3157 6300 3159 6301 3171 6301 3160 6301 2812 6302 3164 6302 3158 6302 3158 6303 3164 6303 3161 6303 3162 6304 3161 6304 3163 6304 3160 6305 3163 6305 3157 6305 3160 6306 3162 6306 3163 6306 3164 6307 3165 6307 3161 6307 3161 6308 3165 6308 3166 6308 3163 6309 3166 6309 3157 6309 3163 6310 3161 6310 3166 6310 3165 6311 3157 6311 3166 6311 3172 6312 3159 6312 3156 6312 3110 6313 3145 6313 3167 6313 3100 6314 3110 6314 3167 6314 3099 6315 3100 6315 3098 6315 3168 6316 3143 6316 3145 6316 3169 6317 3170 6317 3143 6317 3155 6318 3172 6318 3170 6318 3176 6319 3171 6319 3172 6319 3173 6320 3162 6320 3171 6320 3158 6321 3161 6321 3162 6321 3114 6322 3178 6322 3099 6322 3109 6323 3168 6323 3110 6323 3180 6324 3109 6324 3110 6324 3178 6325 3180 6325 3100 6325 3174 6326 3169 6326 3168 6326 3142 6327 3155 6327 3169 6327 3151 6328 3176 6328 3155 6328 3175 6329 3173 6329 3176 6329 3177 6330 3158 6330 3173 6330 3122 6331 3115 6331 3114 6331 3115 6332 3101 6332 3178 6332 3182 6333 3174 6333 3109 6333 3179 6334 3182 6334 3109 6334 3101 6335 3179 6335 3180 6335 3108 6336 3142 6336 3174 6336 3183 6337 3151 6337 3142 6337 3150 6338 3175 6338 3151 6338 3154 6339 3177 6339 3175 6339 3127 6340 3184 6340 3122 6340 3184 6341 3116 6341 3115 6341 3116 6342 3181 6342 3101 6342 3107 6343 3108 6343 3182 6343 3187 6344 3107 6344 3182 6344 3181 6345 3187 6345 3179 6345 3186 6346 3183 6346 3108 6346 3188 6347 3150 6347 3183 6347 3149 6348 3154 6348 3150 6348 3132 6349 3128 6349 3127 6349 3128 6350 3124 6350 3184 6350 3124 6351 3118 6351 3116 6351 3118 6352 3185 6352 3181 6352 3112 6353 3186 6353 3107 6353 3102 6354 3112 6354 3107 6354 3185 6355 3102 6355 3187 6355 3141 6356 3188 6356 3186 6356 3148 6357 3149 6357 3188 6357 3192 6358 3193 6358 3132 6358 3193 6359 3129 6359 3128 6359 3129 6360 3194 6360 3124 6360 3194 6361 3189 6361 3118 6361 3189 6362 3191 6362 3185 6362 3190 6363 3141 6363 3112 6363 3196 6364 3190 6364 3112 6364 3191 6365 3196 6365 3102 6365 3140 6366 3148 6366 3141 6366 3137 6367 3134 6367 3192 6367 3134 6368 3133 6368 3193 6368 3133 6369 3125 6369 3129 6369 3125 6370 3195 6370 3194 6370 3195 6371 3120 6371 3189 6371 3120 6372 3103 6372 3191 6372 3103 6373 
[large block of numeric index data, apparently mesh triangle (vertex/normal) indices from a model file added in this diff]
4282 8506 4258 8506 4259 8507 4260 8507 4282 8507 4259 8508 4261 8508 4260 8508 4259 8509 4263 8509 4261 8509 4261 8510 4263 8510 4260 8510 4260 8511 4263 8511 4269 8511 4024 8512 4269 8512 4268 8512 4024 8513 4260 8513 4269 8513 4024 8514 4022 8514 4260 8514 4260 8515 4022 8515 4282 8515 4282 8516 4022 8516 4254 8516 4269 8517 4263 8517 4264 8517 4262 8518 4264 8518 4263 8518 4262 8519 4269 8519 4264 8519 4262 8520 4267 8520 4269 8520 4262 8521 4265 8521 4267 8521 4262 8522 4294 8522 4265 8522 4265 8523 4294 8523 4267 8523 4267 8524 4294 8524 4266 8524 4047 8525 4266 8525 4055 8525 4047 8526 4267 8526 4266 8526 4047 8527 4268 8527 4267 8527 4267 8528 4268 8528 4269 8528 4266 8529 4294 8529 4270 8529 4271 8530 4270 8530 4294 8530 4271 8531 4266 8531 4270 8531 4271 8532 4276 8532 4266 8532 4271 8533 4272 8533 4276 8533 4271 8534 4273 8534 4272 8534 4272 8535 4273 8535 4276 8535 4276 8536 4273 8536 4274 8536 4275 8537 4274 8537 4061 8537 4275 8538 4276 8538 4274 8538 4275 8539 4055 8539 4276 8539 4276 8540 4055 8540 4266 8540 4274 8541 4273 8541 4279 8541 4281 8542 4279 8542 4277 8542 4280 8543 4277 8543 4278 8543 4281 8544 4278 8544 4161 8544 4061 8545 4281 8545 4161 8545 4061 8546 4274 8546 4281 8546 4281 8547 4274 8547 4279 8547 4281 8548 4277 8548 4280 8548 4278 8549 4281 8549 4280 8549 4282 8550 4252 8550 4253 8550 4277 8551 4279 8551 4273 8551 3323 8552 4278 8552 4386 8552 3324 8553 4386 8553 4388 8553 3325 8554 4388 8554 4405 8554 3328 8555 4405 8555 4283 8555 3328 8556 3325 8556 4405 8556 4273 8557 4387 8557 4277 8557 4273 8558 4284 8558 4387 8558 4273 8559 4340 8559 4284 8559 4273 8560 4271 8560 4340 8560 4340 8561 4271 8561 4341 8561 4337 8562 4341 8562 4285 8562 4359 8563 4285 8563 4360 8563 4358 8564 4360 8564 4287 8564 4286 8565 4287 8565 4413 8565 4412 8566 4413 8566 4411 8566 4430 8567 4411 8567 4428 8567 4288 8568 4428 8568 4289 8568 4442 8569 4289 8569 4440 8569 4443 8570 4440 8570 4290 8570 4354 8571 4290 8571 4353 8571 4271 8572 4294 8572 4341 8572 4341 8573 4294 8573 4385 8573 4285 8574 4385 8574 4291 8574 4360 8575 4291 8575 4295 8575 4287 8576 4295 8576 4292 8576 4413 8577 4292 8577 4296 8577 4411 8578 4296 8578 4421 8578 4428 8579 4421 8579 4429 8579 4289 8580 4429 8580 4293 8580 4440 8581 4293 8581 4299 8581 4290 8582 4299 8582 4439 8582 4353 8583 4439 8583 4441 8583 4385 8584 4294 8584 4351 8584 4291 8585 4351 8585 4378 8585 4295 8586 4378 8586 4403 8586 4292 8587 4403 8587 4297 8587 4296 8588 4297 8588 4298 8588 4421 8589 4298 8589 4420 8589 4429 8590 4420 8590 4382 8590 4293 8591 4382 8591 4300 8591 4299 8592 4300 8592 4438 8592 4439 8593 4438 8593 4437 8593 4441 8594 4437 8594 4352 8594 4263 8595 4377 8595 4262 8595 4263 8596 4375 8596 4377 8596 4263 8597 4301 8597 4375 8597 4263 8598 4259 8598 4301 8598 4301 8599 4259 8599 4374 8599 4391 8600 4374 8600 4389 8600 4390 8601 4389 8601 4400 8601 4399 8602 4400 8602 4302 8602 4303 8603 4302 8603 4304 8603 4410 8604 4304 8604 4305 8604 4306 8605 4305 8605 4313 8605 4426 8606 4313 8606 4307 8606 4432 8607 4307 8607 4448 8607 4308 8608 4448 8608 4309 8608 4433 8609 4309 8609 4343 8609 4259 8610 4310 8610 4374 8610 4374 8611 4310 8611 4372 8611 4389 8612 4372 8612 4311 8612 4400 8613 4311 8613 4312 8613 4302 8614 4312 8614 4406 8614 4304 8615 4406 8615 4317 8615 4305 8616 4317 8616 4318 8616 4313 8617 4318 8617 4416 8617 4307 8618 4416 8618 4320 8618 4448 8619 4320 8619 4319 8619 4309 8620 4319 8620 4314 8620 5536 8621 4309 8621 4314 8621 5536 8622 4343 8622 4309 8622 5536 8623 4315 8623 4343 8623 4372 8624 4310 8624 4373 
8624 4311 8625 4373 8625 4316 8625 4312 8626 4316 8626 4329 8626 4406 8627 4329 8627 4330 8627 4317 8628 4330 8628 4409 8628 4318 8629 4409 8629 4326 8629 4416 8630 4326 8630 4328 8630 4320 8631 4328 8631 4331 8631 4319 8632 4331 8632 4314 8632 4319 8633 4320 8633 4331 8633 4310 8634 4321 8634 4373 8634 4373 8635 4321 8635 4322 8635 4323 8636 4373 8636 4322 8636 4323 8637 4316 8637 4373 8637 4323 8638 5526 8638 4316 8638 4316 8639 5526 8639 4329 8639 4329 8640 5526 8640 4324 8640 4330 8641 4324 8641 5527 8641 4409 8642 5527 8642 4325 8642 4326 8643 4325 8643 4327 8643 4328 8644 4327 8644 4331 8644 4328 8645 4326 8645 4327 8645 4329 8646 4324 8646 4330 8646 4330 8647 5527 8647 4409 8647 4409 8648 4325 8648 4326 8648 4327 8649 4314 8649 4331 8649 4332 8650 4369 8650 4368 8650 4332 8651 4422 8651 4369 8651 4332 8652 4333 8652 4422 8652 4332 8653 3330 8653 4333 8653 4333 8654 3330 8654 4334 8654 4363 8655 4334 8655 4335 8655 4336 8656 4335 8656 4398 8656 4359 8657 4398 8657 4337 8657 4285 8658 4359 8658 4337 8658 3330 8659 4283 8659 4334 8659 4334 8660 4283 8660 4404 8660 4335 8661 4404 8661 4338 8661 4398 8662 4338 8662 4339 8662 4337 8663 4339 8663 4340 8663 4341 8664 4337 8664 4340 8664 4404 8665 4283 8665 4405 8665 4338 8666 4405 8666 4342 8666 4339 8667 4342 8667 4284 8667 4340 8668 4339 8668 4284 8668 3325 8669 3324 8669 4388 8669 3324 8670 3323 8670 4386 8670 4343 8671 4315 8671 4433 8671 4433 8672 4315 8672 4344 8672 4308 8673 4344 8673 4345 8673 4432 8674 4345 8674 4425 8674 4426 8675 4425 8675 4397 8675 4306 8676 4397 8676 4417 8676 4410 8677 4417 8677 4395 8677 4303 8678 4395 8678 4394 8678 4399 8679 4394 8679 4346 8679 4390 8680 4346 8680 4347 8680 4391 8681 4347 8681 4393 8681 4301 8682 4393 8682 4375 8682 4301 8683 4391 8683 4393 8683 4301 8684 4374 8684 4391 8684 4348 8685 4315 8685 4435 8685 4436 8686 4435 8686 4434 8686 4396 8687 4434 8687 4349 8687 4427 8688 4349 8688 4384 8688 4419 8689 4384 8689 4383 8689 4418 8690 4383 8690 4381 8690 4407 8691 4381 8691 4350 8691 4401 8692 4350 8692 4408 8692 4402 8693 4408 8693 4380 8693 4392 8694 4380 8694 4376 8694 4377 8695 4376 8695 4379 8695 4262 8696 4379 8696 4351 8696 4294 8697 4262 8697 4351 8697 4352 8698 4315 8698 4441 8698 4441 8699 4315 8699 4353 8699 4353 8700 4315 8700 4354 8700 4354 8701 4315 8701 4449 8701 4443 8702 4449 8702 4355 8702 4442 8703 4355 8703 4356 8703 4288 8704 4356 8704 4431 8704 4430 8705 4431 8705 4357 8705 4412 8706 4357 8706 4424 8706 4286 8707 4424 8707 4364 8707 4358 8708 4364 8708 4336 8708 4359 8709 4336 8709 4398 8709 4359 8710 4358 8710 4336 8710 4359 8711 4360 8711 4358 8711 4361 8712 4315 8712 4446 8712 4445 8713 4446 8713 4444 8713 4415 8714 4444 8714 4370 8714 4414 8715 4370 8715 4369 8715 4362 8716 4369 8716 4422 8716 4423 8717 4422 8717 4333 8717 4363 8718 4333 8718 4334 8718 4363 8719 4423 8719 4333 8719 4363 8720 4364 8720 4423 8720 4363 8721 4336 8721 4364 8721 4363 8722 4335 8722 4336 8722 4365 8723 4315 8723 4447 8723 4366 8724 4447 8724 4367 8724 4371 8725 4367 8725 4368 8725 4369 8726 4371 8726 4368 8726 4369 8727 4370 8727 4371 8727 4371 8728 4370 8728 4366 8728 4367 8729 4371 8729 4366 8729 4335 8730 4334 8730 4404 8730 4311 8731 4372 8731 4373 8731 4389 8732 4374 8732 4372 8732 4377 8733 4379 8733 4262 8733 4377 8734 4375 8734 4392 8734 4376 8735 4377 8735 4392 8735 4378 8736 4351 8736 4379 8736 4376 8737 4378 8737 4379 8737 4376 8738 4403 8738 4378 8738 4376 8739 4380 8739 4403 8739 4403 8740 4380 8740 4297 8740 4297 8741 4380 8741 4408 8741 4298 8742 4408 8742 4350 8742 4420 8743 
4350 8743 4381 8743 4382 8744 4381 8744 4383 8744 4300 8745 4383 8745 4384 8745 4438 8746 4384 8746 4349 8746 4437 8747 4349 8747 4434 8747 4352 8748 4434 8748 4435 8748 4291 8749 4385 8749 4351 8749 4285 8750 4341 8750 4385 8750 4278 8751 4277 8751 4386 8751 4386 8752 4277 8752 4387 8752 4388 8753 4387 8753 4342 8753 4405 8754 4388 8754 4342 8754 4387 8755 4284 8755 4342 8755 4386 8756 4387 8756 4388 8756 4312 8757 4311 8757 4316 8757 4400 8758 4389 8758 4311 8758 4347 8759 4391 8759 4390 8759 4390 8760 4391 8760 4389 8760 4375 8761 4393 8761 4392 8761 4392 8762 4393 8762 4402 8762 4380 8763 4392 8763 4402 8763 4402 8764 4393 8764 4347 8764 4401 8765 4347 8765 4346 8765 4407 8766 4346 8766 4394 8766 4418 8767 4394 8767 4395 8767 4419 8768 4395 8768 4417 8768 4427 8769 4417 8769 4397 8769 4396 8770 4397 8770 4425 8770 4436 8771 4425 8771 4345 8771 4348 8772 4345 8772 4344 8772 4295 8773 4291 8773 4378 8773 4360 8774 4285 8774 4291 8774 4398 8775 4339 8775 4337 8775 4338 8776 4342 8776 4339 8776 4406 8777 4312 8777 4329 8777 4302 8778 4400 8778 4312 8778 4346 8779 4390 8779 4399 8779 4399 8780 4390 8780 4400 8780 4408 8781 4402 8781 4401 8781 4401 8782 4402 8782 4347 8782 4292 8783 4295 8783 4403 8783 4287 8784 4360 8784 4295 8784 4335 8785 4338 8785 4398 8785 4404 8786 4405 8786 4338 8786 4317 8787 4406 8787 4330 8787 4304 8788 4302 8788 4406 8788 4394 8789 4399 8789 4303 8789 4303 8790 4399 8790 4302 8790 4350 8791 4401 8791 4407 8791 4407 8792 4401 8792 4346 8792 4298 8793 4297 8793 4408 8793 4296 8794 4292 8794 4297 8794 4413 8795 4287 8795 4292 8795 4364 8796 4358 8796 4286 8796 4286 8797 4358 8797 4287 8797 4318 8798 4317 8798 4409 8798 4305 8799 4304 8799 4317 8799 4395 8800 4303 8800 4410 8800 4410 8801 4303 8801 4304 8801 4381 8802 4407 8802 4418 8802 4418 8803 4407 8803 4394 8803 4420 8804 4298 8804 4350 8804 4421 8805 4296 8805 4298 8805 4411 8806 4413 8806 4296 8806 4424 8807 4286 8807 4412 8807 4412 8808 4286 8808 4413 8808 4423 8809 4364 8809 4424 8809 4362 8810 4424 8810 4357 8810 4414 8811 4357 8811 4431 8811 4415 8812 4431 8812 4356 8812 4445 8813 4356 8813 4355 8813 4361 8814 4355 8814 4449 8814 4416 8815 4318 8815 4326 8815 4313 8816 4305 8816 4318 8816 4417 8817 4410 8817 4306 8817 4306 8818 4410 8818 4305 8818 4383 8819 4418 8819 4419 8819 4419 8820 4418 8820 4395 8820 4382 8821 4420 8821 4381 8821 4429 8822 4421 8822 4420 8822 4428 8823 4411 8823 4421 8823 4357 8824 4412 8824 4430 8824 4430 8825 4412 8825 4411 8825 4422 8826 4423 8826 4362 8826 4362 8827 4423 8827 4424 8827 4416 8828 4328 8828 4320 8828 4313 8829 4416 8829 4307 8829 4306 8830 4313 8830 4426 8830 4397 8831 4306 8831 4426 8831 4307 8832 4320 8832 4448 8832 4425 8833 4426 8833 4432 8833 4432 8834 4426 8834 4307 8834 4419 8835 4417 8835 4427 8835 4384 8836 4419 8836 4427 8836 4349 8837 4427 8837 4396 8837 4396 8838 4427 8838 4397 8838 4382 8839 4383 8839 4300 8839 4429 8840 4382 8840 4293 8840 4300 8841 4384 8841 4438 8841 4428 8842 4429 8842 4289 8842 4293 8843 4300 8843 4299 8843 4430 8844 4428 8844 4288 8844 4431 8845 4430 8845 4288 8845 4289 8846 4293 8846 4440 8846 4356 8847 4288 8847 4442 8847 4442 8848 4288 8848 4289 8848 4362 8849 4357 8849 4414 8849 4369 8850 4362 8850 4414 8850 4370 8851 4414 8851 4415 8851 4415 8852 4414 8852 4431 8852 4432 8853 4448 8853 4308 8853 4345 8854 4432 8854 4308 8854 4344 8855 4308 8855 4433 8855 4433 8856 4308 8856 4309 8856 4396 8857 4425 8857 4436 8857 4434 8858 4396 8858 4436 8858 4435 8859 4436 8859 4348 8859 4348 8860 4436 8860 4345 8860 4438 8861 4349 8861 4437 
8861 4299 8862 4438 8862 4439 8862 4437 8863 4434 8863 4352 8863 4440 8864 4299 8864 4290 8864 4439 8865 4437 8865 4441 8865 4442 8866 4440 8866 4443 8866 4355 8867 4442 8867 4443 8867 4290 8868 4439 8868 4353 8868 4449 8869 4443 8869 4354 8869 4354 8870 4443 8870 4290 8870 4415 8871 4356 8871 4445 8871 4444 8872 4415 8872 4445 8872 4446 8873 4445 8873 4361 8873 4361 8874 4445 8874 4355 8874 4370 8875 4444 8875 4366 8875 4366 8876 4444 8876 4365 8876 4447 8877 4366 8877 4365 8877 4365 8878 4444 8878 4446 8878 4368 8879 4367 8879 4447 8879 4315 8880 4368 8880 4447 8880 4448 8881 4319 8881 4309 8881 4315 8882 4348 8882 4344 8882 4315 8883 4352 8883 4435 8883 4315 8884 4361 8884 4449 8884 4315 8885 4365 8885 4446 8885 5343 8886 4463 8886 4450 8886 4450 8887 4463 8887 4451 8887 4533 8888 4451 8888 4452 8888 4528 8889 4452 8889 4470 8889 4529 8890 4470 8890 4468 8890 4561 8891 4468 8891 4453 8891 4557 8892 4453 8892 4475 8892 4526 8893 4475 8893 4477 8893 4454 8894 4477 8894 4455 8894 4518 8895 4455 8895 4555 8895 4519 8896 4555 8896 4456 8896 4520 8897 4456 8897 4584 8897 4578 8898 4584 8898 4577 8898 4457 8899 4577 8899 4570 8899 4583 8900 4570 8900 4488 8900 4563 8901 4488 8901 4489 8901 3345 8902 4563 8902 4489 8902 3345 8903 4591 8903 4563 8903 3345 8904 3344 8904 4591 8904 4591 8905 3344 8905 4554 8905 4599 8906 4554 8906 4458 8906 4605 8907 4458 8907 4459 8907 4606 8908 4459 8908 4490 8908 4460 8909 4490 8909 4492 8909 4461 8910 4492 8910 4640 8910 4589 8911 4640 8911 4639 8911 4642 8912 4639 8912 4652 8912 4462 8913 4652 8913 4651 8913 4656 8914 4651 8914 4659 8914 4590 8915 4659 8915 5492 8915 4451 8916 4463 8916 4471 8916 4465 8917 4471 8917 4241 8917 4466 8918 4465 8918 4241 8918 4466 8919 4464 8919 4465 8919 4466 8920 4469 8920 4464 8920 4466 8921 4467 8921 4469 8921 4469 8922 4467 8922 4472 8922 4468 8923 4472 8923 4453 8923 4468 8924 4469 8924 4472 8924 4468 8925 4470 8925 4469 8925 4469 8926 4470 8926 4464 8926 4464 8927 4470 8927 4452 8927 4465 8928 4452 8928 4451 8928 4471 8929 4465 8929 4451 8929 4467 8930 4473 8930 4472 8930 4472 8931 4473 8931 4474 8931 4453 8932 4474 8932 4475 8932 4453 8933 4472 8933 4474 8933 4474 8934 4473 8934 4476 8934 4475 8935 4476 8935 4477 8935 4475 8936 4474 8936 4476 8936 4478 8937 4482 8937 4556 8937 4478 8938 4479 8938 4482 8938 4478 8939 4480 8939 4479 8939 4478 8940 4484 8940 4480 8940 4480 8941 4484 8941 4481 8941 4584 8942 4481 8942 4577 8942 4584 8943 4480 8943 4481 8943 4584 8944 4456 8944 4480 8944 4480 8945 4456 8945 4479 8945 4479 8946 4456 8946 4555 8946 4482 8947 4555 8947 4455 8947 4483 8948 4455 8948 4477 8948 4476 8949 4483 8949 4477 8949 4476 8950 4556 8950 4483 8950 4476 8951 4473 8951 4556 8951 4484 8952 4486 8952 4481 8952 4481 8953 4486 8953 4485 8953 4577 8954 4485 8954 4570 8954 4577 8955 4481 8955 4485 8955 4485 8956 4486 8956 4487 8956 4570 8957 4487 8957 4488 8957 4570 8958 4485 8958 4487 8958 4486 8959 3335 8959 4487 8959 4487 8960 3335 8960 3336 8960 4488 8961 3336 8961 4489 8961 4488 8962 4487 8962 3336 8962 3344 8963 4629 8963 4554 8963 4554 8964 4629 8964 4494 8964 4458 8965 4494 8965 4496 8965 4459 8966 4496 8966 4622 8966 4490 8967 4622 8967 4491 8967 4492 8968 4491 8968 4638 8968 4640 8969 4638 8969 4499 8969 4639 8970 4499 8970 4493 8970 4652 8971 4493 8971 4502 8971 4651 8972 4502 8972 4503 8972 4659 8973 4503 8973 5492 8973 4494 8974 4629 8974 4495 8974 4496 8975 4495 8975 4497 8975 4622 8976 4497 8976 4498 8976 4491 8977 4498 8977 4541 8977 4638 8978 4541 8978 4500 8978 4499 8979 4500 8979 4501 8979 4493 8980 
4501 8980 4650 8980 4502 8981 4650 8981 4543 8981 4503 8982 4543 8982 5492 8982 4504 8983 4619 8983 4505 8983 4504 8984 4620 8984 4619 8984 4504 8985 4506 8985 4620 8985 4620 8986 4506 8986 4507 8986 4542 8987 4507 8987 4632 8987 4508 8988 4632 8988 4538 8988 4509 8989 4538 8989 4660 8989 4661 8990 4660 8990 5492 8990 5492 8991 4553 8991 4506 8991 5503 8992 4550 8992 4662 8992 5503 8993 4510 8993 4550 8993 5503 8994 4511 8994 4510 8994 5503 8995 4512 8995 4511 8995 4511 8996 4512 8996 4522 8996 4513 8997 4522 8997 4523 8997 4514 8998 4523 8998 4627 8998 4547 8999 4627 8999 4615 8999 4548 9000 4615 9000 4608 9000 4515 9001 4608 9001 4598 9001 4516 9002 4598 9002 4517 9002 4596 9003 4517 9003 4518 9003 4519 9004 4518 9004 4555 9004 4519 9005 4596 9005 4518 9005 4519 9006 4521 9006 4596 9006 4519 9007 4520 9007 4521 9007 4519 9008 4456 9008 4520 9008 4512 9009 4612 9009 4522 9009 4522 9010 4612 9010 4628 9010 4523 9011 4628 9011 4626 9011 4627 9012 4626 9012 4524 9012 4615 9013 4524 9013 4601 9013 4608 9014 4601 9014 4602 9014 4598 9015 4602 9015 4525 9015 4517 9016 4525 9016 4454 9016 4518 9017 4454 9017 4455 9017 4518 9018 4517 9018 4454 9018 4628 9019 4612 9019 4614 9019 4626 9020 4614 9020 4618 9020 4524 9021 4618 9021 4611 9021 4601 9022 4611 9022 4603 9022 4602 9023 4603 9023 4558 9023 4525 9024 4558 9024 4526 9024 4454 9025 4526 9025 4477 9025 4454 9026 4525 9026 4526 9026 4527 9027 4613 9027 5510 9027 4527 9028 4616 9028 4613 9028 4527 9029 4560 9029 4616 9029 4527 9030 5511 9030 4560 9030 4560 9031 5511 9031 4532 9031 4530 9032 4532 9032 4528 9032 4529 9033 4528 9033 4470 9033 4529 9034 4530 9034 4528 9034 4529 9035 4531 9035 4530 9035 4529 9036 4561 9036 4531 9036 4529 9037 4468 9037 4561 9037 4532 9038 5511 9038 4533 9038 4528 9039 4533 9039 4452 9039 4528 9040 4532 9040 4533 9040 4553 9041 5492 9041 4534 9041 4552 9042 4534 9042 4537 9042 4535 9043 4537 9043 4632 9043 4507 9044 4535 9044 4632 9044 4507 9045 4506 9045 4535 9045 4535 9046 4506 9046 4552 9046 4537 9047 4535 9047 4552 9047 4536 9048 5492 9048 4660 9048 4538 9049 4536 9049 4660 9049 4538 9050 4537 9050 4536 9050 4538 9051 4632 9051 4537 9051 4661 9052 5492 9052 4539 9052 4636 9053 4539 9053 4540 9053 4649 9054 4540 9054 4501 9054 4500 9055 4649 9055 4501 9055 4500 9056 4633 9056 4649 9056 4500 9057 4541 9057 4633 9057 4633 9058 4541 9058 4634 9058 4635 9059 4634 9059 4631 9059 4542 9060 4631 9060 4620 9060 4507 9061 4542 9061 4620 9061 4544 9062 5492 9062 4543 9062 4650 9063 4544 9063 4543 9063 4650 9064 4540 9064 4544 9064 4650 9065 4501 9065 4540 9065 4590 9066 5492 9066 4657 9066 4655 9067 4657 9067 4647 9067 4569 9068 4647 9068 4576 9068 4581 9069 4576 9069 4646 9069 4582 9070 4646 9070 4546 9070 4545 9071 4546 9071 4514 9071 4547 9072 4514 9072 4627 9072 4547 9073 4545 9073 4514 9073 4547 9074 4549 9074 4545 9074 4547 9075 4548 9075 4549 9075 4547 9076 4615 9076 4548 9076 4648 9077 5492 9077 4663 9077 4658 9078 4663 9078 4550 9078 4551 9079 4550 9079 4510 9079 4645 9080 4510 9080 4511 9080 4513 9081 4511 9081 4522 9081 4513 9082 4645 9082 4511 9082 4513 9083 4546 9083 4645 9083 4513 9084 4514 9084 4546 9084 4513 9085 4523 9085 4514 9085 4552 9086 4506 9086 4553 9086 4534 9087 4552 9087 4553 9087 4523 9088 4522 9088 4628 9088 4458 9089 4554 9089 4494 9089 5511 9090 4450 9090 4533 9090 4533 9091 4450 9091 4451 9091 4241 9092 4471 9092 4463 9092 4555 9093 4482 9093 4479 9093 4455 9094 4483 9094 4482 9094 4482 9095 4483 9095 4556 9095 4452 9096 4465 9096 4464 9096 4557 9097 4475 9097 4526 9097 4558 9098 4557 9098 4526 
9098 4558 9099 4562 9099 4557 9099 4558 9100 4603 9100 4562 9100 4562 9101 4603 9101 4559 9101 4531 9102 4559 9102 4604 9102 4530 9103 4604 9103 4560 9103 4532 9104 4530 9104 4560 9104 4561 9105 4453 9105 4557 9105 4562 9106 4561 9106 4557 9106 4562 9107 4531 9107 4561 9107 4562 9108 4559 9108 4531 9108 4488 9109 4563 9109 4583 9109 4583 9110 4563 9110 4564 9110 4593 9111 4564 9111 4592 9111 4571 9112 4592 9112 4565 9112 4572 9113 4565 9113 4588 9113 4607 9114 4588 9114 4566 9114 4575 9115 4566 9115 4623 9115 4643 9116 4623 9116 4641 9116 4567 9117 4641 9117 4568 9117 4654 9118 4568 9118 4653 9118 4569 9119 4653 9119 4655 9119 4647 9120 4569 9120 4655 9120 4570 9121 4583 9121 4457 9121 4457 9122 4583 9122 4593 9122 4579 9123 4593 9123 4571 9123 4580 9124 4571 9124 4572 9124 4600 9125 4572 9125 4607 9125 4573 9126 4607 9126 4575 9126 4574 9127 4575 9127 4643 9127 4625 9128 4643 9128 4567 9128 4644 9129 4567 9129 4654 9129 4581 9130 4654 9130 4569 9130 4576 9131 4581 9131 4569 9131 4577 9132 4457 9132 4578 9132 4578 9133 4457 9133 4579 9133 4595 9134 4579 9134 4580 9134 4594 9135 4580 9135 4600 9135 4586 9136 4600 9136 4573 9136 4587 9137 4573 9137 4574 9137 4610 9138 4574 9138 4625 9138 4624 9139 4625 9139 4644 9139 4582 9140 4644 9140 4581 9140 4646 9141 4582 9141 4581 9141 4564 9142 4593 9142 4583 9142 4584 9143 4578 9143 4520 9143 4520 9144 4578 9144 4595 9144 4521 9145 4595 9145 4594 9145 4585 9146 4594 9146 4586 9146 4597 9147 4586 9147 4587 9147 4609 9148 4587 9148 4610 9148 4549 9149 4610 9149 4624 9149 4545 9150 4624 9150 4582 9150 4546 9151 4545 9151 4582 9151 4593 9152 4579 9152 4457 9152 4579 9153 4595 9153 4578 9153 4595 9154 4521 9154 4520 9154 4563 9155 4591 9155 4564 9155 4564 9156 4591 9156 4599 9156 4592 9157 4599 9157 4605 9157 4565 9158 4605 9158 4606 9158 4588 9159 4606 9159 4460 9159 4566 9160 4460 9160 4461 9160 4623 9161 4461 9161 4589 9161 4641 9162 4589 9162 4642 9162 4568 9163 4642 9163 4462 9163 4653 9164 4462 9164 4656 9164 4655 9165 4656 9165 4590 9165 4657 9166 4655 9166 4590 9166 4554 9167 4599 9167 4591 9167 4599 9168 4592 9168 4564 9168 4592 9169 4571 9169 4593 9169 4571 9170 4580 9170 4579 9170 4580 9171 4594 9171 4595 9171 4516 9172 4517 9172 4596 9172 4585 9173 4596 9173 4521 9173 4594 9174 4585 9174 4521 9174 4516 9175 4596 9175 4585 9175 4597 9176 4585 9176 4586 9176 4597 9177 4516 9177 4585 9177 4597 9178 4515 9178 4516 9178 4597 9179 4609 9179 4515 9179 4597 9180 4587 9180 4609 9180 4598 9181 4525 9181 4517 9181 4602 9182 4558 9182 4525 9182 4604 9183 4530 9183 4531 9183 4605 9184 4599 9184 4458 9184 4565 9185 4592 9185 4605 9185 4572 9186 4571 9186 4565 9186 4600 9187 4580 9187 4572 9187 4586 9188 4594 9188 4600 9188 4515 9189 4598 9189 4516 9189 4608 9190 4602 9190 4598 9190 4601 9191 4603 9191 4602 9191 4603 9192 4611 9192 4559 9192 4559 9193 4611 9193 4617 9193 4604 9194 4617 9194 4616 9194 4560 9195 4604 9195 4616 9195 4604 9196 4559 9196 4617 9196 4495 9197 4496 9197 4494 9197 4496 9198 4459 9198 4458 9198 4459 9199 4606 9199 4605 9199 4497 9200 4622 9200 4496 9200 4606 9201 4588 9201 4565 9201 4622 9202 4490 9202 4459 9202 4588 9203 4607 9203 4572 9203 4490 9204 4460 9204 4606 9204 4607 9205 4573 9205 4600 9205 4460 9206 4566 9206 4588 9206 4573 9207 4587 9207 4586 9207 4566 9208 4575 9208 4607 9208 4575 9209 4574 9209 4573 9209 4608 9210 4515 9210 4548 9210 4548 9211 4515 9211 4609 9211 4549 9212 4609 9212 4610 9212 4549 9213 4548 9213 4609 9213 4574 9214 4610 9214 4587 9214 4601 9215 4608 9215 4615 9215 4611 9216 4601 9216 4524 9216 4617 9217 
4611 9217 4618 9217 4613 9218 4618 9218 4614 9218 5510 9219 4614 9219 4612 9219 5510 9220 4613 9220 4614 9220 4524 9221 4615 9221 4627 9221 4616 9222 4617 9222 4613 9222 4613 9223 4617 9223 4618 9223 4618 9224 4524 9224 4626 9224 4497 9225 4495 9225 4630 9225 4621 9226 4630 9226 4619 9226 4631 9227 4619 9227 4620 9227 4631 9228 4621 9228 4619 9228 4631 9229 4634 9229 4621 9229 4621 9230 4634 9230 4498 9230 4497 9231 4621 9231 4498 9231 4497 9232 4630 9232 4621 9232 4491 9233 4622 9233 4498 9233 4492 9234 4490 9234 4491 9234 4461 9235 4460 9235 4492 9235 4623 9236 4566 9236 4461 9236 4643 9237 4575 9237 4623 9237 4625 9238 4574 9238 4643 9238 4624 9239 4610 9239 4625 9239 4545 9240 4549 9240 4624 9240 4523 9241 4626 9241 4627 9241 4628 9242 4614 9242 4626 9242 4629 9243 4505 9243 4495 9243 4495 9244 4505 9244 4630 9244 4630 9245 4505 9245 4619 9245 4635 9246 4631 9246 4542 9246 4508 9247 4542 9247 4632 9247 4508 9248 4635 9248 4542 9248 4508 9249 4637 9249 4635 9249 4508 9250 4509 9250 4637 9250 4508 9251 4538 9251 4509 9251 4634 9252 4541 9252 4498 9252 4633 9253 4634 9253 4635 9253 4637 9254 4633 9254 4635 9254 4637 9255 4649 9255 4633 9255 4637 9256 4636 9256 4649 9256 4637 9257 4509 9257 4636 9257 4636 9258 4509 9258 4661 9258 4539 9259 4636 9259 4661 9259 4541 9260 4638 9260 4491 9260 4638 9261 4640 9261 4492 9261 4499 9262 4638 9262 4500 9262 4640 9263 4589 9263 4461 9263 4639 9264 4640 9264 4499 9264 4589 9265 4641 9265 4623 9265 4642 9266 4589 9266 4639 9266 4641 9267 4567 9267 4643 9267 4568 9268 4641 9268 4642 9268 4567 9269 4644 9269 4625 9269 4654 9270 4567 9270 4568 9270 4644 9271 4582 9271 4624 9271 4581 9272 4644 9272 4654 9272 4645 9273 4546 9273 4646 9273 4551 9274 4646 9274 4576 9274 4658 9275 4576 9275 4647 9275 4648 9276 4647 9276 4657 9276 4540 9277 4649 9277 4636 9277 4501 9278 4493 9278 4499 9278 4493 9279 4652 9279 4639 9279 4502 9280 4493 9280 4650 9280 4652 9281 4462 9281 4642 9281 4651 9282 4652 9282 4502 9282 4462 9283 4653 9283 4568 9283 4656 9284 4462 9284 4651 9284 4653 9285 4569 9285 4654 9285 4655 9286 4653 9286 4656 9286 4645 9287 4646 9287 4551 9287 4510 9288 4645 9288 4551 9288 4550 9289 4551 9289 4658 9289 4658 9290 4551 9290 4576 9290 4648 9291 4657 9291 5492 9291 4647 9292 4648 9292 4658 9292 4658 9293 4648 9293 4663 9293 4656 9294 4659 9294 4590 9294 4651 9295 4503 9295 4659 9295 4543 9296 4503 9296 4502 9296 5492 9297 4544 9297 4539 9297 4539 9298 4544 9298 4540 9298 4660 9299 4661 9299 4509 9299 5492 9300 4536 9300 4534 9300 4534 9301 4536 9301 4537 9301 4662 9302 4550 9302 4663 9302 5492 9303 4662 9303 4663 9303 5349 9304 3760 9304 4759 9304 4714 9305 4759 9305 4757 9305 4713 9306 4757 9306 4664 9306 4665 9307 4664 9307 5347 9307 4665 9308 4713 9308 4664 9308 3759 9309 4666 9309 3761 9309 3759 9310 4667 9310 4666 9310 3759 9311 4711 9311 4667 9311 3759 9312 4668 9312 4711 9312 4711 9313 4668 9313 4756 9313 4767 9314 4756 9314 4708 9314 4709 9315 4708 9315 4766 9315 4778 9316 4766 9316 4777 9316 4779 9317 4777 9317 4783 9317 4794 9318 4783 9318 4669 9318 4670 9319 4669 9319 4806 9319 4809 9320 4806 9320 4671 9320 4672 9321 4671 9321 4808 9321 4818 9322 4808 9322 4820 9322 4819 9323 4820 9323 4731 9323 4668 9324 4675 9324 4756 9324 4756 9325 4675 9325 4673 9325 4708 9326 4673 9326 4755 9326 4766 9327 4755 9327 4769 9327 4777 9328 4769 9328 4674 9328 4783 9329 4674 9329 4776 9329 4669 9330 4776 9330 4782 9330 4806 9331 4782 9331 4803 9331 4671 9332 4803 9332 4804 9332 4808 9333 4804 9333 4678 9333 4820 9334 4678 9334 4816 9334 4731 9335 4816 9335 4680 
9335 4673 9336 4675 9336 4676 9336 4755 9337 4676 9337 4677 9337 4769 9338 4677 9338 4750 9338 4674 9339 4750 9339 4751 9339 4776 9340 4751 9340 4753 9340 4782 9341 4753 9341 4793 9341 4803 9342 4793 9342 4802 9342 4804 9343 4802 9343 4807 9343 4678 9344 4807 9344 4679 9344 4816 9345 4679 9345 4817 9345 4680 9346 4817 9346 4815 9346 4681 9347 4730 9347 3749 9347 4681 9348 4721 9348 4730 9348 4681 9349 4682 9349 4721 9349 4681 9350 4683 9350 4682 9350 4682 9351 4683 9351 4684 9351 4761 9352 4684 9352 4747 9352 4768 9353 4747 9353 4760 9353 4685 9354 4760 9354 4772 9354 4774 9355 4772 9355 4781 9355 4791 9356 4781 9356 4686 9356 4798 9357 4686 9357 4691 9357 4799 9358 4691 9358 4797 9358 4716 9359 4797 9359 4811 9359 4812 9360 4811 9360 4813 9360 4715 9361 4813 9361 4695 9361 4683 9362 4687 9362 4684 9362 4684 9363 4687 9363 4746 9363 4747 9364 4746 9364 4698 9364 4760 9365 4698 9365 4688 9365 4772 9366 4688 9366 4689 9366 4781 9367 4689 9367 4690 9367 4686 9368 4690 9368 4788 9368 4691 9369 4788 9369 4692 9369 4797 9370 4692 9370 4693 9370 4811 9371 4693 9371 4701 9371 4813 9372 4701 9372 4694 9372 4696 9373 4813 9373 4694 9373 4696 9374 4695 9374 4813 9374 4696 9375 5467 9375 4695 9375 4746 9376 4687 9376 4697 9376 4698 9377 4697 9377 4702 9377 4688 9378 4702 9378 4699 9378 4689 9379 4699 9379 4707 9379 4690 9380 4707 9380 4780 9380 4788 9381 4780 9381 4789 9381 4692 9382 4789 9382 4706 9382 4693 9383 4706 9383 4700 9383 4701 9384 4700 9384 4694 9384 4701 9385 4693 9385 4700 9385 4687 9386 3733 9386 4697 9386 4697 9387 3733 9387 5461 9387 5462 9388 4697 9388 5461 9388 5462 9389 4702 9389 4697 9389 5462 9390 5464 9390 4702 9390 4702 9391 5464 9391 4699 9391 4699 9392 5464 9392 5465 9392 4707 9393 5465 9393 4703 9393 4780 9394 4703 9394 4704 9394 4789 9395 4704 9395 4705 9395 4706 9396 4705 9396 4700 9396 4706 9397 4789 9397 4705 9397 4699 9398 5465 9398 4707 9398 4707 9399 4703 9399 4780 9399 4780 9400 4704 9400 4789 9400 4705 9401 4694 9401 4700 9401 5354 9402 4737 9402 4825 9402 5354 9403 4795 9403 4737 9403 5354 9404 4738 9404 4795 9404 5354 9405 5348 9405 4738 9405 4738 9406 5348 9406 4744 9406 4740 9407 4744 9407 4770 9407 4735 9408 4770 9408 4710 9408 4709 9409 4710 9409 4767 9409 4708 9410 4709 9410 4767 9410 5348 9411 5347 9411 4744 9411 4744 9412 5347 9412 4745 9412 4770 9413 4745 9413 4771 9413 4710 9414 4771 9414 4712 9414 4767 9415 4712 9415 4711 9415 4756 9416 4767 9416 4711 9416 4745 9417 5347 9417 4664 9417 4771 9418 4664 9418 4758 9418 4712 9419 4758 9419 4667 9419 4711 9420 4712 9420 4667 9420 4713 9421 4714 9421 4757 9421 4714 9422 5349 9422 4759 9422 4695 9423 5467 9423 4715 9423 4715 9424 5467 9424 4828 9424 4812 9425 4828 9425 4765 9425 4716 9426 4765 9426 4717 9426 4799 9427 4717 9427 4801 9427 4798 9428 4801 9428 4790 9428 4791 9429 4790 9429 4718 9429 4774 9430 4718 9430 4773 9430 4685 9431 4773 9431 4762 9431 4768 9432 4762 9432 4719 9432 4761 9433 4719 9433 4720 9433 4682 9434 4720 9434 4721 9434 4682 9435 4761 9435 4720 9435 4682 9436 4684 9436 4761 9436 4827 9437 5467 9437 4829 9437 4814 9438 4829 9438 4722 9438 4764 9439 4722 9439 4723 9439 4763 9440 4723 9440 4805 9440 4800 9441 4805 9441 4724 9441 4792 9442 4724 9442 4726 9442 4725 9443 4726 9443 4754 9443 4775 9444 4754 9444 4752 9444 4727 9445 4752 9445 4728 9445 4748 9446 4728 9446 4729 9446 4730 9447 4729 9447 4749 9447 3749 9448 4749 9448 4676 9448 4675 9449 3749 9449 4676 9449 4815 9450 5467 9450 4680 9450 4680 9451 5467 9451 4731 9451 4731 9452 5467 9452 4819 9452 4819 9453 5467 9453 4732 9453 4818 9454 
4732 9454 4733 9454 4672 9455 4733 9455 4734 9455 4809 9456 4734 9456 4810 9456 4670 9457 4810 9457 4786 9457 4794 9458 4786 9458 4784 9458 4779 9459 4784 9459 4785 9459 4778 9460 4785 9460 4735 9460 4709 9461 4735 9461 4710 9461 4709 9462 4778 9462 4735 9462 4709 9463 4766 9463 4778 9463 4830 9464 5467 9464 4831 9464 4822 9465 4831 9465 4736 9465 4821 9466 4736 9466 4823 9466 4787 9467 4823 9467 4737 9467 4796 9468 4737 9468 4795 9468 4739 9469 4795 9469 4738 9469 4740 9470 4738 9470 4744 9470 4740 9471 4739 9471 4738 9471 4740 9472 4785 9472 4739 9472 4740 9473 4735 9473 4785 9473 4740 9474 4770 9474 4735 9474 4824 9475 5467 9475 4826 9475 4743 9476 4826 9476 4741 9476 4742 9477 4741 9477 4825 9477 4737 9478 4742 9478 4825 9478 4737 9479 4823 9479 4742 9479 4742 9480 4823 9480 4743 9480 4741 9481 4742 9481 4743 9481 4770 9482 4744 9482 4745 9482 4698 9483 4746 9483 4697 9483 4747 9484 4684 9484 4746 9484 4730 9485 4749 9485 3749 9485 4730 9486 4721 9486 4748 9486 4729 9487 4730 9487 4748 9487 4677 9488 4676 9488 4749 9488 4729 9489 4677 9489 4749 9489 4729 9490 4750 9490 4677 9490 4729 9491 4728 9491 4750 9491 4750 9492 4728 9492 4751 9492 4751 9493 4728 9493 4752 9493 4753 9494 4752 9494 4754 9494 4793 9495 4754 9495 4726 9495 4802 9496 4726 9496 4724 9496 4807 9497 4724 9497 4805 9497 4679 9498 4805 9498 4723 9498 4817 9499 4723 9499 4722 9499 4815 9500 4722 9500 4829 9500 4755 9501 4673 9501 4676 9501 4708 9502 4756 9502 4673 9502 3760 9503 3761 9503 4759 9503 4759 9504 3761 9504 4666 9504 4757 9505 4666 9505 4758 9505 4664 9506 4757 9506 4758 9506 4666 9507 4667 9507 4758 9507 4759 9508 4666 9508 4757 9508 4688 9509 4698 9509 4702 9509 4760 9510 4747 9510 4698 9510 4719 9511 4761 9511 4768 9511 4768 9512 4761 9512 4747 9512 4721 9513 4720 9513 4748 9513 4748 9514 4720 9514 4727 9514 4728 9515 4748 9515 4727 9515 4727 9516 4720 9516 4719 9516 4775 9517 4719 9517 4762 9517 4725 9518 4762 9518 4773 9518 4792 9519 4773 9519 4718 9519 4800 9520 4718 9520 4790 9520 4763 9521 4790 9521 4801 9521 4764 9522 4801 9522 4717 9522 4814 9523 4717 9523 4765 9523 4827 9524 4765 9524 4828 9524 4769 9525 4755 9525 4677 9525 4766 9526 4708 9526 4755 9526 4710 9527 4712 9527 4767 9527 4771 9528 4758 9528 4712 9528 4689 9529 4688 9529 4699 9529 4772 9530 4760 9530 4688 9530 4762 9531 4768 9531 4685 9531 4685 9532 4768 9532 4760 9532 4752 9533 4727 9533 4775 9533 4775 9534 4727 9534 4719 9534 4674 9535 4769 9535 4750 9535 4777 9536 4766 9536 4769 9536 4770 9537 4771 9537 4710 9537 4745 9538 4664 9538 4771 9538 4690 9539 4689 9539 4707 9539 4781 9540 4772 9540 4689 9540 4773 9541 4685 9541 4774 9541 4774 9542 4685 9542 4772 9542 4754 9543 4775 9543 4725 9543 4725 9544 4775 9544 4762 9544 4753 9545 4751 9545 4752 9545 4776 9546 4674 9546 4751 9546 4783 9547 4777 9547 4674 9547 4785 9548 4778 9548 4779 9548 4779 9549 4778 9549 4777 9549 4788 9550 4690 9550 4780 9550 4686 9551 4781 9551 4690 9551 4718 9552 4774 9552 4791 9552 4791 9553 4774 9553 4781 9553 4726 9554 4725 9554 4792 9554 4792 9555 4725 9555 4773 9555 4793 9556 4753 9556 4754 9556 4782 9557 4776 9557 4753 9557 4669 9558 4783 9558 4776 9558 4784 9559 4779 9559 4794 9559 4794 9560 4779 9560 4783 9560 4739 9561 4785 9561 4784 9561 4796 9562 4784 9562 4786 9562 4787 9563 4786 9563 4810 9563 4821 9564 4810 9564 4734 9564 4822 9565 4734 9565 4733 9565 4830 9566 4733 9566 4732 9566 4692 9567 4788 9567 4789 9567 4691 9568 4686 9568 4788 9568 4790 9569 4791 9569 4798 9569 4798 9570 4791 9570 4686 9570 4724 9571 4792 9571 4800 9571 4800 9572 4792 9572 4718 
9572 4802 9573 4793 9573 4726 9573 4803 9574 4782 9574 4793 9574 4806 9575 4669 9575 4782 9575 4786 9576 4794 9576 4670 9576 4670 9577 4794 9577 4669 9577 4795 9578 4739 9578 4796 9578 4796 9579 4739 9579 4784 9579 4692 9580 4706 9580 4693 9580 4691 9581 4692 9581 4797 9581 4798 9582 4691 9582 4799 9582 4801 9583 4798 9583 4799 9583 4797 9584 4693 9584 4811 9584 4717 9585 4799 9585 4716 9585 4716 9586 4799 9586 4797 9586 4800 9587 4790 9587 4763 9587 4805 9588 4800 9588 4763 9588 4723 9589 4763 9589 4764 9589 4764 9590 4763 9590 4801 9590 4802 9591 4724 9591 4807 9591 4803 9592 4802 9592 4804 9592 4807 9593 4805 9593 4679 9593 4806 9594 4803 9594 4671 9594 4804 9595 4807 9595 4678 9595 4670 9596 4806 9596 4809 9596 4810 9597 4670 9597 4809 9597 4671 9598 4804 9598 4808 9598 4734 9599 4809 9599 4672 9599 4672 9600 4809 9600 4671 9600 4796 9601 4786 9601 4787 9601 4737 9602 4796 9602 4787 9602 4823 9603 4787 9603 4821 9603 4821 9604 4787 9604 4810 9604 4716 9605 4811 9605 4812 9605 4765 9606 4716 9606 4812 9606 4828 9607 4812 9607 4715 9607 4715 9608 4812 9608 4813 9608 4764 9609 4717 9609 4814 9609 4722 9610 4764 9610 4814 9610 4829 9611 4814 9611 4827 9611 4827 9612 4814 9612 4765 9612 4679 9613 4723 9613 4817 9613 4678 9614 4679 9614 4816 9614 4817 9615 4722 9615 4815 9615 4808 9616 4678 9616 4820 9616 4816 9617 4817 9617 4680 9617 4672 9618 4808 9618 4818 9618 4733 9619 4672 9619 4818 9619 4820 9620 4816 9620 4731 9620 4732 9621 4818 9621 4819 9621 4819 9622 4818 9622 4820 9622 4821 9623 4734 9623 4822 9623 4736 9624 4821 9624 4822 9624 4831 9625 4822 9625 4830 9625 4830 9626 4822 9626 4733 9626 4823 9627 4736 9627 4743 9627 4743 9628 4736 9628 4824 9628 4826 9629 4743 9629 4824 9629 4824 9630 4736 9630 4831 9630 4825 9631 4741 9631 4826 9631 5467 9632 4825 9632 4826 9632 4811 9633 4701 9633 4813 9633 5467 9634 4827 9634 4828 9634 5467 9635 4815 9635 4829 9635 5467 9636 4830 9636 4732 9636 5467 9637 4824 9637 4831 9637 5357 9638 4964 9638 4893 9638 5357 9639 4893 9639 4832 9639 5357 9640 4832 9640 4891 9640 5357 9641 4891 9641 4890 9641 5357 9642 4890 9642 4889 9642 5357 9643 4889 9643 4888 9643 5357 9644 4888 9644 4887 9644 5357 9645 4887 9645 4917 9645 5357 9646 4917 9646 4894 9646 5357 9647 4894 9647 4927 9647 4833 9648 4896 9648 4892 9648 4833 9649 4834 9649 4896 9649 4833 9650 4835 9650 4834 9650 4834 9651 4835 9651 4847 9651 4846 9652 4847 9652 4848 9652 4899 9653 4848 9653 4836 9653 4906 9654 4836 9654 4903 9654 4907 9655 4903 9655 4851 9655 4912 9656 4851 9656 4837 9656 4838 9657 4837 9657 4852 9657 4841 9658 4852 9658 4853 9658 4922 9659 4853 9659 4920 9659 4839 9660 4920 9660 4855 9660 4839 9661 4922 9661 4920 9661 4839 9662 4840 9662 4922 9662 4922 9663 4840 9663 4924 9663 4841 9664 4924 9664 4923 9664 4838 9665 4923 9665 4842 9665 4912 9666 4842 9666 4916 9666 4907 9667 4916 9667 4843 9667 4906 9668 4843 9668 4844 9668 4899 9669 4844 9669 4845 9669 4846 9670 4845 9670 4900 9670 4834 9671 4900 9671 4896 9671 4834 9672 4846 9672 4900 9672 4834 9673 4847 9673 4846 9673 4835 9674 4856 9674 4847 9674 4847 9675 4856 9675 4849 9675 4848 9676 4849 9676 4898 9676 4836 9677 4898 9677 4850 9677 4903 9678 4850 9678 4905 9678 4851 9679 4905 9679 4859 9679 4837 9680 4859 9680 4915 9680 4852 9681 4915 9681 4860 9681 4853 9682 4860 9682 4921 9682 4920 9683 4921 9683 4854 9683 4855 9684 4854 9684 4942 9684 4855 9685 4920 9685 4854 9685 4856 9686 4857 9686 4849 9686 4849 9687 4857 9687 4858 9687 4898 9688 4858 9688 4897 9688 4850 9689 4897 9689 4863 9689 4905 9690 4863 9690 4902 9690 4859 9691 
4902 9691 4911 9691 4915 9692 4911 9692 4910 9692 4860 9693 4910 9693 4914 9693 4921 9694 4914 9694 4861 9694 4854 9695 4861 9695 4866 9695 4942 9696 4866 9696 4862 9696 4942 9697 4854 9697 4866 9697 4857 9698 4949 9698 4858 9698 4858 9699 4949 9699 4895 9699 4897 9700 4895 9700 4867 9700 4863 9701 4867 9701 4864 9701 4902 9702 4864 9702 4869 9702 4911 9703 4869 9703 4909 9703 4910 9704 4909 9704 4865 9704 4914 9705 4865 9705 4870 9705 4861 9706 4870 9706 4919 9706 4866 9707 4919 9707 4918 9707 4862 9708 4918 9708 4943 9708 4862 9709 4866 9709 4918 9709 4949 9710 4960 9710 4895 9710 4895 9711 4960 9711 4875 9711 4867 9712 4875 9712 4868 9712 4864 9713 4868 9713 4901 9713 4869 9714 4901 9714 4878 9714 4909 9715 4878 9715 4908 9715 4865 9716 4908 9716 4871 9716 4870 9717 4871 9717 4872 9717 4919 9718 4872 9718 4873 9718 4918 9719 4873 9719 4874 9719 4943 9720 4874 9720 4945 9720 4943 9721 4918 9721 4874 9721 4960 9722 4951 9722 4875 9722 4875 9723 4951 9723 4876 9723 4868 9724 4876 9724 4877 9724 4901 9725 4877 9725 4885 9725 4878 9726 4885 9726 4904 9726 4908 9727 4904 9727 4883 9727 4871 9728 4883 9728 4913 9728 4872 9729 4913 9729 4879 9729 4873 9730 4879 9730 4884 9730 4874 9731 4884 9731 4880 9731 4945 9732 4880 9732 5344 9732 4945 9733 4874 9733 4880 9733 4951 9734 3212 9734 4876 9734 4876 9735 3212 9735 4881 9735 3210 9736 4876 9736 4881 9736 3210 9737 4877 9737 4876 9737 3210 9738 4882 9738 4877 9738 4877 9739 4882 9739 4885 9739 4885 9740 4882 9740 3254 9740 4904 9741 3254 9741 3208 9741 4883 9742 3208 9742 3207 9742 4913 9743 3207 9743 3206 9743 4879 9744 3206 9744 4886 9744 4884 9745 4886 9745 3205 9745 4880 9746 3205 9746 5344 9746 4880 9747 4884 9747 3205 9747 4885 9748 3254 9748 4904 9748 4904 9749 3208 9749 4883 9749 4883 9750 3207 9750 4913 9750 4913 9751 3206 9751 4879 9751 4879 9752 4886 9752 4884 9752 4840 9753 4939 9753 4924 9753 4924 9754 4939 9754 4894 9754 4923 9755 4894 9755 4917 9755 4842 9756 4917 9756 4887 9756 4916 9757 4887 9757 4888 9757 4843 9758 4888 9758 4889 9758 4844 9759 4889 9759 4890 9759 4845 9760 4890 9760 4891 9760 4900 9761 4891 9761 4832 9761 4896 9762 4832 9762 4893 9762 4892 9763 4893 9763 4964 9763 4892 9764 4896 9764 4893 9764 4939 9765 4927 9765 4894 9765 4868 9766 4875 9766 4876 9766 4867 9767 4895 9767 4875 9767 4897 9768 4858 9768 4895 9768 4898 9769 4849 9769 4858 9769 4848 9770 4847 9770 4849 9770 4832 9771 4896 9771 4900 9771 4901 9772 4868 9772 4877 9772 4864 9773 4867 9773 4868 9773 4863 9774 4897 9774 4867 9774 4850 9775 4898 9775 4897 9775 4836 9776 4848 9776 4898 9776 4845 9777 4846 9777 4899 9777 4899 9778 4846 9778 4848 9778 4891 9779 4900 9779 4845 9779 4878 9780 4901 9780 4885 9780 4869 9781 4864 9781 4901 9781 4902 9782 4863 9782 4864 9782 4905 9783 4850 9783 4863 9783 4903 9784 4836 9784 4850 9784 4844 9785 4899 9785 4906 9785 4906 9786 4899 9786 4836 9786 4890 9787 4845 9787 4844 9787 4908 9788 4878 9788 4904 9788 4909 9789 4869 9789 4878 9789 4911 9790 4902 9790 4869 9790 4859 9791 4905 9791 4902 9791 4851 9792 4903 9792 4905 9792 4843 9793 4906 9793 4907 9793 4907 9794 4906 9794 4903 9794 4889 9795 4844 9795 4843 9795 4871 9796 4908 9796 4883 9796 4865 9797 4909 9797 4908 9797 4910 9798 4911 9798 4909 9798 4915 9799 4859 9799 4911 9799 4837 9800 4851 9800 4859 9800 4916 9801 4907 9801 4912 9801 4912 9802 4907 9802 4851 9802 4888 9803 4843 9803 4916 9803 4872 9804 4871 9804 4913 9804 4870 9805 4865 9805 4871 9805 4914 9806 4910 9806 4865 9806 4860 9807 4915 9807 4910 9807 4852 9808 4837 9808 4915 9808 4842 9809 4912 9809 4838 
9809 4838 9810 4912 9810 4837 9810 4887 9811 4916 9811 4842 9811 4873 9812 4872 9812 4879 9812 4919 9813 4870 9813 4872 9813 4861 9814 4914 9814 4870 9814 4921 9815 4860 9815 4914 9815 4853 9816 4852 9816 4860 9816 4923 9817 4838 9817 4841 9817 4841 9818 4838 9818 4852 9818 4917 9819 4842 9819 4923 9819 4874 9820 4873 9820 4884 9820 4918 9821 4919 9821 4873 9821 4866 9822 4861 9822 4919 9822 4854 9823 4921 9823 4861 9823 4920 9824 4853 9824 4921 9824 4924 9825 4841 9825 4922 9825 4922 9826 4841 9826 4853 9826 4894 9827 4923 9827 4924 9827 5344 9828 5345 9828 4946 9828 4945 9829 4946 9829 4925 9829 4943 9830 4925 9830 4944 9830 4862 9831 4944 9831 4936 9831 4942 9832 4936 9832 4941 9832 4855 9833 4941 9833 4940 9833 4839 9834 4940 9834 4931 9834 4840 9835 4931 9835 4938 9835 4939 9836 4938 9836 4926 9836 4927 9837 4926 9837 4935 9837 5357 9838 4935 9838 5481 9838 5357 9839 4927 9839 4935 9839 4928 9840 4925 9840 4929 9840 4928 9841 4944 9841 4925 9841 4928 9842 4930 9842 4944 9842 4944 9843 4930 9843 4936 9843 4936 9844 4930 9844 5242 9844 4941 9845 5242 9845 4937 9845 4940 9846 4937 9846 4932 9846 4931 9847 4932 9847 5155 9847 4938 9848 5155 9848 4933 9848 4926 9849 4933 9849 4934 9849 4935 9850 4934 9850 5481 9850 4935 9851 4926 9851 4934 9851 4936 9852 5242 9852 4941 9852 4941 9853 4937 9853 4940 9853 4940 9854 4932 9854 4931 9854 4931 9855 5155 9855 4938 9855 4938 9856 4933 9856 4926 9856 4927 9857 4939 9857 4926 9857 4939 9858 4840 9858 4938 9858 4840 9859 4839 9859 4931 9859 4839 9860 4855 9860 4940 9860 4855 9861 4942 9861 4941 9861 4942 9862 4862 9862 4936 9862 4862 9863 4943 9863 4944 9863 4943 9864 4945 9864 4925 9864 4945 9865 5344 9865 4946 9865 5345 9866 4929 9866 4946 9866 4946 9867 4929 9867 4925 9867 5357 9868 5008 9868 4966 9868 4964 9869 4966 9869 4947 9869 4892 9870 4947 9870 4953 9870 4833 9871 4953 9871 4963 9871 4835 9872 4963 9872 4948 9872 4856 9873 4948 9873 4959 9873 4857 9874 4959 9874 4962 9874 4949 9875 4962 9875 4961 9875 4960 9876 4961 9876 4950 9876 4951 9877 4950 9877 4957 9877 3212 9878 4957 9878 5066 9878 3212 9879 4951 9879 4957 9879 4952 9880 4947 9880 4965 9880 4952 9881 4953 9881 4947 9881 4952 9882 4991 9882 4953 9882 4953 9883 4991 9883 4963 9883 4963 9884 4991 9884 4989 9884 4948 9885 4989 9885 4958 9885 4959 9886 4958 9886 4954 9886 4962 9887 4954 9887 4955 9887 4961 9888 4955 9888 4970 9888 4950 9889 4970 9889 4956 9889 4957 9890 4956 9890 5066 9890 4957 9891 4950 9891 4956 9891 4963 9892 4989 9892 4948 9892 4948 9893 4958 9893 4959 9893 4959 9894 4954 9894 4962 9894 4962 9895 4955 9895 4961 9895 4961 9896 4970 9896 4950 9896 4951 9897 4960 9897 4950 9897 4960 9898 4949 9898 4961 9898 4949 9899 4857 9899 4962 9899 4857 9900 4856 9900 4959 9900 4856 9901 4835 9901 4948 9901 4835 9902 4833 9902 4963 9902 4833 9903 4892 9903 4953 9903 4892 9904 4964 9904 4947 9904 4964 9905 5357 9905 4966 9905 4965 9906 4947 9906 4966 9906 5008 9907 4965 9907 4966 9907 3341 9908 5066 9908 5022 9908 3340 9909 5022 9909 5067 9909 4967 9910 5067 9910 5021 9910 4968 9911 5021 9911 3339 9911 4968 9912 4967 9912 5021 9912 4970 9913 5069 9913 4956 9913 4970 9914 4969 9914 5069 9914 4970 9915 5020 9915 4969 9915 4970 9916 4955 9916 5020 9916 5020 9917 4955 9917 4971 9917 5018 9918 4971 9918 4976 9918 4972 9919 4976 9919 5047 9919 4973 9920 5047 9920 4974 9920 5089 9921 4974 9921 5097 9921 4975 9922 5097 9922 4980 9922 5103 9923 4980 9923 5114 9923 5115 9924 5114 9924 5113 9924 5117 9925 5113 9925 5116 9925 5042 9926 5116 9926 5121 9926 5041 9927 5121 9927 4981 9927 4955 9928 
4954 9928 4971 9928 4971 9929 4954 9929 5065 9929 4976 9930 5065 9930 4984 9930 5047 9931 4984 9931 4977 9931 4974 9932 4977 9932 4978 9932 5097 9933 4978 9933 4979 9933 4980 9934 4979 9934 5096 9934 5114 9935 5096 9935 5102 9935 5113 9936 5102 9936 4987 9936 5116 9937 4987 9937 5119 9937 5121 9938 5119 9938 4982 9938 4981 9939 4982 9939 4983 9939 5065 9940 4954 9940 5040 9940 4984 9941 5040 9941 5079 9941 4977 9942 5079 9942 4985 9942 4978 9943 4985 9943 5088 9943 4979 9944 5088 9944 5087 9944 5096 9945 5087 9945 4986 9945 5102 9946 4986 9946 5101 9946 4987 9947 5101 9947 5112 9947 5119 9948 5112 9948 4988 9948 4982 9949 4988 9949 5120 9949 4983 9950 5120 9950 5132 9950 4989 9951 5059 9951 4958 9951 4989 9952 5032 9952 5059 9952 4989 9953 4990 9953 5032 9953 4989 9954 4991 9954 4990 9954 4990 9955 4991 9955 4992 9955 5071 9956 4992 9956 4993 9956 5030 9957 4993 9957 4999 9957 5081 9958 4999 9958 4994 9958 5086 9959 4994 9959 4995 9959 5027 9960 4995 9960 5092 9960 5026 9961 5092 9961 4996 9961 4997 9962 4996 9962 5107 9962 5108 9963 5107 9963 5130 9963 5023 9964 5130 9964 5004 9964 4998 9965 5004 9965 5003 9965 4991 9966 4952 9966 4992 9966 4992 9967 4952 9967 5058 9967 4993 9968 5058 9968 5005 9968 4999 9969 5005 9969 5000 9969 4994 9970 5000 9970 5001 9970 4995 9971 5001 9971 5085 9971 5092 9972 5085 9972 5090 9972 4996 9973 5090 9973 5002 9973 5107 9974 5002 9974 5106 9974 5130 9975 5106 9975 5131 9975 5004 9976 5131 9976 5014 9976 5502 9977 5004 9977 5014 9977 5502 9978 5003 9978 5004 9978 5502 9979 5492 9979 5003 9979 5058 9980 4952 9980 5007 9980 5005 9981 5007 9981 5070 9981 5000 9982 5070 9982 5010 9982 5001 9983 5010 9983 5013 9983 5085 9984 5013 9984 5091 9984 5090 9985 5091 9985 5012 9985 5002 9986 5012 9986 5105 9986 5106 9987 5105 9987 5006 9987 5131 9988 5006 9988 5014 9988 5131 9989 5106 9989 5006 9989 4952 9990 4965 9990 5007 9990 5007 9991 4965 9991 5008 9991 5489 9992 5007 9992 5008 9992 5489 9993 5070 9993 5007 9993 5489 9994 5009 9994 5070 9994 5070 9995 5009 9995 5010 9995 5010 9996 5009 9996 5495 9996 5013 9997 5495 9997 5496 9997 5091 9998 5496 9998 5498 9998 5012 9999 5498 9999 5011 9999 5105 10000 5011 10000 5006 10000 5105 10001 5012 10001 5011 10001 5010 10002 5495 10002 5013 10002 5013 10003 5496 10003 5091 10003 5091 10004 5498 10004 5012 10004 5011 10005 5014 10005 5006 10005 5015 10006 5048 10006 3343 10006 5015 10007 5050 10007 5048 10007 5015 10008 5051 10008 5050 10008 5015 10009 5016 10009 5051 10009 5051 10010 5016 10010 5017 10010 5052 10011 5017 10011 5057 10011 5053 10012 5057 10012 5019 10012 4972 10013 5019 10013 5018 10013 4976 10014 4972 10014 5018 10014 5016 10015 3339 10015 5017 10015 5017 10016 3339 10016 5083 10016 5057 10017 5083 10017 5084 10017 5019 10018 5084 10018 5080 10018 5018 10019 5080 10019 5020 10019 4971 10020 5018 10020 5020 10020 5083 10021 3339 10021 5021 10021 5084 10022 5021 10022 5068 10022 5080 10023 5068 10023 4969 10023 5020 10024 5080 10024 4969 10024 4967 10025 3340 10025 5067 10025 3340 10026 3341 10026 5022 10026 5003 10027 5492 10027 4998 10027 4998 10028 5492 10028 5024 10028 5023 10029 5024 10029 5078 10029 5108 10030 5078 10030 5025 10030 4997 10031 5025 10031 5076 10031 5026 10032 5076 10032 5109 10032 5027 10033 5109 10033 5093 10033 5086 10034 5093 10034 5028 10034 5081 10035 5028 10035 5029 10035 5030 10036 5029 10036 5031 10036 5071 10037 5031 10037 5073 10037 4990 10038 5073 10038 5032 10038 4990 10039 5071 10039 5073 10039 4990 10040 4992 10040 5071 10040 5077 10041 5492 10041 5064 10041 5033 10042 5064 
[Omitted: large block of raw numeric mesh data (polygon/vertex index lists) belonging to a 3D model asset added in this diff; not meaningful to review as text.]
12303 6176 12303 6177 12304 6176 12304 6285 12304 6178 12305 6285 12305 6286 12305 5841 12306 6286 12306 6149 12306 6179 12307 6149 12307 6151 12307 6150 12308 6151 12308 6291 12308 6180 12309 6291 12309 6292 12309 5843 12310 6292 12310 6293 12310 5844 12311 6293 12311 6152 12311 6181 12312 6152 12312 6182 12312 6153 12313 6182 12313 6154 12313 5845 12314 6154 12314 6183 12314 5867 12315 6183 12315 6262 12315 6155 12316 6262 12316 6156 12316 6184 12317 6156 12317 6185 12317 5868 12318 6185 12318 6261 12318 5869 12319 6261 12319 6186 12319 6187 12320 6186 12320 6157 12320 6188 12321 6157 12321 6260 12321 5871 12322 6260 12322 6158 12322 6189 12323 6158 12323 6160 12323 6159 12324 6160 12324 6259 12324 6190 12325 6259 12325 6161 12325 6191 12326 6161 12326 6258 12326 5872 12327 6258 12327 6257 12327 6162 12328 6257 12328 6192 12328 6193 12329 6192 12329 6256 12329 6194 12330 6256 12330 6163 12330 5873 12331 6163 12331 6195 12331 6196 12332 6195 12332 6278 12332 6164 12333 6278 12333 6197 12333 6165 12334 6197 12334 6166 12334 5875 12335 6166 12335 6167 12335 5876 12336 6167 12336 6168 12336 5877 12337 6168 12337 6198 12337 5878 12338 6198 12338 6199 12338 5879 12339 6199 12339 6255 12339 5832 12340 6255 12340 6169 12340 6171 12341 6169 12341 6170 12341 6140 12342 6171 12342 6170 12342 5834 12343 6142 12343 5835 12343 5835 12344 6143 12344 6144 12344 6144 12345 6145 12345 5836 12345 5836 12346 6146 12346 6172 12346 6172 12347 6173 12347 5838 12347 5838 12348 6253 12348 5837 12348 5837 12349 6147 12349 6174 12349 6174 12350 6252 12350 5839 12350 5839 12351 6251 12351 6175 12351 6175 12352 6250 12352 6148 12352 6148 12353 6283 12353 5840 12353 5840 12354 6176 12354 6177 12354 6177 12355 6285 12355 6178 12355 6178 12356 6286 12356 5841 12356 5841 12357 6149 12357 6179 12357 6179 12358 6151 12358 6150 12358 6150 12359 6291 12359 6180 12359 6180 12360 6292 12360 5843 12360 5843 12361 6293 12361 5844 12361 5844 12362 6152 12362 6181 12362 6181 12363 6182 12363 6153 12363 6153 12364 6154 12364 5845 12364 5845 12365 6183 12365 5867 12365 5867 12366 6262 12366 6155 12366 6155 12367 6156 12367 6184 12367 6184 12368 6185 12368 5868 12368 5868 12369 6261 12369 5869 12369 5869 12370 6186 12370 6187 12370 6187 12371 6157 12371 6188 12371 6188 12372 6260 12372 5871 12372 5871 12373 6158 12373 6189 12373 6189 12374 6160 12374 6159 12374 6159 12375 6259 12375 6190 12375 6190 12376 6161 12376 6191 12376 6191 12377 6258 12377 5872 12377 5872 12378 6257 12378 6162 12378 6162 12379 6192 12379 6193 12379 6193 12380 6256 12380 6194 12380 6194 12381 6163 12381 5873 12381 5873 12382 6195 12382 6196 12382 6196 12383 6278 12383 6164 12383 6164 12384 6197 12384 6165 12384 6165 12385 6166 12385 5875 12385 5875 12386 6167 12386 5876 12386 5876 12387 6168 12387 5877 12387 5877 12388 6198 12388 5878 12388 5878 12389 6199 12389 5879 12389 5879 12390 6255 12390 5832 12390 5832 12391 6169 12391 6171 12391 6268 12392 6217 12392 5811 12392 5811 12393 6217 12393 5891 12393 6217 12394 6275 12394 5891 12394 5891 12395 6275 12395 5858 12395 6275 12396 6276 12396 5858 12396 5858 12397 6276 12397 5870 12397 6276 12398 6268 12398 5870 12398 5870 12399 6268 12399 5811 12399 6208 12400 6200 12400 5776 12400 6208 12401 6201 12401 6200 12401 6208 12402 6202 12402 6201 12402 6208 12403 6203 12403 6202 12403 6208 12404 6204 12404 6203 12404 6208 12405 5960 12405 6204 12405 6208 12406 6205 12406 5960 12406 6208 12407 5963 12407 6205 12407 6208 12408 5965 12408 5963 12408 6208 12409 5987 12409 5965 12409 6208 12410 6206 12410 5987 12410 6208 
12411 6207 12411 6206 12411 6208 12412 6209 12412 6207 12412 6208 12413 6210 12413 6209 12413 6208 12414 6268 12414 6210 12414 6208 12415 6211 12415 6268 12415 6268 12416 6211 12416 6217 12416 6217 12417 6211 12417 6248 12417 6052 12418 6217 12418 6248 12418 6052 12419 6025 12419 6217 12419 6217 12420 6025 12420 6051 12420 6050 12421 6217 12421 6051 12421 6050 12422 6023 12422 6217 12422 6217 12423 6023 12423 6022 12423 6212 12424 6217 12424 6022 12424 6212 12425 6213 12425 6217 12425 6217 12426 6213 12426 6046 12426 6214 12427 6217 12427 6046 12427 6214 12428 6020 12428 6217 12428 6217 12429 6020 12429 6019 12429 6215 12430 6217 12430 6019 12430 6215 12431 6018 12431 6217 12431 6217 12432 6018 12432 6216 12432 6015 12433 6217 12433 6216 12433 6015 12434 6016 12434 6217 12434 6217 12435 6016 12435 6074 12435 6218 12436 6217 12436 6074 12436 6218 12437 6219 12437 6217 12437 6217 12438 6219 12438 6220 12438 6221 12439 6220 12439 6222 12439 6221 12440 6217 12440 6220 12440 6221 12441 6275 12441 6217 12441 6221 12442 6079 12442 6275 12442 6275 12443 6079 12443 6223 12443 6077 12444 6275 12444 6223 12444 6077 12445 6224 12445 6275 12445 6275 12446 6224 12446 6225 12446 6115 12447 6275 12447 6225 12447 6115 12448 6113 12448 6275 12448 6275 12449 6113 12449 6226 12449 6111 12450 6275 12450 6226 12450 6111 12451 6110 12451 6275 12451 6275 12452 6110 12452 6135 12452 6133 12453 6275 12453 6135 12453 6133 12454 6227 12454 6275 12454 6275 12455 6227 12455 6132 12455 6131 12456 6275 12456 6132 12456 6131 12457 6108 12457 6275 12457 6275 12458 6108 12458 6228 12458 6128 12459 6275 12459 6228 12459 6128 12460 6107 12460 6275 12460 6275 12461 6107 12461 6106 12461 6104 12462 6275 12462 6106 12462 6104 12463 6277 12463 6275 12463 6104 12464 6103 12464 6277 12464 6277 12465 6103 12465 6229 12465 6126 12466 6277 12466 6229 12466 6126 12467 6230 12467 6277 12467 6277 12468 6230 12468 6101 12468 6231 12469 6277 12469 6101 12469 6231 12470 6099 12470 6277 12470 6277 12471 6099 12471 6098 12471 6232 12472 6277 12472 6098 12472 6232 12473 6123 12473 6277 12473 6277 12474 6123 12474 6096 12474 6094 12475 6277 12475 6096 12475 6094 12476 6122 12476 6277 12476 6277 12477 6122 12477 6233 12477 6233 12478 6122 12478 6091 12478 6121 12479 6233 12479 6091 12479 6121 12480 6090 12480 6233 12480 6233 12481 6090 12481 6234 12481 6089 12482 6233 12482 6234 12482 6089 12483 6235 12483 6233 12483 6233 12484 6235 12484 6087 12484 6236 12485 6233 12485 6087 12485 6236 12486 6249 12486 6233 12486 6236 12487 6237 12487 6249 12487 6249 12488 6237 12488 6238 12488 6036 12489 6249 12489 6238 12489 6036 12490 6239 12490 6249 12490 6249 12491 6239 12491 6066 12491 6240 12492 6249 12492 6066 12492 6240 12493 6063 12493 6249 12493 6249 12494 6063 12494 6241 12494 6242 12495 6249 12495 6241 12495 6242 12496 6061 12496 6249 12496 6249 12497 6061 12497 6033 12497 6211 12498 6033 12498 6243 12498 6032 12499 6211 12499 6243 12499 6032 12500 6244 12500 6211 12500 6211 12501 6244 12501 6246 12501 6245 12502 6211 12502 6246 12502 6245 12503 6031 12503 6211 12503 6211 12504 6031 12504 6030 12504 6054 12505 6211 12505 6030 12505 6054 12506 6029 12506 6211 12506 6211 12507 6029 12507 6028 12507 6247 12508 6211 12508 6028 12508 6247 12509 6027 12509 6211 12509 6211 12510 6027 12510 6248 12510 6211 12511 6249 12511 6033 12511 6233 12512 6249 12512 5788 12512 5788 12513 6249 12513 5776 12513 6250 12514 5776 12514 6283 12514 6250 12515 5788 12515 5776 12515 6250 12516 6251 12516 5788 12516 5788 12517 6251 12517 6252 12517 6147 12518 5788 12518 6252 
12518 6147 12519 6253 12519 5788 12519 5788 12520 6253 12520 6173 12520 6146 12521 5788 12521 6173 12521 6146 12522 6145 12522 5788 12522 5788 12523 6145 12523 6143 12523 5787 12524 6143 12524 6142 12524 6254 12525 5787 12525 6142 12525 6254 12526 6139 12526 5787 12526 5787 12527 6139 12527 6170 12527 6169 12528 5787 12528 6170 12528 6169 12529 6255 12529 5787 12529 5787 12530 6255 12530 6199 12530 6198 12531 5787 12531 6199 12531 6198 12532 6168 12532 5787 12532 5787 12533 6168 12533 6167 12533 6166 12534 5787 12534 6167 12534 6166 12535 6197 12535 5787 12535 5787 12536 6197 12536 6278 12536 6276 12537 6278 12537 6195 12537 6163 12538 6276 12538 6195 12538 6163 12539 6256 12539 6276 12539 6276 12540 6256 12540 6192 12540 6257 12541 6276 12541 6192 12541 6257 12542 6258 12542 6276 12542 6276 12543 6258 12543 6161 12543 6259 12544 6276 12544 6161 12544 6259 12545 6160 12545 6276 12545 6276 12546 6160 12546 6158 12546 6260 12547 6276 12547 6158 12547 6260 12548 6157 12548 6276 12548 6276 12549 6157 12549 6186 12549 6261 12550 6276 12550 6186 12550 6261 12551 6185 12551 6276 12551 6276 12552 6185 12552 6156 12552 6262 12553 6276 12553 6156 12553 6262 12554 6183 12554 6276 12554 6276 12555 6183 12555 6154 12555 6182 12556 6276 12556 6154 12556 6182 12557 6152 12557 6276 12557 6276 12558 6152 12558 6002 12558 6268 12559 6002 12559 6263 12559 6264 12560 6268 12560 6263 12560 6264 12561 5977 12561 6268 12561 6268 12562 5977 12562 5976 12562 6265 12563 6268 12563 5976 12563 6265 12564 5999 12564 6268 12564 6268 12565 5999 12565 6266 12565 5974 12566 6268 12566 6266 12566 5974 12567 5973 12567 6268 12567 6268 12568 5973 12568 6267 12568 5971 12569 6268 12569 6267 12569 5971 12570 5996 12570 6268 12570 6268 12571 5996 12571 5994 12571 6269 12572 6268 12572 5994 12572 6269 12573 6270 12573 6268 12573 6268 12574 6270 12574 6271 12574 5991 12575 6268 12575 6271 12575 5991 12576 6272 12576 6268 12576 6268 12577 6272 12577 6273 12577 6274 12578 6268 12578 6273 12578 6274 12579 6210 12579 6268 12579 5787 12580 5788 12580 6143 12580 6275 12581 6277 12581 6276 12581 6276 12582 6277 12582 5787 12582 6278 12583 6276 12583 5787 12583 6200 12584 5985 12584 5776 12584 5776 12585 5985 12585 6279 12585 6280 12586 5776 12586 6279 12586 6280 12587 5983 12587 5776 12587 5776 12588 5983 12588 5982 12588 6281 12589 5776 12589 5982 12589 6281 12590 6282 12590 5776 12590 5776 12591 6282 12591 6009 12591 5981 12592 5776 12592 6009 12592 5981 12593 6283 12593 5776 12593 5981 12594 6176 12594 6283 12594 5981 12595 6284 12595 6176 12595 6176 12596 6284 12596 6285 12596 6285 12597 6284 12597 6290 12597 6286 12598 6290 12598 6287 12598 6149 12599 6287 12599 6288 12599 6151 12600 6288 12600 6289 12600 6291 12601 6289 12601 6006 12601 6292 12602 6006 12602 6005 12602 6293 12603 6005 12603 6004 12603 6152 12604 6004 12604 6002 12604 6152 12605 6293 12605 6004 12605 6285 12606 6290 12606 6286 12606 6286 12607 6287 12607 6149 12607 6149 12608 6288 12608 6151 12608 6151 12609 6289 12609 6291 12609 6291 12610 6006 12610 6292 12610 6292 12611 6005 12611 6293 12611 6276 12612 6002 12612 6268 12612 6220 12613 6072 12613 6222 12613 6222 12614 6072 12614 6080 12614 6080 12615 6072 12615 6070 12615 6298 12616 6070 12616 6069 12616 6081 12617 6069 12617 6068 12617 6082 12618 6068 12618 6294 12618 6084 12619 6294 12619 6039 12619 6086 12620 6039 12620 6295 12620 6296 12621 6295 12621 6297 12621 6237 12622 6297 12622 6238 12622 6237 12623 6296 12623 6297 12623 6080 12624 6070 12624 6298 12624 6298 12625 6069 12625 6081 12625 6081 12626 6068 
12626 6082 12626 6082 12627 6294 12627 6084 12627 6084 12628 6039 12628 6086 12628 6086 12629 6295 12629 6296 12629 6299 12630 6300 12630 6410 12630 6299 12631 6301 12631 6300 12631 6299 12632 6453 12632 6301 12632 6301 12633 6453 12633 6304 12633 6303 12634 6304 12634 6308 12634 6302 12635 6308 12635 6307 12635 6302 12636 6303 12636 6308 12636 6302 12637 6734 12637 6303 12637 6303 12638 6734 12638 6411 12638 6301 12639 6411 12639 6300 12639 6301 12640 6303 12640 6411 12640 6301 12641 6304 12641 6303 12641 6453 12642 6454 12642 6304 12642 6304 12643 6454 12643 6305 12643 6308 12644 6305 12644 6309 12644 6307 12645 6309 12645 6306 12645 6307 12646 6308 12646 6309 12646 6454 12647 6455 12647 6305 12647 6305 12648 6455 12648 6413 12648 6309 12649 6413 12649 6310 12649 6306 12650 6310 12650 6735 12650 6306 12651 6309 12651 6310 12651 6455 12652 6313 12652 6413 12652 6413 12653 6313 12653 6311 12653 6310 12654 6311 12654 6312 12654 6735 12655 6312 12655 6316 12655 6735 12656 6310 12656 6312 12656 6313 12657 6456 12657 6311 12657 6311 12658 6456 12658 6314 12658 6312 12659 6314 12659 6315 12659 6316 12660 6315 12660 6736 12660 6316 12661 6312 12661 6315 12661 6456 12662 6457 12662 6314 12662 6314 12663 6457 12663 6414 12663 6315 12664 6414 12664 6320 12664 6736 12665 6320 12665 6317 12665 6736 12666 6315 12666 6320 12666 6457 12667 6318 12667 6414 12667 6414 12668 6318 12668 6319 12668 6320 12669 6319 12669 6321 12669 6317 12670 6321 12670 6322 12670 6317 12671 6320 12671 6321 12671 6318 12672 6459 12672 6319 12672 6319 12673 6459 12673 6415 12673 6321 12674 6415 12674 6416 12674 6322 12675 6416 12675 6323 12675 6322 12676 6321 12676 6416 12676 6459 12677 6325 12677 6415 12677 6415 12678 6325 12678 6417 12678 6416 12679 6417 12679 6327 12679 6323 12680 6327 12680 6324 12680 6323 12681 6416 12681 6327 12681 6325 12682 6452 12682 6417 12682 6417 12683 6452 12683 6326 12683 6327 12684 6326 12684 6418 12684 6324 12685 6418 12685 6737 12685 6324 12686 6327 12686 6418 12686 6452 12687 6328 12687 6326 12687 6326 12688 6328 12688 6419 12688 6418 12689 6419 12689 6331 12689 6737 12690 6331 12690 6329 12690 6737 12691 6418 12691 6331 12691 6328 12692 6451 12692 6419 12692 6419 12693 6451 12693 6330 12693 6331 12694 6330 12694 6420 12694 6329 12695 6420 12695 6738 12695 6329 12696 6331 12696 6420 12696 6451 12697 6450 12697 6330 12697 6330 12698 6450 12698 6332 12698 6420 12699 6332 12699 6333 12699 6738 12700 6333 12700 6739 12700 6738 12701 6420 12701 6333 12701 6450 12702 6458 12702 6332 12702 6332 12703 6458 12703 6334 12703 6333 12704 6334 12704 6337 12704 6739 12705 6337 12705 6336 12705 6739 12706 6333 12706 6337 12706 6458 12707 6338 12707 6334 12707 6334 12708 6338 12708 6421 12708 6337 12709 6421 12709 6335 12709 6336 12710 6335 12710 6740 12710 6336 12711 6337 12711 6335 12711 6338 12712 6449 12712 6421 12712 6421 12713 6449 12713 6422 12713 6335 12714 6422 12714 6340 12714 6740 12715 6340 12715 6741 12715 6740 12716 6335 12716 6340 12716 6449 12717 6448 12717 6422 12717 6422 12718 6448 12718 6423 12718 6340 12719 6423 12719 6339 12719 6741 12720 6339 12720 6343 12720 6741 12721 6340 12721 6339 12721 6448 12722 6341 12722 6423 12722 6423 12723 6341 12723 6424 12723 6339 12724 6424 12724 6342 12724 6343 12725 6342 12725 6742 12725 6343 12726 6339 12726 6342 12726 6341 12727 6345 12727 6424 12727 6424 12728 6345 12728 6346 12728 6342 12729 6346 12729 6344 12729 6742 12730 6344 12730 6348 12730 6742 12731 6342 12731 6344 12731 6345 12732 6349 12732 6346 12732 6346 12733 6349 12733 6347 12733 6344 
12734 6347 12734 6351 12734 6348 12735 6351 12735 6353 12735 6348 12736 6344 12736 6351 12736 6349 12737 6354 12737 6347 12737 6347 12738 6354 12738 6350 12738 6351 12739 6350 12739 6352 12739 6353 12740 6352 12740 6355 12740 6353 12741 6351 12741 6352 12741 6354 12742 6436 12742 6350 12742 6350 12743 6436 12743 6425 12743 6352 12744 6425 12744 6356 12744 6355 12745 6356 12745 6359 12745 6355 12746 6352 12746 6356 12746 6436 12747 6357 12747 6425 12747 6425 12748 6357 12748 6358 12748 6356 12749 6358 12749 6360 12749 6359 12750 6360 12750 6361 12750 6359 12751 6356 12751 6360 12751 6357 12752 6442 12752 6358 12752 6358 12753 6442 12753 6362 12753 6360 12754 6362 12754 6363 12754 6361 12755 6363 12755 6743 12755 6361 12756 6360 12756 6363 12756 6442 12757 6443 12757 6362 12757 6362 12758 6443 12758 6364 12758 6363 12759 6364 12759 6426 12759 6743 12760 6426 12760 6367 12760 6743 12761 6363 12761 6426 12761 6443 12762 6365 12762 6364 12762 6364 12763 6365 12763 6366 12763 6426 12764 6366 12764 6368 12764 6367 12765 6368 12765 6745 12765 6367 12766 6426 12766 6368 12766 6365 12767 6439 12767 6366 12767 6366 12768 6439 12768 6427 12768 6368 12769 6427 12769 6428 12769 6745 12770 6428 12770 6746 12770 6745 12771 6368 12771 6428 12771 6439 12772 6445 12772 6427 12772 6427 12773 6445 12773 6429 12773 6428 12774 6429 12774 6369 12774 6746 12775 6369 12775 6747 12775 6746 12776 6428 12776 6369 12776 6445 12777 6370 12777 6429 12777 6429 12778 6370 12778 6430 12778 6369 12779 6430 12779 6372 12779 6747 12780 6372 12780 6748 12780 6747 12781 6369 12781 6372 12781 6370 12782 6447 12782 6430 12782 6430 12783 6447 12783 6371 12783 6372 12784 6371 12784 6373 12784 6748 12785 6373 12785 6374 12785 6748 12786 6372 12786 6373 12786 6447 12787 6440 12787 6371 12787 6371 12788 6440 12788 6375 12788 6373 12789 6375 12789 6431 12789 6374 12790 6431 12790 6749 12790 6374 12791 6373 12791 6431 12791 6440 12792 6377 12792 6375 12792 6375 12793 6377 12793 6379 12793 6431 12794 6379 12794 6376 12794 6749 12795 6376 12795 6380 12795 6749 12796 6431 12796 6376 12796 6377 12797 6378 12797 6379 12797 6379 12798 6378 12798 6382 12798 6376 12799 6382 12799 6384 12799 6380 12800 6384 12800 6381 12800 6380 12801 6376 12801 6384 12801 6378 12802 6441 12802 6382 12802 6382 12803 6441 12803 6383 12803 6384 12804 6383 12804 6386 12804 6381 12805 6386 12805 6751 12805 6381 12806 6384 12806 6386 12806 6441 12807 6387 12807 6383 12807 6383 12808 6387 12808 6432 12808 6386 12809 6432 12809 6385 12809 6751 12810 6385 12810 6388 12810 6751 12811 6386 12811 6385 12811 6387 12812 6391 12812 6432 12812 6432 12813 6391 12813 6433 12813 6385 12814 6433 12814 6390 12814 6388 12815 6390 12815 6389 12815 6388 12816 6385 12816 6390 12816 6391 12817 6446 12817 6433 12817 6433 12818 6446 12818 6392 12818 6390 12819 6392 12819 6393 12819 6389 12820 6393 12820 6394 12820 6389 12821 6390 12821 6393 12821 6446 12822 6395 12822 6392 12822 6392 12823 6395 12823 6434 12823 6393 12824 6434 12824 6396 12824 6394 12825 6396 12825 6730 12825 6394 12826 6393 12826 6396 12826 6395 12827 6444 12827 6434 12827 6434 12828 6444 12828 6397 12828 6396 12829 6397 12829 6398 12829 6730 12830 6398 12830 6399 12830 6730 12831 6396 12831 6398 12831 6444 12832 6400 12832 6397 12832 6397 12833 6400 12833 6435 12833 6398 12834 6435 12834 6401 12834 6399 12835 6401 12835 6403 12835 6399 12836 6398 12836 6401 12836 6400 12837 6438 12837 6435 12837 6435 12838 6438 12838 6402 12838 6401 12839 6402 12839 6404 12839 6403 12840 6404 12840 6731 12840 6403 12841 6401 12841 6404 
12841 6438 12842 6437 12842 6402 12842 6402 12843 6437 12843 6408 12843 6404 12844 6408 12844 6406 12844 6731 12845 6406 12845 6405 12845 6731 12846 6404 12846 6406 12846 6437 12847 6407 12847 6408 12847 6408 12848 6407 12848 6412 12848 6406 12849 6412 12849 6409 12849 6405 12850 6409 12850 6733 12850 6405 12851 6406 12851 6409 12851 6407 12852 6410 12852 6412 12852 6412 12853 6410 12853 6300 12853 6409 12854 6300 12854 6411 12854 6733 12855 6411 12855 6734 12855 6733 12856 6409 12856 6411 12856 6412 12857 6406 12857 6408 12857 6409 12858 6412 12858 6300 12858 6308 12859 6304 12859 6305 12859 6309 12860 6305 12860 6413 12860 6310 12861 6413 12861 6311 12861 6312 12862 6311 12862 6314 12862 6315 12863 6314 12863 6414 12863 6320 12864 6414 12864 6319 12864 6321 12865 6319 12865 6415 12865 6416 12866 6415 12866 6417 12866 6327 12867 6417 12867 6326 12867 6418 12868 6326 12868 6419 12868 6331 12869 6419 12869 6330 12869 6420 12870 6330 12870 6332 12870 6333 12871 6332 12871 6334 12871 6337 12872 6334 12872 6421 12872 6335 12873 6421 12873 6422 12873 6340 12874 6422 12874 6423 12874 6339 12875 6423 12875 6424 12875 6342 12876 6424 12876 6346 12876 6344 12877 6346 12877 6347 12877 6351 12878 6347 12878 6350 12878 6352 12879 6350 12879 6425 12879 6356 12880 6425 12880 6358 12880 6360 12881 6358 12881 6362 12881 6363 12882 6362 12882 6364 12882 6426 12883 6364 12883 6366 12883 6368 12884 6366 12884 6427 12884 6428 12885 6427 12885 6429 12885 6369 12886 6429 12886 6430 12886 6372 12887 6430 12887 6371 12887 6373 12888 6371 12888 6375 12888 6431 12889 6375 12889 6379 12889 6376 12890 6379 12890 6382 12890 6384 12891 6382 12891 6383 12891 6386 12892 6383 12892 6432 12892 6385 12893 6432 12893 6433 12893 6390 12894 6433 12894 6392 12894 6393 12895 6392 12895 6434 12895 6396 12896 6434 12896 6397 12896 6398 12897 6397 12897 6435 12897 6401 12898 6435 12898 6402 12898 6404 12899 6402 12899 6408 12899 6407 12900 6436 12900 6410 12900 6407 12901 6357 12901 6436 12901 6407 12902 6437 12902 6357 12902 6357 12903 6437 12903 6442 12903 6442 12904 6437 12904 6438 12904 6443 12905 6438 12905 6400 12905 6365 12906 6400 12906 6444 12906 6439 12907 6444 12907 6395 12907 6445 12908 6395 12908 6446 12908 6370 12909 6446 12909 6391 12909 6447 12910 6391 12910 6387 12910 6440 12911 6387 12911 6441 12911 6377 12912 6441 12912 6378 12912 6377 12913 6440 12913 6441 12913 6442 12914 6438 12914 6443 12914 6443 12915 6400 12915 6365 12915 6365 12916 6444 12916 6439 12916 6439 12917 6395 12917 6445 12917 6445 12918 6446 12918 6370 12918 6370 12919 6391 12919 6447 12919 6447 12920 6387 12920 6440 12920 6436 12921 6354 12921 6410 12921 6410 12922 6354 12922 6299 12922 6299 12923 6354 12923 6349 12923 6453 12924 6349 12924 6345 12924 6454 12925 6345 12925 6341 12925 6455 12926 6341 12926 6448 12926 6313 12927 6448 12927 6449 12927 6456 12928 6449 12928 6338 12928 6457 12929 6338 12929 6458 12929 6318 12930 6458 12930 6450 12930 6459 12931 6450 12931 6451 12931 6325 12932 6451 12932 6328 12932 6452 12933 6325 12933 6328 12933 6299 12934 6349 12934 6453 12934 6453 12935 6345 12935 6454 12935 6454 12936 6341 12936 6455 12936 6455 12937 6448 12937 6313 12937 6313 12938 6449 12938 6456 12938 6456 12939 6338 12939 6457 12939 6457 12940 6458 12940 6318 12940 6318 12941 6450 12941 6459 12941 6459 12942 6451 12942 6325 12942 6693 12943 5902 12943 6692 12943 6693 12944 5903 12944 5902 12944 6693 12945 6548 12945 5903 12945 5903 12946 6548 12946 6460 12946 6460 12947 6548 12947 6547 12947 5904 12948 6547 12948 6596 12948 5905 12949 6596 
12949 5906 12949 5905 12950 5904 12950 6596 12950 6460 12951 6547 12951 5904 12951 6596 12952 6595 12952 5906 12952 5906 12953 6595 12953 6461 12953 6461 12954 6595 12954 6594 12954 6466 12955 6594 12955 6467 12955 6468 12956 6467 12956 6462 12956 5907 12957 6462 12957 6463 12957 6465 12958 6463 12958 6464 12958 6465 12959 5907 12959 6463 12959 6461 12960 6594 12960 6466 12960 6466 12961 6467 12961 6468 12961 6468 12962 6462 12962 5907 12962 6463 12963 6608 12963 6464 12963 6464 12964 6608 12964 5908 12964 5908 12965 6608 12965 6609 12965 6469 12966 6609 12966 6470 12966 5909 12967 6470 12967 6612 12967 6471 12968 6612 12968 6472 12968 6471 12969 5909 12969 6612 12969 5908 12970 6609 12970 6469 12970 6469 12971 6470 12971 5909 12971 6612 12972 6473 12972 6472 12972 6472 12973 6473 12973 6475 12973 6475 12974 6473 12974 6614 12974 6476 12975 6614 12975 6474 12975 5911 12976 6474 12976 6477 12976 5912 12977 6477 12977 5913 12977 5912 12978 5911 12978 6477 12978 6475 12979 6614 12979 6476 12979 6476 12980 6474 12980 5911 12980 6477 12981 6478 12981 5913 12981 5913 12982 6478 12982 5914 12982 5914 12983 6478 12983 6588 12983 6479 12984 6588 12984 6622 12984 6480 12985 6622 12985 6586 12985 6481 12986 6586 12986 5915 12986 6481 12987 6480 12987 6586 12987 5914 12988 6588 12988 6479 12988 6479 12989 6622 12989 6480 12989 6586 12990 6482 12990 5915 12990 5915 12991 6482 12991 6483 12991 6483 12992 6482 12992 6585 12992 6486 12993 6585 12993 6487 12993 6488 12994 6487 12994 6484 12994 5916 12995 6484 12995 6489 12995 6485 12996 6489 12996 6490 12996 6485 12997 5916 12997 6489 12997 6483 12998 6585 12998 6486 12998 6486 12999 6487 12999 6488 12999 6488 13000 6484 13000 5916 13000 6489 13001 6491 13001 6490 13001 6490 13002 6491 13002 5917 13002 5917 13003 6491 13003 6583 13003 5918 13004 6583 13004 6492 13004 5920 13005 6492 13005 6580 13005 6493 13006 6580 13006 5921 13006 6493 13007 5920 13007 6580 13007 5917 13008 6583 13008 5918 13008 5918 13009 6492 13009 5920 13009 6580 13010 6494 13010 5921 13010 5921 13011 6494 13011 6496 13011 6496 13012 6494 13012 6636 13012 5922 13013 6636 13013 6579 13013 6497 13014 6579 13014 6498 13014 6495 13015 6498 13015 6499 13015 6495 13016 6497 13016 6498 13016 6496 13017 6636 13017 5922 13017 5922 13018 6579 13018 6497 13018 6498 13019 6501 13019 6499 13019 6499 13020 6501 13020 6500 13020 6500 13021 6501 13021 6502 13021 5923 13022 6502 13022 6503 13022 6505 13023 6503 13023 6575 13023 5924 13024 6575 13024 6574 13024 6504 13025 6574 13025 6506 13025 6504 13026 5924 13026 6574 13026 6500 13027 6502 13027 5923 13027 5923 13028 6503 13028 6505 13028 6505 13029 6575 13029 5924 13029 6574 13030 6573 13030 6506 13030 6506 13031 6573 13031 5926 13031 5926 13032 6573 13032 6571 13032 5927 13033 6571 13033 6507 13033 5928 13034 6507 13034 6569 13034 5929 13035 6569 13035 6508 13035 5929 13036 5928 13036 6569 13036 5926 13037 6571 13037 5927 13037 5927 13038 6507 13038 5928 13038 6569 13039 6568 13039 6508 13039 6508 13040 6568 13040 5930 13040 5930 13041 6568 13041 6567 13041 5931 13042 6567 13042 6566 13042 5932 13043 6566 13043 6565 13043 5933 13044 6565 13044 5934 13044 5933 13045 5932 13045 6565 13045 5930 13046 6567 13046 5931 13046 5931 13047 6566 13047 5932 13047 6565 13048 6653 13048 5934 13048 5934 13049 6653 13049 6512 13049 6512 13050 6653 13050 6513 13050 5935 13051 6513 13051 6509 13051 5936 13052 6509 13052 6510 13052 5937 13053 6510 13053 6514 13053 5938 13054 6514 13054 6511 13054 5938 13055 5937 13055 6514 13055 6512 13056 6513 13056 5935 13056 5935 
13057 6509 13057 5936 13057 5936 13058 6510 13058 5937 13058 6514 13059 6515 13059 6511 13059 6511 13060 6515 13060 6516 13060 6516 13061 6515 13061 6518 13061 6519 13062 6518 13062 6562 13062 6520 13063 6562 13063 6560 13063 6517 13064 6560 13064 6521 13064 6517 13065 6520 13065 6560 13065 6516 13066 6518 13066 6519 13066 6519 13067 6562 13067 6520 13067 6560 13068 6522 13068 6521 13068 6521 13069 6522 13069 6526 13069 6526 13070 6522 13070 6559 13070 5939 13071 6559 13071 6671 13071 6525 13072 6671 13072 6524 13072 6523 13073 6524 13073 6527 13073 6523 13074 6525 13074 6524 13074 6526 13075 6559 13075 5939 13075 5939 13076 6671 13076 6525 13076 6524 13077 6528 13077 6527 13077 6527 13078 6528 13078 5940 13078 5940 13079 6528 13079 6677 13079 5941 13080 6677 13080 6530 13080 6531 13081 6530 13081 6532 13081 6529 13082 6532 13082 5942 13082 6529 13083 6531 13083 6532 13083 5940 13084 6677 13084 5941 13084 5941 13085 6530 13085 6531 13085 6532 13086 6533 13086 5942 13086 5942 13087 6533 13087 6535 13087 6535 13088 6533 13088 6536 13088 6534 13089 6536 13089 6682 13089 5943 13090 6682 13090 6556 13090 6537 13091 6556 13091 6554 13091 5945 13092 6554 13092 6538 13092 5945 13093 6537 13093 6554 13093 6535 13094 6536 13094 6534 13094 6534 13095 6682 13095 5943 13095 5943 13096 6556 13096 6537 13096 6554 13097 6539 13097 6538 13097 6538 13098 6539 13098 5946 13098 5946 13099 6539 13099 6552 13099 5901 13100 6552 13100 6690 13100 6541 13101 6690 13101 6692 13101 6540 13102 6692 13102 5902 13102 6540 13103 6541 13103 6692 13103 5946 13104 6552 13104 5901 13104 5901 13105 6690 13105 6541 13105 6543 13106 6549 13106 6542 13106 6543 13107 6545 13107 6549 13107 6543 13108 6544 13108 6545 13108 6545 13109 6544 13109 6546 13109 6547 13110 6546 13110 6596 13110 6547 13111 6545 13111 6546 13111 6547 13112 6548 13112 6545 13112 6545 13113 6548 13113 6549 13113 6549 13114 6548 13114 6693 13114 6550 13115 6693 13115 6692 13115 6551 13116 6692 13116 6690 13116 6689 13117 6690 13117 6552 13117 6553 13118 6552 13118 6539 13118 6686 13119 6539 13119 6554 13119 6555 13120 6554 13120 6556 13120 6685 13121 6556 13121 6682 13121 6683 13122 6682 13122 6536 13122 6681 13123 6536 13123 6533 13123 6679 13124 6533 13124 6532 13124 6557 13125 6532 13125 6530 13125 6676 13126 6530 13126 6677 13126 6675 13127 6677 13127 6528 13127 6672 13128 6528 13128 6524 13128 6673 13129 6524 13129 6671 13129 6558 13130 6671 13130 6559 13130 6668 13131 6559 13131 6522 13131 6667 13132 6522 13132 6560 13132 6561 13133 6560 13133 6562 13133 6563 13134 6562 13134 6518 13134 6663 13135 6518 13135 6515 13135 6662 13136 6515 13136 6514 13136 6660 13137 6514 13137 6510 13137 6659 13138 6510 13138 6509 13138 6656 13139 6509 13139 6513 13139 6564 13140 6513 13140 6653 13140 6651 13141 6653 13141 6565 13141 6652 13142 6565 13142 6566 13142 6650 13143 6566 13143 6567 13143 6648 13144 6567 13144 6568 13144 6647 13145 6568 13145 6569 13145 6645 13146 6569 13146 6507 13146 6644 13147 6507 13147 6571 13147 6570 13148 6571 13148 6573 13148 6572 13149 6573 13149 6574 13149 6641 13150 6574 13150 6575 13150 6639 13151 6575 13151 6503 13151 6640 13152 6503 13152 6502 13152 6637 13153 6502 13153 6501 13153 6576 13154 6501 13154 6498 13154 6577 13155 6498 13155 6579 13155 6578 13156 6579 13156 6636 13156 6635 13157 6636 13157 6494 13157 6633 13158 6494 13158 6580 13158 6581 13159 6580 13159 6492 13159 6582 13160 6492 13160 6583 13160 6631 13161 6583 13161 6491 13161 6630 13162 6491 13162 6489 13162 6584 13163 6489 13163 6484 13163 6628 13164 6484 13164 6487 
13164 6627 13165 6487 13165 6585 13165 6626 13166 6585 13166 6482 13166 6624 13167 6482 13167 6586 13167 6623 13168 6586 13168 6622 13168 6587 13169 6622 13169 6588 13169 6619 13170 6588 13170 6478 13170 6617 13171 6478 13171 6477 13171 6589 13172 6477 13172 6474 13172 6616 13173 6474 13173 6614 13173 6615 13174 6614 13174 6473 13174 6590 13175 6473 13175 6612 13175 6611 13176 6612 13176 6470 13176 6591 13177 6470 13177 6609 13177 6592 13178 6609 13178 6608 13178 6607 13179 6608 13179 6463 13179 6606 13180 6463 13180 6462 13180 6604 13181 6462 13181 6467 13181 6593 13182 6467 13182 6594 13182 6601 13183 6594 13183 6595 13183 6598 13184 6595 13184 6596 13184 6546 13185 6598 13185 6596 13185 6546 13186 6597 13186 6598 13186 6546 13187 6544 13187 6597 13187 6597 13188 6599 13188 6598 13188 6598 13189 6599 13189 6601 13189 6595 13190 6598 13190 6601 13190 6599 13191 6600 13191 6601 13191 6601 13192 6600 13192 6593 13192 6594 13193 6601 13193 6593 13193 6600 13194 6602 13194 6593 13194 6593 13195 6602 13195 6604 13195 6467 13196 6593 13196 6604 13196 6602 13197 6603 13197 6604 13197 6604 13198 6603 13198 6606 13198 6462 13199 6604 13199 6606 13199 6603 13200 6605 13200 6606 13200 6606 13201 6605 13201 6607 13201 6463 13202 6606 13202 6607 13202 6605 13203 6714 13203 6607 13203 6607 13204 6714 13204 6592 13204 6608 13205 6607 13205 6592 13205 6714 13206 6721 13206 6592 13206 6592 13207 6721 13207 6591 13207 6609 13208 6592 13208 6591 13208 6721 13209 6610 13209 6591 13209 6591 13210 6610 13210 6611 13210 6470 13211 6591 13211 6611 13211 6610 13212 6613 13212 6611 13212 6611 13213 6613 13213 6590 13213 6612 13214 6611 13214 6590 13214 6613 13215 6723 13215 6590 13215 6590 13216 6723 13216 6615 13216 6473 13217 6590 13217 6615 13217 6723 13218 6724 13218 6615 13218 6615 13219 6724 13219 6616 13219 6614 13220 6615 13220 6616 13220 6724 13221 6725 13221 6616 13221 6616 13222 6725 13222 6589 13222 6474 13223 6616 13223 6589 13223 6725 13224 6718 13224 6589 13224 6589 13225 6718 13225 6617 13225 6477 13226 6589 13226 6617 13226 6718 13227 6618 13227 6617 13227 6617 13228 6618 13228 6619 13228 6478 13229 6617 13229 6619 13229 6618 13230 6620 13230 6619 13230 6619 13231 6620 13231 6587 13231 6588 13232 6619 13232 6587 13232 6620 13233 6621 13233 6587 13233 6587 13234 6621 13234 6623 13234 6622 13235 6587 13235 6623 13235 6621 13236 6726 13236 6623 13236 6623 13237 6726 13237 6624 13237 6586 13238 6623 13238 6624 13238 6726 13239 6719 13239 6624 13239 6624 13240 6719 13240 6626 13240 6482 13241 6624 13241 6626 13241 6719 13242 6625 13242 6626 13242 6626 13243 6625 13243 6627 13243 6585 13244 6626 13244 6627 13244 6625 13245 6717 13245 6627 13245 6627 13246 6717 13246 6628 13246 6487 13247 6627 13247 6628 13247 6717 13248 6629 13248 6628 13248 6628 13249 6629 13249 6584 13249 6484 13250 6628 13250 6584 13250 6629 13251 6716 13251 6584 13251 6584 13252 6716 13252 6630 13252 6489 13253 6584 13253 6630 13253 6716 13254 6722 13254 6630 13254 6630 13255 6722 13255 6631 13255 6491 13256 6630 13256 6631 13256 6722 13257 6715 13257 6631 13257 6631 13258 6715 13258 6582 13258 6583 13259 6631 13259 6582 13259 6715 13260 6713 13260 6582 13260 6582 13261 6713 13261 6581 13261 6492 13262 6582 13262 6581 13262 6713 13263 6632 13263 6581 13263 6581 13264 6632 13264 6633 13264 6580 13265 6581 13265 6633 13265 6632 13266 6634 13266 6633 13266 6633 13267 6634 13267 6635 13267 6494 13268 6633 13268 6635 13268 6634 13269 6712 13269 6635 13269 6635 13270 6712 13270 6578 13270 6636 13271 6635 13271 6578 13271 6712 13272 6711 
13272 6578 13272 6578 13273 6711 13273 6577 13273 6579 13274 6578 13274 6577 13274 6711 13275 6710 13275 6577 13275 6577 13276 6710 13276 6576 13276 6498 13277 6577 13277 6576 13277 6710 13278 6709 13278 6576 13278 6576 13279 6709 13279 6637 13279 6501 13280 6576 13280 6637 13280 6709 13281 6720 13281 6637 13281 6637 13282 6720 13282 6640 13282 6502 13283 6637 13283 6640 13283 6720 13284 6638 13284 6640 13284 6640 13285 6638 13285 6639 13285 6503 13286 6640 13286 6639 13286 6638 13287 6694 13287 6639 13287 6639 13288 6694 13288 6641 13288 6575 13289 6639 13289 6641 13289 6694 13290 6696 13290 6641 13290 6641 13291 6696 13291 6572 13291 6574 13292 6641 13292 6572 13292 6696 13293 6642 13293 6572 13293 6572 13294 6642 13294 6570 13294 6573 13295 6572 13295 6570 13295 6642 13296 6703 13296 6570 13296 6570 13297 6703 13297 6644 13297 6571 13298 6570 13298 6644 13298 6703 13299 6643 13299 6644 13299 6644 13300 6643 13300 6645 13300 6507 13301 6644 13301 6645 13301 6643 13302 6646 13302 6645 13302 6645 13303 6646 13303 6647 13303 6569 13304 6645 13304 6647 13304 6646 13305 6704 13305 6647 13305 6647 13306 6704 13306 6648 13306 6568 13307 6647 13307 6648 13307 6704 13308 6705 13308 6648 13308 6648 13309 6705 13309 6650 13309 6567 13310 6648 13310 6650 13310 6705 13311 6649 13311 6650 13311 6650 13312 6649 13312 6652 13312 6566 13313 6650 13313 6652 13313 6649 13314 6699 13314 6652 13314 6652 13315 6699 13315 6651 13315 6565 13316 6652 13316 6651 13316 6699 13317 6654 13317 6651 13317 6651 13318 6654 13318 6564 13318 6653 13319 6651 13319 6564 13319 6654 13320 6655 13320 6564 13320 6564 13321 6655 13321 6656 13321 6513 13322 6564 13322 6656 13322 6655 13323 6657 13323 6656 13323 6656 13324 6657 13324 6659 13324 6509 13325 6656 13325 6659 13325 6657 13326 6658 13326 6659 13326 6659 13327 6658 13327 6660 13327 6510 13328 6659 13328 6660 13328 6658 13329 6708 13329 6660 13329 6660 13330 6708 13330 6662 13330 6514 13331 6660 13331 6662 13331 6708 13332 6661 13332 6662 13332 6662 13333 6661 13333 6663 13333 6515 13334 6662 13334 6663 13334 6661 13335 6664 13335 6663 13335 6663 13336 6664 13336 6563 13336 6518 13337 6663 13337 6563 13337 6664 13338 6665 13338 6563 13338 6563 13339 6665 13339 6561 13339 6562 13340 6563 13340 6561 13340 6665 13341 6666 13341 6561 13341 6561 13342 6666 13342 6667 13342 6560 13343 6561 13343 6667 13343 6666 13344 6669 13344 6667 13344 6667 13345 6669 13345 6668 13345 6522 13346 6667 13346 6668 13346 6669 13347 6702 13347 6668 13347 6668 13348 6702 13348 6558 13348 6559 13349 6668 13349 6558 13349 6702 13350 6670 13350 6558 13350 6558 13351 6670 13351 6673 13351 6671 13352 6558 13352 6673 13352 6670 13353 6707 13353 6673 13353 6673 13354 6707 13354 6672 13354 6524 13355 6673 13355 6672 13355 6707 13356 6701 13356 6672 13356 6672 13357 6701 13357 6675 13357 6528 13358 6672 13358 6675 13358 6701 13359 6674 13359 6675 13359 6675 13360 6674 13360 6676 13360 6677 13361 6675 13361 6676 13361 6674 13362 6678 13362 6676 13362 6676 13363 6678 13363 6557 13363 6530 13364 6676 13364 6557 13364 6678 13365 6700 13365 6557 13365 6557 13366 6700 13366 6679 13366 6532 13367 6557 13367 6679 13367 6700 13368 6706 13368 6679 13368 6679 13369 6706 13369 6681 13369 6533 13370 6679 13370 6681 13370 6706 13371 6680 13371 6681 13371 6681 13372 6680 13372 6683 13372 6536 13373 6681 13373 6683 13373 6680 13374 6684 13374 6683 13374 6683 13375 6684 13375 6685 13375 6682 13376 6683 13376 6685 13376 6684 13377 6698 13377 6685 13377 6685 13378 6698 13378 6555 13378 6556 13379 6685 13379 6555 13379 6698 
13380 6697 13380 6555 13380 6555 13381 6697 13381 6686 13381 6554 13382 6555 13382 6686 13382 6697 13383 6687 13383 6686 13383 6686 13384 6687 13384 6553 13384 6539 13385 6686 13385 6553 13385 6687 13386 6688 13386 6553 13386 6553 13387 6688 13387 6689 13387 6552 13388 6553 13388 6689 13388 6688 13389 6695 13389 6689 13389 6689 13390 6695 13390 6551 13390 6690 13391 6689 13391 6551 13391 6695 13392 6691 13392 6551 13392 6551 13393 6691 13393 6550 13393 6692 13394 6551 13394 6550 13394 6691 13395 6542 13395 6550 13395 6550 13396 6542 13396 6549 13396 6693 13397 6550 13397 6549 13397 6691 13398 6694 13398 6542 13398 6691 13399 6696 13399 6694 13399 6691 13400 6695 13400 6696 13400 6696 13401 6695 13401 6642 13401 6642 13402 6695 13402 6688 13402 6703 13403 6688 13403 6687 13403 6643 13404 6687 13404 6697 13404 6646 13405 6697 13405 6698 13405 6704 13406 6698 13406 6684 13406 6705 13407 6684 13407 6680 13407 6649 13408 6680 13408 6706 13408 6699 13409 6706 13409 6700 13409 6654 13410 6700 13410 6678 13410 6655 13411 6678 13411 6674 13411 6657 13412 6674 13412 6701 13412 6658 13413 6701 13413 6707 13413 6708 13414 6707 13414 6670 13414 6661 13415 6670 13415 6702 13415 6664 13416 6702 13416 6669 13416 6665 13417 6669 13417 6666 13417 6665 13418 6664 13418 6669 13418 6642 13419 6688 13419 6703 13419 6703 13420 6687 13420 6643 13420 6643 13421 6697 13421 6646 13421 6646 13422 6698 13422 6704 13422 6704 13423 6684 13423 6705 13423 6705 13424 6680 13424 6649 13424 6649 13425 6706 13425 6699 13425 6699 13426 6700 13426 6654 13426 6654 13427 6678 13427 6655 13427 6655 13428 6674 13428 6657 13428 6657 13429 6701 13429 6658 13429 6658 13430 6707 13430 6708 13430 6708 13431 6670 13431 6661 13431 6661 13432 6702 13432 6664 13432 6694 13433 6638 13433 6542 13433 6542 13434 6638 13434 6543 13434 6543 13435 6638 13435 6720 13435 6544 13436 6720 13436 6709 13436 6597 13437 6709 13437 6710 13437 6599 13438 6710 13438 6711 13438 6600 13439 6711 13439 6712 13439 6602 13440 6712 13440 6634 13440 6603 13441 6634 13441 6632 13441 6605 13442 6632 13442 6713 13442 6714 13443 6713 13443 6715 13443 6721 13444 6715 13444 6722 13444 6610 13445 6722 13445 6716 13445 6613 13446 6716 13446 6629 13446 6723 13447 6629 13447 6717 13447 6724 13448 6717 13448 6625 13448 6725 13449 6625 13449 6719 13449 6718 13450 6719 13450 6726 13450 6618 13451 6726 13451 6621 13451 6620 13452 6618 13452 6621 13452 6543 13453 6720 13453 6544 13453 6544 13454 6709 13454 6597 13454 6597 13455 6710 13455 6599 13455 6599 13456 6711 13456 6600 13456 6600 13457 6712 13457 6602 13457 6602 13458 6634 13458 6603 13458 6603 13459 6632 13459 6605 13459 6605 13460 6713 13460 6714 13460 6714 13461 6715 13461 6721 13461 6721 13462 6722 13462 6610 13462 6610 13463 6716 13463 6613 13463 6613 13464 6629 13464 6723 13464 6723 13465 6717 13465 6724 13465 6724 13466 6625 13466 6725 13466 6725 13467 6719 13467 6718 13467 6718 13468 6726 13468 6618 13468 5952 13469 5953 13469 6727 13469 6727 13470 5953 13470 6728 13470 6727 13471 6728 13471 5899 13471 5899 13472 6728 13472 5792 13472 6729 13473 5792 13473 6752 13473 6729 13474 5899 13474 5792 13474 6403 13475 6731 13475 6732 13475 6399 13476 6732 13476 6730 13476 6399 13477 6403 13477 6732 13477 6731 13478 6405 13478 6732 13478 6732 13479 6405 13479 6733 13479 6734 13480 6732 13480 6733 13480 6734 13481 6302 13481 6732 13481 6732 13482 6302 13482 6307 13482 6306 13483 6732 13483 6307 13483 6306 13484 6735 13484 6732 13484 6732 13485 6735 13485 6316 13485 6736 13486 6732 13486 6316 13486 6736 13487 6752 13487 6732 
13487 6736 13488 6317 13488 6752 13488 6752 13489 6317 13489 6322 13489 6729 13490 6322 13490 6323 13490 6324 13491 6729 13491 6323 13491 6324 13492 6737 13492 6729 13492 6729 13493 6737 13493 6744 13493 6744 13494 6737 13494 6329 13494 6738 13495 6744 13495 6329 13495 6738 13496 6739 13496 6744 13496 6744 13497 6739 13497 6336 13497 6740 13498 6744 13498 6336 13498 6740 13499 6741 13499 6744 13499 6744 13500 6741 13500 6343 13500 6742 13501 6744 13501 6343 13501 6742 13502 6348 13502 6744 13502 6744 13503 6348 13503 6353 13503 6355 13504 6744 13504 6353 13504 6355 13505 6359 13505 6744 13505 6744 13506 6359 13506 6361 13506 6743 13507 6744 13507 6361 13507 6743 13508 6367 13508 6744 13508 6744 13509 6367 13509 5830 13509 5830 13510 6367 13510 6745 13510 6746 13511 5830 13511 6745 13511 6746 13512 6747 13512 5830 13512 5830 13513 6747 13513 6748 13513 6374 13514 5830 13514 6748 13514 6374 13515 6750 13515 5830 13515 6374 13516 6749 13516 6750 13516 6750 13517 6749 13517 6380 13517 6381 13518 6750 13518 6380 13518 6381 13519 6751 13519 6750 13519 6750 13520 6751 13520 6388 13520 6732 13521 6388 13521 6389 13521 6394 13522 6732 13522 6389 13522 6394 13523 6730 13523 6732 13523 6752 13524 6322 13524 6729 13524 6750 13525 6388 13525 6732 13525

+
+
+
+
+ + + + + + + + + + + + + + +
diff --git a/models/rg_robot/meshes/ActiveHinge_Frame.dae b/models/rg_robot/meshes/ActiveHinge_Frame.dae
index 387e9ff21b..c557fc6d46 100644
--- a/models/rg_robot/meshes/ActiveHinge_Frame.dae
+++ b/models/rg_robot/meshes/ActiveHinge_Frame.dae
@@ -1,63 +1,70 @@
[… removed COLLADA asset header (VCGLab / "VCGLib | MeshLab" export, Y_UP, created/modified "do sep. 17 12:07:25 2015") and raw vertex-position data omitted …]
-0.00373205 -0.00529622 -0.0148221 -0.00095512 -0.00506319 -0.0149105 -0.00373205 -0.00509424 -0.0149006 -0.00099555 -0.00488673 -0.0149564 -0.000993564 -0.00548298 -0.0147254 -0.000875633 -0.00477961 -0.0149761 -0.000975412 -0.00481887 -0.0149696 -0.005 -0.00515561 -0.0148794 -0.005 -0.00547157 -0.0147321 -0.005 -0.00571368 -0.0145675 -0.00373205 0.00749141 -0.015 0.000226886 0.00742297 -0.015 0.000675468 0.0107861 -0.015 0.00165385 0.00736351 -0.015 0.000894594 0.00719583 -0.015 0.00131625 0.00708856 -0.015 0.00151636 0.00728766 -0.015 0.0011086 0.0101584 -0.015 0.00319885 0.00998638 -0.015 0.00348563 0.00668068 -0.015 0.00206025 0.00938524 -0.015 0.00428771 0.00651862 -0.015 0.00221928 0.00915825 -0.015 0.00453329 0.00866793 -0.015 0.00498783 0.00891894 -0.015 0.00476687 0.00840588 -0.015 0.00519558 0.0059671 -0.015 0.00261679 0.0081335 -0.015 0.00538959 0.005765 -0.015 0.00272025 0.00726165 -0.015 0.00588415 0.00695537 -0.015 0.0060184 0.00555564 -0.015 0.00280813 0.00664259 -0.015 0.00613672 0.00534024 -0.015 0.00287993 0.00632414 -0.015 0.00623879 0.00600086 -0.015 0.00632435 0.00534325 -0.015 0.00644507 0.00400831 -0.015 0.00648138 0.00376934 -0.015 0.00290966 0.000882288 -0.015 0.0054002 0.00333885 -0.015 0.00276618 0.000609339 -0.015 0.00520699 0.00293493 -0.015 0.0025594 0.000346688 -0.015 0.005 0.00209992 -0.015 0.0017999 0.00197067 -0.015 0.00161323 -0.00245387 -0.015 0.000497867 -0.00263683 -0.015 0.000480913 -0.00447157 -0.015 0.000848979 -0.00296624 -0.015 0.000180621 -0.00458513 -0.015 0.000909879 -0.00462939 -0.015 0.000335628 -0.00470557 -0.015 0.000955672 -0.00486158 -0.015 0.000480459 -0.00513525 -0.015 0.000481361 -0.00533899 -0.015 0.000940789 -0.0053 -0.015 0.0004 -0.00566886 -0.015 0.000743389 -0.0057589 -0.015 0.000651213 -0.00594849 -0.015 0.000316801 -0.00599792 -0.015 6.44264e-05 -0.0055 -0.015 1.6489e-06 -0.00549179 -0.015 -9.02538e-05 -0.00599792 -0.015 -6.44264e-05 0.00160469 -0.015 0.000785594 0.00155357 -0.015 0.000564374 -0.00207489 -0.015 0.000263216 -0.00203376 -0.015 0.000180621 0.00151932 -0.015 0.000339923 0.00150215 -0.015 0.000113524 0.00150215 -0.015 -0.000113524 -0.00200851 -0.015 -9.18746e-05 -0.00207489 -0.015 -0.000263216 -0.00254613 -0.015 -0.000497867 -0.00272287 -0.015 -0.000447582 0.00175629 -0.015 0.00121329 0.00151932 -0.015 -0.000339923 0.00167239 -0.015 -0.00100231 0.00160469 -0.015 -0.000785594 0.00175629 -0.015 -0.00121329 0.00224292 -0.015 -0.00197626 0.00145617 -0.015 -0.00574326 0.00313297 -0.015 -0.00267044 0.00444323 -0.015 -0.00299946 0.00467024 -0.015 -0.00299517 0.00567361 -0.015 -0.00639317 0.00512003 -0.015 -0.00293523 0.00632414 -0.015 -0.00623879 0.00555564 -0.015 -0.00280813 0.005765 -0.015 -0.00272025 0.0059671 -0.015 -0.00261679 0.00668068 -0.015 -0.00206025 0.00708856 -0.015 -0.00151636 0.00736351 -0.015 -0.000894594 0.00756063 -0.015 -0.00573433 0.00998638 -0.015 -0.00348563 0.0075 -0.015 0 0.0103154 -0.015 -0.0029036 0.0107861 -0.015 -0.00165385 0.0109227 -0.015 -0.000999364 0.0109656 -0.015 -0.000667716 0.00726165 -0.015 -0.00588415 -0.00545715 -0.015 0.000889389 -0.00542597 -0.015 -0.000261813 -0.0057589 -0.015 -0.000651213 -0.00583633 -0.015 -0.000548224 -0.00530263 -0.015 -0.000398013 -0.00556772 -0.015 -0.000823223 -0.00533899 -0.015 -0.000940789 -0.00521521 -0.015 -0.000976568 -0.00495551 -0.015 -0.000498016 -0.00470557 -0.015 -0.000955672 -0.00486476 -0.015 -0.000481361 -0.0048309 -0.015 -0.000985598 -0.00458513 -0.015 -0.000909879 -0.00477861 -0.015 -0.000448314 -0.00463161 -0.015 -0.000338064 -0.00457576 
-0.015 -0.000264617 -0.00299149 -0.015 -9.18746e-05 -0.00292511 -0.015 -0.000263216 -0.00280132 -0.015 -0.000399009 -0.0045 -0.015 -1.6489e-06 -0.00521521 -0.015 0.000976568 -0.00447157 -0.015 0.005 -0.0045696 -0.0149976 0.000902639 -0.00451777 -0.0149995 0.001 -0.00464588 -0.0149924 0.005 -0.00470206 -0.0149867 0.001 -0.00481887 -0.0149696 0.005 -0.00499157 -0.0149312 0.000999964 -0.00519637 -0.014864 0.000980529 -0.00529622 -0.0148221 0.00095512 -0.00539218 -0.0147755 0.000919888 -0.00531681 -0.0148126 0.005 -0.00540416 -0.0147693 0.001 -0.00498921 -0.0149319 0.005 -0.00515561 -0.0148794 0.005 -0.00556343 -0.0146757 0.001 -0.00564493 -0.0146196 0.000764241 -0.00585274 -0.0144465 0.001 -0.00571433 -0.014567 0.001 -0.00591216 -0.0143878 -0.000409845 -0.00593521 -0.0143648 -0.000354106 -0.00598367 -0.0143163 -0.000179993 -0.00683553 -0.0134645 -0.005 -0.00595485 -0.0143451 0.000297079 -0.00593521 -0.0143648 0.000354106 -0.00467303 -0.0149898 0.000945035 -0.00477961 -0.0149761 0.000975412 -0.00488673 -0.0149564 0.000993564 -0.00495903 -0.015 0.00099916 -0.00509424 -0.0149006 0.00099555 -0.0048309 -0.015 0.000985598 -0.00508785 -0.015 0.000996134 -0.00556772 -0.015 0.000823223 -0.00556754 -0.014673 0.000823344 -0.00548298 -0.0147254 0.000875633 -0.00577716 -0.0145151 0.000629308 -0.00583485 -0.0144634 0.000550482 -0.0057144 -0.014567 0.000699741 -0.00583633 -0.015 0.000548224 -0.00588579 -0.0144142 0.000464093 -0.00589988 -0.015 0.000436133 -0.00591216 -0.0143878 0.000409845 -0.00597103 -0.014329 0.00023897 -0.00598135 -0.015 0.000192209 -0.00598367 -0.0143163 0.000179993 -0.00599273 -0.0143073 0.00012036 -0.00599818 -0.0143018 6.02899e-05 -0.006 -0.0143 0 -0.00599818 -0.0143018 -6.02899e-05 -0.00599273 -0.0143073 -0.00012036 -0.00598135 -0.015 -0.000192209 -0.00597103 -0.014329 -0.00023897 -0.00594849 -0.015 -0.000316801 -0.00595485 -0.0143451 -0.000297079 -0.00589988 -0.015 -0.000436133 -0.00556754 -0.014673 -0.000823344 -0.00577716 -0.0145151 -0.000629308 -0.00566886 -0.015 -0.000743389 -0.00545715 -0.015 -0.000889389 -0.00499157 -0.0149312 -0.000999964 -0.00508785 -0.015 -0.000996134 -0.00539218 -0.0147755 -0.000919888 -0.00519637 -0.014864 -0.000980529 -0.00495903 -0.015 -0.00099916 -0.00467303 -0.0149898 -0.000945035 -0.0045696 -0.0149976 -0.000902639 -0.00683553 -0.0134645 0.005 -0.00687425 -0.0134253 0 -0.00720916 -0.0130464 0.005 -0.0073778 -0.0128223 0.005 -0.00743169 -0.0127448 0 -0.00773317 -0.0122413 0 -0.00786124 -0.0119771 0 -0.00810459 -0.0113131 0.005 -0.00817464 -0.0110415 0.005 -0.00821267 -0.0108593 0 -0.00822936 -0.0107665 0.005 -0.0082997 -0.00998398 0 -0.0083 -0.00992893 0.005 -0.00707328 -0.0132095 0 -0.0073778 -0.0128223 -0.005 -0.00797358 -0.0117059 0 -0.00801942 -0.0115803 -0.005 -0.00817464 -0.0110415 -0.005 -0.00822936 -0.0107665 -0.005 -0.00828785 -0.0102773 0 -0.00767612 -0.0123475 -0.005 -0.00780484 -0.0120984 -0.005 -0.0081496 -0.0111461 0 -0.00826856 -0.0104888 -0.005 -0.00825881 -0.0105694 0 -0.00806981 -0.0114286 0 -0.00801942 -0.0115803 0.005 -0.00780484 -0.0120984 0.005 -0.00753362 -0.0125891 0.005 -0.00758982 -0.0124975 0 -0.00725931 -0.0129824 0 -0.00702821 -0.0132607 0.005 -0.0083 -0.00992893 -0.005 -0.00683553 0.0134645 -0.005 -0.00683553 0.0134645 0.005 -0.00687425 0.0134253 0 -0.00707328 0.0132095 0 -0.00806981 0.0114286 0 -0.00810459 0.0113131 -0.005 -0.0081496 0.0111461 0 -0.00821267 0.0108593 0 -0.00828785 0.0102773 0 -0.00829213 0.0102093 -0.005 -0.0083 0.00992893 -0.005 -0.00702821 0.0132607 0.005 -0.00720916 0.0130464 0.005 -0.00725931 
0.0129824 0 -0.0073778 0.0128223 0.005 -0.00773317 0.0122413 0 -0.00786124 0.0119771 0 -0.00780484 0.0120984 0.005 -0.0079194 0.0118424 0.005 -0.00810459 0.0113131 0.005 -0.00822936 0.0107665 0.005 -0.00825881 0.0105694 0 -0.0082997 0.00998398 0 -0.00829213 0.0102093 0.005 -0.00767612 0.0123475 0.005 -0.00797358 0.0117059 0 -0.00826856 0.0104888 -0.005 -0.00822936 0.0107665 -0.005 -0.00817464 0.0110415 -0.005 -0.0079194 0.0118424 -0.005 -0.00758982 0.0124975 0 -0.00743169 0.0127448 0 -0.0073778 0.0128223 -0.005 -0.00579748 0.0142898 0 -0.00554129 0.0144332 0 -0.00500592 0.0146736 0 -0.00386943 0.0149588 0 -0.00322893 0.015 0.005 -0.00322893 0.015 -0.005 -0.00328398 0.0149997 0 -0.00628244 0.0139593 0 -0.00612229 0.0140778 -0.005 -0.00604481 0.0141317 0 -0.00564753 0.0143761 -0.005 -0.00514235 0.0146194 -0.005 -0.00488033 0.0147194 -0.005 -0.00472857 0.0147698 0 -0.00461311 0.0148046 -0.005 -0.00434154 0.0148746 -0.005 -0.00406646 0.0149294 -0.005 -0.00378875 0.0149686 -0.005 -0.00588909 0.0142336 -0.005 -0.00444606 0.0148496 0 -0.00415935 0.0149127 0 -0.00350928 0.0149921 0.005 -0.00378875 0.0149686 0.005 -0.00357731 0.0149879 0 -0.00406646 0.0149294 0.005 -0.00461311 0.0148046 0.005 -0.00488033 0.0147194 0.005 -0.00527713 0.0145612 0 -0.00539835 0.0145048 0.005 -0.00588909 0.0142336 0.005 -0.00650954 0.0137733 0 -0.00672533 0.0135742 0 -0.00676447 0.0135355 -0.005 0.000609339 0.015 0.00520699 0.00251824 0.015 -0.000833743 0.002696 0.015 -0.00116964 0.000346688 0.015 -0.005 0.000609339 0.015 -0.00520699 0.000882288 0.015 -0.0054002 0.00145617 0.015 -0.00574326 0.00175557 0.015 -0.00589221 0.0037103 0.015 -0.00199972 0.00237537 0.015 -0.00614296 0.00389013 0.015 -0.00206169 0.00407473 0.015 -0.00210752 0.00367562 0.015 -0.00644751 0.0043423 0.015 -0.00649809 0.00445244 0.015 -0.00214947 0.00467671 0.015 -0.0064976 0.00464259 0.015 -0.00214527 0.00501808 0.015 -0.00208665 0.00567361 0.015 -0.00639317 0.00600086 0.015 -0.00632435 0.0081335 0.015 -0.00538959 0.00600337 0.015 -0.001537 0.00891894 0.015 -0.00476687 0.00915825 0.015 -0.00453329 0.00979985 0.015 -0.00376319 0.00998638 0.015 -0.00348563 0.00644294 0.015 -0.000920587 0.0103154 0.015 -0.0029036 0.0106927 0.015 -0.00197496 0.00661641 0.015 -0.000378547 0.0108628 0.015 -0.00132837 0.0109227 0.015 -0.000999364 0.0109656 0.015 -0.000667716 0.0109227 0.015 0.000999364 0.00657467 0.015 0.000564115 0.0065167 0.015 0.000745267 0.00979985 0.015 0.00376319 0.0081335 0.015 0.00538959 0.00632414 0.015 0.00623879 0.00600086 0.015 0.00632435 0.00567361 0.015 0.00639317 0.00501066 0.015 0.00647991 0.00464259 0.015 0.00214527 0.00445244 0.015 0.00214947 0.00400831 0.015 0.00648138 0.00367562 0.015 0.00644751 0.00301765 0.015 0.00632872 0.00407473 0.015 0.00210752 0.00389013 0.015 0.00206169 0.0037103 0.015 0.00199972 0.00245231 0.015 0.000655333 0.00206225 0.015 0.00602556 0.00353666 0.015 -0.0019221 0.00206225 0.015 -0.00602556 0.00570927 0.015 -0.00177769 0.0065167 0.015 -0.000745267 0.0105829 0.015 -0.00229085 0.00657467 0.015 -0.000564115 0.0107861 0.015 0.00165385 0.00726165 0.015 0.00588415 0.00537738 0.015 0.00196283 0.00520047 0.015 0.00203269 0.00483164 0.015 0.00212427 0.0023521 0.015 9.51018e-05 0.000609339 -0.018 -0.00520699 0.000609339 -0.015 -0.00520699 0.000882288 -0.015 -0.0054002 0.00116481 -0.015 -0.00557912 0.00175557 -0.015 -0.00589221 0.00206225 -0.015 -0.00602556 0.00367562 -0.018 -0.00644751 0.00400831 -0.015 -0.00648138 0.00367562 -0.015 -0.00644751 0.0043423 -0.015 -0.00649809 0.00467671 -0.015 -0.0064976 0.00467671 -0.018 
-0.0064976 0.00501066 -0.015 -0.00647991 0.00534325 -0.018 -0.00644507 0.00534325 -0.015 -0.00644507 0.00695537 -0.015 -0.0060184 0.00726165 -0.018 -0.00588415 0.0078515 -0.015 -0.00556933 0.0081335 -0.015 -0.00538959 0.00840588 -0.015 -0.00519559 0.00866793 -0.018 -0.00498783 0.00891894 -0.018 -0.00476687 0.00891894 -0.015 -0.00476687 0.00938524 -0.018 -0.00428771 0.00938524 -0.015 -0.00428771 0.0101584 -0.015 -0.00319885 0.0104571 -0.018 -0.00260066 0.0105829 -0.018 -0.00229085 0.0105829 -0.015 -0.00229085 0.0108628 -0.015 -0.00132837 0.011 -0.015 0 0.0109914 -0.015 0.000334301 0.0109656 -0.015 0.000667716 0.0109227 -0.015 0.000999364 0.0108628 -0.015 0.00132837 0.0106927 -0.015 0.00197496 0.0106927 -0.018 0.00197496 0.0105829 -0.015 0.00229085 0.0104571 -0.018 0.00260066 0.0104571 -0.015 0.00260066 0.0101584 -0.018 0.00319885 0.00959929 -0.015 0.00403078 0.00756063 -0.015 0.00573433 0.00695537 -0.018 0.0060184 0.00664259 -0.018 0.00613672 0.00567361 -0.015 0.00639317 0.00501066 -0.015 0.00647991 0.00467671 -0.015 0.0064976 0.00334511 -0.015 0.00639658 0.00301765 -0.015 0.00632872 0.00269412 -0.015 0.0062441 0.00269412 -0.018 0.0062441 0.00206225 -0.015 0.00602556 0.00175557 -0.015 0.00589221 0.00145617 -0.018 0.00574326 0.00145617 -0.015 0.00574326 0.00116481 -0.018 0.00557912 0.00116481 -0.018 -0.00557912 0.00175557 -0.018 -0.00589221 0.00237537 -0.015 -0.00614296 0.00269412 -0.015 -0.0062441 0.00301765 -0.018 -0.00632872 0.00301765 -0.015 -0.00632872 0.00334511 -0.015 -0.00639658 0.00567361 -0.018 -0.00639317 0.00600086 -0.015 -0.00632435 0.00664259 -0.015 -0.00613672 0.0078515 -0.018 -0.00556933 0.00840588 -0.018 -0.00519559 0.00866793 -0.015 -0.00498783 0.00915825 -0.015 -0.00453329 0.00915825 -0.018 -0.00453329 0.00959929 -0.015 -0.00403078 0.00959929 -0.018 -0.00403078 0.00979985 -0.015 -0.00376319 0.0101584 -0.018 -0.00319885 0.0103154 -0.018 -0.0029036 0.0104571 -0.015 -0.00260066 0.0106927 -0.015 -0.00197496 0.0107861 -0.018 -0.00165385 0.0108628 -0.018 -0.00132837 0.0109227 -0.018 -0.000999364 0.0109914 -0.015 -0.000334301 0.0109914 -0.018 -0.000334301 0.0109656 -0.018 0.000667716 0.0109227 -0.018 0.000999364 0.0105829 -0.018 0.00229085 0.0103154 -0.015 0.0029036 0.00979985 -0.015 0.00376319 0.00979985 -0.018 0.00376319 0.00938524 -0.018 0.00428771 0.00891894 -0.018 0.00476687 0.00840588 -0.018 0.00519558 0.0081335 -0.018 0.00538959 0.0078515 -0.015 0.00556933 0.0078515 -0.018 0.00556933 0.00726165 -0.018 0.00588415 0.00467671 -0.018 0.0064976 0.0043423 -0.015 0.00649809 0.00367562 -0.015 0.00644751 0.00301765 -0.018 0.00632872 0.00237537 -0.015 0.00614296 0.00237537 -0.018 0.00614296 0.00206225 -0.018 0.00602556 0.00116481 -0.015 0.00557912 0.000346688 0.015 0.005 0.000609339 0.018 0.00520699 0.000882288 0.015 0.0054002 0.00116481 0.015 0.00557912 0.00116481 0.018 0.00557912 0.00145617 0.018 0.00574326 0.00237537 0.015 0.00614296 0.00334511 0.015 0.00639658 0.0043423 0.018 0.00649809 0.0043423 0.015 0.00649809 0.00467671 0.015 0.0064976 0.00534325 0.015 0.00644507 0.00567361 0.018 0.00639317 0.00632414 0.018 0.00623879 0.00664259 0.015 0.00613672 0.00695537 0.015 0.0060184 0.00695537 0.018 0.0060184 0.00726165 0.018 0.00588415 0.00756063 0.015 0.00573433 0.00756063 0.018 0.00573433 0.0078515 0.015 0.00556933 0.00840588 0.018 0.00519558 0.00840588 0.015 0.00519558 0.00866793 0.015 0.00498783 0.00866793 0.018 0.00498783 0.00891894 0.015 0.00476687 0.00915825 0.018 0.00453329 0.00915825 0.015 0.00453329 0.00959929 0.015 0.00403078 0.00979985 0.018 0.00376319 0.00998638 0.018 
0.00348563 0.00145617 0.015 0.00574326 0.00175557 0.015 0.00589221 0.00175557 0.018 0.00589221 0.00269412 0.015 0.0062441 0.00269412 0.018 0.0062441 0.00400831 0.018 0.00648138 0.0078515 0.018 0.00556933 0.00938524 0.015 0.00428771 0.00938524 0.018 0.00428771 0.00959929 0.018 0.00403078 0.00998638 0.015 0.00348563 0.0101584 0.015 0.00319885 0.0103154 0.015 0.0029036 0.0106927 0.015 0.00197496 0.0107861 0.018 0.00165385 0.0109914 0.018 0.000334301 0.0109914 0.015 0.000334301 0.011 0.015 0 0.0109914 0.015 -0.000334301 0.0107861 0.018 -0.00165385 0.0107861 0.015 -0.00165385 0.0104571 0.018 -0.00260066 0.0104571 0.015 -0.00260066 0.0103154 0.018 -0.0029036 0.0101584 0.015 -0.00319885 0.00979985 0.018 -0.00376319 0.00959929 0.015 -0.00403078 0.00959929 0.018 -0.00403078 0.00938524 0.015 -0.00428771 0.00840588 0.015 -0.00519559 0.0078515 0.018 -0.00556933 0.0078515 0.015 -0.00556933 0.00756063 0.018 -0.00573433 0.00756063 0.015 -0.00573433 0.00695537 0.015 -0.0060184 0.00695537 0.018 -0.0060184 0.00664259 0.015 -0.00613672 0.00400831 0.015 -0.00648138 0.00334511 0.015 -0.00639658 0.00334511 0.018 -0.00639658 0.00301765 0.015 -0.00632872 0.00301765 0.018 -0.00632872 0.00175557 0.018 -0.00589221 0.00116481 0.015 -0.00557912 0.0103154 0.018 0.0029036 0.0104571 0.018 0.00260066 0.0104571 0.015 0.00260066 0.0105829 0.015 0.00229085 0.0106927 0.018 0.00197496 0.0108628 0.015 0.00132837 0.0109227 0.018 0.000999364 0.0109656 0.015 0.000667716 0.0109227 0.018 -0.000999364 0.00998638 0.018 -0.00348563 0.00866793 0.015 -0.00498783 0.00866793 0.018 -0.00498783 0.00726165 0.015 -0.00588415 0.00632414 0.015 -0.00623879 0.00567361 0.018 -0.00639317 0.00534325 0.015 -0.00644507 0.00501066 0.015 -0.00647991 0.00501066 0.018 -0.00647991 0.00467671 0.018 -0.0064976 0.00400831 0.018 -0.00648138 0.00269412 0.015 -0.0062441 0.00269412 0.018 -0.0062441 0.00237537 0.018 -0.00614296 0.00145617 0.018 -0.00574326 -0.00484315 0.018 0.005 0.000609339 0.018 -0.00520699 0.000882288 0.018 -0.0054002 0.000882288 0.018 0.0054002 0.00116481 0.018 -0.00557912 0.00206225 0.018 0.00602556 0.0023521 0.018 -9.51018e-05 0.00206225 0.018 -0.00602556 0.00337055 0.018 -0.00182944 0.00353666 0.018 -0.0019221 0.0037103 0.018 -0.00199972 0.000346688 0.018 -0.005 0.0024024 0.018 0.000471793 0.00237537 0.018 0.00614296 0.002696 0.018 0.00116964 0.00306609 0.018 0.001602 0.00301765 0.018 0.00632872 0.00334511 0.018 0.00639658 0.00367562 0.018 0.00644751 0.0037103 0.018 0.00199972 0.00407473 0.018 0.00210752 0.00426265 0.018 0.00213686 0.00483164 0.018 0.00212427 0.00467671 0.018 0.0064976 0.00501808 0.018 0.00208665 0.00501066 0.018 0.00647991 0.00534325 0.018 0.00644507 0.00554742 0.018 0.00187761 0.00586165 0.018 0.00166386 0.00613333 0.018 0.00139812 0.00600086 0.018 0.00632435 0.0062505 0.018 0.0012483 0.00664159 0.018 0.000190017 0.00726165 0.018 -0.00588415 0.0081335 0.018 -0.00538959 0.00840588 0.018 -0.00519559 0.00915825 0.018 -0.00453329 0.00891894 0.018 -0.00476687 0.00938524 0.018 -0.00428771 0.0101584 0.018 -0.00319885 0.0105829 0.018 -0.00229085 0.0106927 0.018 -0.00197496 0.0108628 0.018 -0.00132837 0.0109656 0.018 -0.000667716 0.0109914 0.018 -0.000334301 0.011 0.018 0 0.00664259 0.018 0.00613672 0.0109656 0.018 0.000667716 0.0108628 0.018 0.00132837 0.0105829 0.018 0.00229085 0.0101584 0.018 0.00319885 0.00891894 0.018 0.00476687 0.0081335 0.018 0.00538959 0.00664259 0.018 -0.00613672 0.00632414 0.018 -0.00623879 0.00600086 0.018 -0.00632435 0.00534325 0.018 -0.00644507 0.0043423 0.018 -0.00649809 0.00426265 0.018 -0.00213686 
0.00445244 0.018 -0.00214947 0.00367562 0.018 -0.00644751 0.00407473 0.018 -0.00210752 0.00657467 0.018 0.000564115 0.0065167 0.018 0.000745267 0.00644294 0.018 0.000920587 0.00537738 0.018 -0.00196283 0.00613333 0.018 -0.00139812 0.0062505 0.018 -0.0012483 0.00661641 0.018 -0.000378547 -0.011 -0.0126305 0.0134914 -0.0095 -0.0126305 0.0134914 -0.0095 -0.0127588 0.0134659 -0.011 -0.0127588 0.0134659 -0.011 -0.0132071 0.0132071 -0.0095 -0.013366 0.013 -0.0095 -0.0134239 0.0128827 -0.011 -0.0134659 0.0122412 -0.0095 -0.013366 0.012 -0.0095 -0.0132934 0.0118912 -0.011 -0.013 0.011634 -0.0095 -0.013 0.011634 -0.011 -0.0127588 0.0115341 -0.0095 -0.0126305 0.0115086 -0.011 -0.0125 0.0115 -0.011 -0.0123695 0.0115086 -0.011 -0.0121173 0.0115761 -0.0095 -0.0122412 0.0115341 -0.011 -0.012 0.011634 -0.0095 -0.012 0.011634 -0.0095 -0.0118912 0.0117066 -0.0095 -0.0117929 0.0117929 -0.0095 -0.0117066 0.0118912 -0.011 -0.011634 0.012 -0.0095 -0.0115341 0.0122412 -0.0095 -0.0115 0.0125 -0.011 -0.0115341 0.0127588 -0.011 -0.0115761 0.0128827 -0.0095 -0.0115341 0.0127588 -0.0095 -0.0117066 0.0131088 -0.0095 -0.0117929 0.0132071 -0.0095 -0.012 0.013366 -0.0095 -0.0122412 0.0134659 -0.011 -0.0128827 0.0134239 -0.0095 -0.013 0.013366 -0.011 -0.0132934 0.0131088 -0.011 -0.013366 0.013 -0.011 -0.0134239 0.0128827 -0.011 -0.0134914 0.0126305 -0.011 -0.0135 0.0125 -0.011 -0.0134239 0.0121173 -0.0095 -0.0132071 0.0117929 -0.011 -0.0122412 0.0115341 -0.011 -0.0117929 0.0117929 -0.0095 -0.011634 0.012 -0.0095 -0.0115761 0.0121173 -0.011 -0.0115341 0.0122412 -0.0095 -0.0115761 0.0128827 -0.011 -0.011634 0.013 -0.011 -0.0117929 0.0132071 -0.011 -0.0121173 0.0134239 -0.0095 -0.0121173 0.0134239 -0.0095 -0.0123695 0.0134914 -0.0095 -0.0126305 -0.0115086 -0.011 -0.0128827 -0.0115761 -0.0095 -0.0127588 -0.0115341 -0.011 -0.013 -0.011634 -0.011 -0.0131088 -0.0117066 -0.0095 -0.013 -0.011634 -0.0095 -0.0132071 -0.0117929 -0.011 -0.0134659 -0.0127588 -0.0095 -0.0132934 -0.0131088 -0.0095 -0.013 -0.013366 -0.011 -0.0125 -0.0135 -0.011 -0.0122412 -0.0134659 -0.0095 -0.0123695 -0.0134914 -0.011 -0.0118912 -0.0132934 -0.011 -0.0117929 -0.0132071 -0.0095 -0.0117929 -0.0132071 -0.0095 -0.0115761 -0.0128827 -0.0095 -0.0115086 -0.0126305 -0.0095 -0.0115 -0.0125 -0.0095 -0.0118912 -0.0117066 -0.011 -0.0121173 -0.0115761 -0.0095 -0.0121173 -0.0115761 -0.0095 -0.0125 -0.0115 -0.0095 -0.0128827 -0.0115761 -0.0095 -0.0131088 -0.0117066 -0.011 -0.013366 -0.012 -0.011 -0.0134239 -0.0121173 -0.0095 -0.0134239 -0.0121173 -0.011 -0.0134659 -0.0122412 -0.0095 -0.0134659 -0.0122412 -0.011 -0.0135 -0.0125 -0.011 -0.0134914 -0.0126305 -0.011 -0.0134239 -0.0128827 -0.0095 -0.0134239 -0.0128827 -0.0095 -0.0132071 -0.0132071 -0.011 -0.013 -0.013366 -0.011 -0.0128827 -0.0134239 -0.011 -0.0127588 -0.0134659 -0.0095 -0.0126305 -0.0134914 -0.0095 -0.0125 -0.0135 -0.011 -0.0123695 -0.0134914 -0.011 -0.0121173 -0.0134239 -0.0095 -0.0118912 -0.0132934 -0.011 -0.0117066 -0.0131088 -0.011 -0.011634 -0.013 -0.0095 -0.011634 -0.013 -0.011 -0.0115761 -0.0128827 -0.011 -0.0115086 -0.0126305 -0.011 -0.0115761 -0.0121173 -0.0095 -0.0115761 -0.0121173 -0.011 -0.011634 -0.012 -0.0095 -0.0117066 -0.0118912 -0.011 -0.0117929 -0.0117929 -0.0095 -0.0117929 -0.0117929 -0.011 -0.012 -0.011634 -0.011 0.0123695 0.0134914 -0.0095 0.0123695 0.0134914 -0.0095 0.0122412 0.0134659 -0.011 0.0117929 0.0132071 -0.0095 0.0118912 0.0132934 -0.011 0.0117066 0.0131088 -0.0095 0.0117066 0.0131088 -0.0095 0.011634 0.013 -0.0095 0.0115341 0.0127588 -0.0095 0.0115086 0.0126305 -0.011 
0.0115086 0.0123695 -0.0095 0.0115341 0.0122412 -0.011 0.0121173 0.0115761 -0.0095 0.0121173 0.0115761 -0.011 0.0122412 0.0115341 -0.011 0.0123695 0.0115086 -0.0095 0.013 0.011634 -0.011 0.0132934 0.0118912 -0.0095 0.0132934 0.0118912 -0.011 0.0134239 0.0121173 -0.0095 0.0134239 0.0121173 -0.0095 0.0134914 0.0123695 -0.011 0.0134239 0.0128827 -0.0095 0.0134239 0.0128827 -0.0095 0.0132934 0.0131088 -0.0095 0.0132071 0.0132071 -0.0095 0.0131088 0.0132934 -0.0095 0.0126305 0.0134914 -0.0095 0.012 0.013366 -0.011 0.0115086 0.0126305 -0.011 0.0115 0.0125 -0.0095 0.0115 0.0125 -0.0095 0.0115086 0.0123695 -0.011 0.0115761 0.0121173 -0.011 0.011634 0.012 -0.011 0.0117066 0.0118912 -0.0095 0.0117066 0.0118912 -0.0095 0.0122412 0.0115341 -0.0095 0.0125 0.0115 -0.011 0.0126305 0.0115086 -0.0095 0.0126305 0.0115086 -0.0095 0.0127588 0.0115341 -0.011 0.0128827 0.0115761 -0.0095 0.0128827 0.0115761 -0.011 0.0131088 0.0117066 -0.011 0.013366 0.012 -0.0095 0.013366 0.012 -0.0095 0.0134659 0.0122412 -0.011 0.0134914 0.0123695 -0.0095 0.0135 0.0125 -0.011 0.0134914 0.0126305 -0.0095 0.0134914 0.0126305 -0.0095 0.0134659 0.0127588 -0.011 0.0132934 0.0131088 -0.011 0.0132071 0.0132071 -0.011 0.0131088 0.0132934 -0.011 0.0127588 0.0134659 -0.011 0.0126305 0.0134914 -0.011 0.0125 0.0135 -0.0095 0.0123695 -0.0115086 -0.011 0.0122412 -0.0115341 -0.011 0.0121173 -0.0115761 -0.0095 0.0117929 -0.0117929 -0.0095 0.011634 -0.012 -0.0095 0.0115761 -0.0121173 -0.0095 0.0115086 -0.0123695 -0.0095 0.0115 -0.0125 -0.0095 0.0115341 -0.0127588 -0.0095 0.0115761 -0.0128827 -0.011 0.0117066 -0.0131088 -0.0095 0.0117066 -0.0131088 -0.0095 0.012 -0.013366 -0.011 0.0121173 -0.0134239 -0.0095 0.0121173 -0.0134239 -0.011 0.0127588 -0.0134659 -0.0095 0.0127588 -0.0134659 -0.0095 0.0128827 -0.0134239 -0.011 0.013 -0.013366 -0.0095 0.013 -0.013366 -0.0095 0.0132071 -0.0132071 -0.0095 0.0132934 -0.0131088 -0.0095 0.0134239 -0.0128827 -0.011 0.0134659 -0.0127588 -0.0095 0.0132934 -0.0118912 -0.011 0.0131088 -0.0117066 -0.0095 0.0132071 -0.0117929 -0.0095 0.0125 -0.0115 -0.011 0.0125 -0.0115 -0.011 0.012 -0.011634 -0.0095 0.012 -0.011634 -0.011 0.0118912 -0.0117066 -0.0095 0.0118912 -0.0117066 -0.011 0.011634 -0.012 -0.011 0.0115761 -0.0121173 -0.011 0.0115341 -0.0122412 -0.0095 0.0115086 -0.0126305 -0.011 0.0117929 -0.0132071 -0.011 0.012 -0.013366 -0.0095 0.0122412 -0.0134659 -0.011 0.0123695 -0.0134914 -0.0095 0.0123695 -0.0134914 -0.011 0.0125 -0.0135 -0.011 0.0131088 -0.0132934 -0.011 0.0132934 -0.0131088 -0.0095 0.0135 -0.0125 -0.0095 0.0134914 -0.0123695 -0.011 0.0134659 -0.0122412 -0.011 0.0134239 -0.0121173 -0.011 0.013366 -0.012 -0.0095 0.013366 -0.012 -0.011 0.013 -0.011634 -0.011 0.0128827 -0.0115761 -0.0095 0.0128827 -0.0115761 -0.011 0.0126305 -0.0115086 -0.0105 -0.0045 -0.009 -0.0105 -0.0045 -0.012 -0.0105 0.0045 -0.012 -0.0105 0.0045 -0.009 -0.0095 -0.0125 0.0135 -0.0095 -0.0118912 0.0132934 -0.0095 -0.011634 0.013 -0.0095 -0.003 0.0155 -0.0095 -0.0115086 0.0126305 -0.0095 -0.0115086 0.0123695 -0.0095 -0.0121173 0.0115761 -0.0095 -0.0123695 0.0115086 -0.0095 -0.0125 0.0115 -0.0095 -0.0127588 0.0115341 -0.0095 -0.0128827 0.0115761 -0.0095 -0.0131088 0.0117066 -0.0095 -0.015 0.005 -0.0095 -0.0134239 0.0121173 -0.0095 -0.0134659 0.0122412 -0.0095 -0.0134914 0.0123695 -0.0095 -0.0135 0.0125 -0.0095 -0.0134914 0.0126305 -0.0095 -0.0134659 0.0127588 -0.0095 -0.0132934 0.0131088 -0.0095 -0.0131088 0.0132934 -0.0095 -0.0132071 0.0132071 -0.0095 -0.0128827 0.0134239 -0.0095 -0.0123695 -0.0115086 -0.0095 -0.0122412 -0.0115341 
-0.0095 -0.012 -0.011634 -0.0095 -0.0055 -0.008 -0.0095 -0.011634 -0.012 -0.0095 -0.0115341 -0.0122412 -0.0095 -0.0115086 -0.0123695 -0.0095 -0.0115341 -0.0127588 -0.0095 -0.0117066 -0.0131088 -0.0095 -0.0055 -0.013 -0.0095 -0.012 -0.013366 -0.0095 -0.0121173 -0.0134239 -0.0095 -0.0122412 -0.0134659 -0.0095 -0.0128827 -0.0134239 -0.0095 -0.0127588 -0.0134659 -0.0095 -0.0131088 -0.0132934 -0.0095 -0.013366 -0.013 -0.0095 -0.0155 -0.0155 -0.0095 -0.0134659 -0.0127588 -0.0095 -0.0135 -0.0125 -0.0095 -0.0134914 -0.0126305 -0.0095 -0.0134914 -0.0123695 -0.0095 -0.013366 -0.012 -0.0095 -0.0132934 -0.0118912 -0.0095 -0.0155 0.0155 -0.0095 0.0125 0.0135 -0.0095 0.0127588 0.0134659 -0.0095 0.0128827 0.0134239 -0.0095 0.013 0.013366 -0.0095 0.0155 0.0155 -0.0095 0.013366 0.013 -0.0095 0.0132071 0.0117929 -0.0095 0.0131088 0.0117066 -0.0095 0.0123695 0.0115086 -0.0095 0.015 0.005 -0.0095 0.012 0.011634 -0.0095 0.0118912 0.0117066 -0.0095 0.0117929 0.0117929 -0.0095 0.003 0.005 -0.0095 0.011634 0.012 -0.0095 0.0115761 0.0121173 -0.0095 0.0115761 0.0128827 -0.0095 0.003 0.0155 -0.0095 0.0117929 0.0132071 -0.0095 0.0121173 0.0134239 -0.0095 0.0134659 -0.0122412 -0.0095 0.0134239 -0.0121173 -0.0095 0.0131088 -0.0117066 -0.0095 0.013 -0.011634 -0.0095 0.0126305 -0.0115086 -0.0095 0.0127588 -0.0115341 -0.0095 0.015 -0.005 -0.0095 0.0122412 -0.0115341 -0.0095 0.0121173 -0.0115761 -0.0095 0.0134914 -0.0126305 -0.0095 0.0134659 -0.0127588 -0.0095 0.013366 -0.013 -0.0095 0.0131088 -0.0132934 -0.0095 0.0126305 -0.0134914 -0.0095 0.0125 -0.0135 -0.0095 0.0055 -0.013 -0.0095 0.0118912 -0.0132934 -0.0095 0.0117929 -0.0132071 -0.0095 0.011634 -0.013 -0.0095 0.0115341 -0.0122412 -0.0095 0.0117066 -0.0118912 -0.0095 0.0055 -0.008 -0.0105 -0.002 0.0165 -0.0105 -0.002 0.005 -0.0095 -0.003 0.005 -0.011 -0.002 0.017 -0.0105 0.002 0.0165 -0.0095 0.0155 -0.0155 -0.011 -0.017 -0.017 -0.011 -0.0123695 0.0134914 -0.011 -0.0125 0.0135 -0.011 -0.0122412 0.0134659 -0.011 -0.013 0.013366 -0.011 -0.0131088 0.0132934 -0.011 -0.017 0.017 -0.011 -0.0134659 0.0127588 -0.011 -0.0134914 0.0123695 -0.011 -0.013366 0.012 -0.011 -0.0134914 -0.0123695 -0.011 -0.013366 -0.013 -0.011 -0.0132934 -0.0131088 -0.011 -0.0132071 -0.0132071 -0.011 -0.0131088 -0.0132934 -0.011 -0.0126305 -0.0134914 -0.011 -0.012 -0.013366 -0.011 -0.0115341 -0.0127588 -0.011 -0.0045 -0.012 -0.011 -0.0115 -0.0125 -0.011 -0.0115086 -0.0123695 -0.011 -0.0115341 -0.0122412 -0.011 -0.0117066 -0.0118912 -0.011 -0.0118912 -0.0117066 -0.011 -0.0122412 -0.0115341 -0.011 -0.0123695 -0.0115086 -0.011 -0.0125 -0.0115 -0.011 -0.0118912 0.0117066 -0.011 -0.002 0.005 -0.011 -0.0117066 0.0118912 -0.011 -0.0115761 0.0121173 -0.011 -0.0115086 0.0123695 -0.011 -0.0115 0.0125 -0.011 -0.0115086 0.0126305 -0.011 -0.0117066 0.0131088 -0.011 -0.0118912 0.0132934 -0.011 -0.012 0.013366 -0.011 -0.0132934 0.0118912 -0.011 -0.0132071 0.0117929 -0.011 -0.0131088 0.0117066 -0.011 -0.0126305 0.0115086 -0.011 -0.0126305 -0.0115086 -0.011 -0.0132934 -0.0118912 -0.011 -0.0132071 -0.0117929 -0.011 -0.0128827 0.0115761 -0.011 -0.0127588 -0.0115341 -0.011 -0.0045 -0.009 -0.011 0.0125 0.0115 -0.011 0.012 0.011634 -0.011 0.0118912 0.0117066 -0.011 0.0117929 0.0117929 -0.011 0.0115341 0.0122412 -0.011 0.002 0.005 -0.011 0.002 0.017 -0.011 0.0115341 0.0127588 -0.011 0.0115761 0.0128827 -0.011 0.011634 0.013 -0.011 0.0118912 0.0132934 -0.011 0.012 0.013366 -0.011 0.0121173 0.0134239 -0.011 0.0122412 0.0134659 -0.011 0.017 -0.017 -0.011 0.0118912 -0.0132934 -0.011 0.0115761 -0.0128827 -0.011 0.011634 -0.013 
-0.011 0.0115341 -0.0127588 -0.011 0.0115086 -0.0126305 -0.011 0.0045 -0.012 -0.011 0.0115086 -0.0123695 -0.011 0.0115 -0.0125 -0.011 0.0117066 -0.0118912 -0.011 0.0117929 -0.0117929 -0.011 0.0045 -0.009 -0.011 0.0123695 -0.0115086 -0.011 0.0127588 -0.0115341 -0.011 0.0127588 0.0115341 -0.011 0.013 0.011634 -0.011 0.0134914 -0.0123695 -0.011 0.0134914 -0.0126305 -0.011 0.0135 -0.0125 -0.011 0.0134239 -0.0128827 -0.011 0.013366 -0.013 -0.011 0.0132071 -0.0132071 -0.011 0.0128827 -0.0134239 -0.011 0.0126305 -0.0134914 -0.011 0.0122412 -0.0134659 -0.011 0.0132071 -0.0117929 -0.011 0.0132071 0.0117929 -0.011 0.0132934 -0.0118912 -0.011 0.0134659 0.0122412 -0.011 0.0135 0.0125 -0.011 0.0134659 0.0127588 -0.011 0.017 0.017 -0.011 0.013366 0.013 -0.011 0.013 0.013366 -0.011 0.0128827 0.0134239 0.00665 0.015 0 0.00664159 0.015 0.000190017 0.00661641 0.018 0.000378547 0.00661641 0.015 0.000378547 0.00635398 0.018 0.0010887 0.00635398 0.015 0.0010887 0.00613333 0.015 0.00139812 0.00600337 0.015 0.001537 0.00570927 0.018 0.00177769 0.00586165 0.015 0.00166386 0.00570927 0.015 0.00177769 0.00554742 0.015 0.00187761 0.00537738 0.018 0.00196283 0.00501808 0.015 0.00208665 0.00464259 0.018 0.00214527 0.00426265 0.015 0.00213686 0.00353666 0.015 0.0019221 0.00321328 0.015 0.00172246 0.00306609 0.015 0.001602 0.00293011 0.018 0.001469 0.00293011 0.015 0.001469 0.00280643 0.018 0.0013245 0.00280643 0.015 0.0013245 0.00259968 0.018 0.00100563 0.002696 0.015 0.00116964 0.00259968 0.015 0.00100563 0.00251824 0.015 0.000833743 0.0023521 0.015 -9.51018e-05 0.00236891 0.015 -0.000284561 0.00245231 0.018 -0.000655333 0.00251824 0.018 -0.000833743 0.00245231 0.015 -0.000655333 0.00259968 0.018 -0.00100563 0.002696 0.018 -0.00116964 0.00259968 0.015 -0.00100563 0.00280643 0.015 -0.0013245 0.00293011 0.015 -0.001469 0.00306609 0.015 -0.001602 0.00321328 0.015 -0.00172246 0.00337055 0.015 -0.00182944 0.00389013 0.018 -0.00206169 0.00426265 0.015 -0.00213686 0.00501808 0.018 -0.00208665 0.00483164 0.015 -0.00212427 0.00520047 0.015 -0.00203269 0.00537738 0.015 -0.00196283 0.00554742 0.015 -0.00187761 0.00586165 0.015 -0.00166386 0.00613333 0.015 -0.00139812 0.0062505 0.015 -0.0012483 0.00635398 0.015 -0.0010887 0.00664159 0.015 -0.000190017 0.00644294 0.015 0.000920587 0.0062505 0.015 0.0012483 0.00600337 0.018 0.001537 0.00520047 0.018 0.00203269 0.00445244 0.018 0.00214947 0.00389013 0.018 0.00206169 0.00353666 0.018 0.0019221 0.00337055 0.018 0.00182944 0.00337055 0.015 0.00182944 0.00321328 0.018 0.00172246 0.00251824 0.018 0.000833743 0.00245231 0.018 0.000655333 0.0024024 0.015 0.000471793 0.00236891 0.018 0.000284561 0.00236891 0.015 0.000284561 0.0023521 0.018 9.51018e-05 0.00236891 0.018 -0.000284561 0.0024024 0.018 -0.000471793 0.0024024 0.015 -0.000471793 0.00280643 0.018 -0.0013245 0.00293011 0.018 -0.001469 0.00306609 0.018 -0.001602 0.00321328 0.018 -0.00172246 0.00464259 0.018 -0.00214527 0.00483164 0.018 -0.00212427 0.00520047 0.018 -0.00203269 0.00554742 0.018 -0.00187761 0.00570927 0.018 -0.00177769 0.00586165 0.018 -0.00166386 0.00600337 0.018 -0.001537 0.00635398 0.018 -0.0010887 0.00644294 0.018 -0.000920587 0.0065167 0.018 -0.000745267 0.00657467 0.018 -0.000564115 0.00664159 0.018 -0.000190017 0.00665 0.018 0 0.00525 -0.017 0 0.00524162 -0.017 -0.000111782 0.00521668 -0.017 -0.000221066 0.00517573 -0.018 -0.000325413 0.004875 -0.018 -0.000649519 0.00477401 -0.018 -0.000698155 0.00477401 -0.017 -0.000698155 0.00455605 -0.018 -0.000747903 0.00444395 -0.018 -0.000747903 0.00433311 -0.018 
-0.000731196 0.00433311 -0.017 -0.000731196 0.00395021 -0.018 -0.000510129 0.00388032 -0.017 -0.00042249 0.00382427 -0.018 -0.000325413 0.00375838 -0.018 -0.000111782 0.00375 -0.018 0 0.00382427 -0.018 0.000325413 0.00395021 -0.018 0.000510129 0.00395021 -0.017 0.000510129 0.00444395 -0.017 0.000747903 0.00466689 -0.018 0.000731196 0.00477401 -0.018 0.000698155 0.00466689 -0.017 0.000731196 0.00496762 -0.017 0.000586374 0.00511968 -0.018 0.00042249 0.00504979 -0.017 0.000510129 0.00517573 -0.018 0.000325413 0.00511968 -0.017 0.00042249 0.00525 -0.018 0 0.00524162 -0.018 -0.000111782 0.00511968 -0.018 -0.00042249 0.00466689 -0.018 -0.000731196 0.00422599 -0.017 -0.000698155 0.00403238 -0.018 -0.000586374 0.00388032 -0.018 -0.00042249 0.00382427 -0.017 -0.000325413 0.00375838 -0.017 -0.000111782 0.00375838 -0.018 0.000111782 0.00388032 -0.018 0.00042249 0.00388032 -0.017 0.00042249 0.00422599 -0.018 0.000698155 0.00444395 -0.018 0.000747903 0.00455605 -0.018 0.000747903 0.00455605 -0.017 0.000747903 0.004875 -0.018 0.000649519 0.00496762 -0.018 0.000586374 0.00517573 -0.017 0.000325413 0.00521668 -0.018 0.000221066 -0.002 -0.015 0 -0.00200851 -0.0165 -9.18746e-05 -0.00203376 -0.0165 -0.000180621 -0.00203376 -0.015 -0.000180621 -0.00207489 -0.0165 -0.000263216 -0.00200851 -0.018 -9.18746e-05 -0.002 -0.018 0 -0.00200851 -0.018 9.18746e-05 -0.00207489 -0.018 0.000263216 -0.00219868 -0.018 0.000399009 -0.00254613 -0.0165 0.000497867 -0.00263683 -0.018 0.000480913 -0.00292511 -0.018 0.000263216 -0.00299149 -0.018 9.18746e-05 -0.0028695 -0.018 -0.000336848 -0.00272287 -0.0165 -0.000447582 -0.00280132 -0.018 -0.000399009 -0.00245387 -0.0165 -0.000497867 -0.00254613 -0.018 -0.000497867 -0.0021305 -0.0165 -0.000336848 -0.0021305 -0.015 -0.000336848 -0.00219868 -0.018 -0.000399009 -0.00219868 -0.0165 -0.000399009 -0.00227713 -0.018 -0.000447582 -0.00227713 -0.0165 -0.000447582 -0.00219868 -0.015 -0.000399009 -0.00227713 -0.015 -0.000447582 -0.00236317 -0.018 -0.000480913 -0.00236317 -0.015 -0.000480913 -0.00236317 -0.0165 -0.000480913 -0.00245387 -0.015 -0.000497867 -0.00254613 -0.0165 -0.000497867 -0.00263683 -0.0165 -0.000480913 -0.00272287 -0.018 -0.000447582 -0.00263683 -0.015 -0.000480913 -0.00280132 -0.0165 -0.000399009 -0.0028695 -0.0165 -0.000336848 -0.0028695 -0.015 -0.000336848 -0.00292511 -0.018 -0.000263216 -0.00292511 -0.0165 -0.000263216 -0.00296624 -0.0165 -0.000180621 -0.00296624 -0.015 -0.000180621 -0.00299149 -0.0165 -9.18746e-05 -0.003 -0.0165 0 -0.003 -0.015 0 -0.00299149 -0.0165 9.18746e-05 -0.00296624 -0.0165 0.000180621 -0.00299149 -0.015 9.18746e-05 -0.00292511 -0.0165 0.000263216 -0.00292511 -0.015 0.000263216 -0.0028695 -0.015 0.000336848 -0.0028695 -0.0165 0.000336848 -0.00280132 -0.0165 0.000399009 -0.00280132 -0.015 0.000399009 -0.00272287 -0.0165 0.000447582 -0.00272287 -0.015 0.000447582 -0.00263683 -0.0165 0.000480913 -0.00254613 -0.015 0.000497867 -0.00245387 -0.0165 0.000497867 -0.00236317 -0.018 0.000480913 -0.00236317 -0.0165 0.000480913 -0.00236317 -0.015 0.000480913 -0.00227713 -0.018 0.000447582 -0.00227713 -0.0165 0.000447582 -0.00227713 -0.015 0.000447582 -0.00219868 -0.015 0.000399009 -0.00219868 -0.0165 0.000399009 -0.0021305 -0.015 0.000336848 -0.0021305 -0.0165 0.000336848 -0.00207489 -0.0165 0.000263216 -0.00203376 -0.0165 0.000180621 -0.00200851 -0.015 9.18746e-05 -0.00200851 -0.0165 9.18746e-05 -0.002 -0.0165 0 -0.00676447 0.0135355 0.005 - - - - - - - - - - -0.998832 0 -0.0483122 -0.989506 0 -0.144492 -0.989506 0 -0.144492 -0.970941 0 -0.239317 
-0.970941 0 -0.239317 -0.943313 0 -0.331905 -0.906874 0 -0.421402 -0.861972 0 -0.506957 -0.809015 0 -0.587788 -0.748512 0 -0.663122 -0.681016 0 -0.732269 -0.607162 0 -0.794578 -0.527642 0 -0.849467 -0.44319 0 -0.896428 -0.354605 0 -0.935016 -0.262708 0 -0.964875 -0.168357 0 -0.985726 -0.0724332 0 -0.997373 0.0241614 0 -0.999708 0.120539 0 -0.992709 0.215783 0 -0.976441 0.309018 0 -0.951056 0.399365 0 -0.916792 0.485982 0 -0.873969 0.568065 0 -0.822983 0.644843 0 -0.764315 0.715598 0 -0.698513 0.779676 0 -0.626183 0.83647 0 -0.548013 0.885455 0 -0.464725 0.926176 0 -0.377092 0.958246 0 -0.285946 0.981369 0 -0.19213 0.995332 0 -0.0965147 1 0 0 0.995332 0 0.0965147 0.981369 0 0.19213 0.958246 0 0.285946 0.926176 0 0.377092 0.885455 0 0.464725 0.83647 0 0.548013 0.779676 0 0.626183 0.715597 0 0.698513 0.644845 0 0.764314 0.568065 0 0.822984 0.485983 0 0.873968 0.399363 0 0.916793 0.309019 0 0.951056 0.215783 0 0.976441 0.120536 0 0.992709 0.0241641 0 0.999708 -0.0724359 0 0.997373 -0.168357 0 0.985726 -0.262707 0 0.964876 -0.354606 0 0.935016 -0.443189 0 0.896428 -0.527642 0 0.849467 -0.607162 0 0.794578 -0.681016 0 0.732269 -0.748512 0 0.663122 -0.809015 0 0.587788 -0.861972 0 0.506957 -0.906874 0 0.421402 -0.943313 0 0.331905 -0.970941 0 0.239317 -0.989506 0 0.144492 -0.998832 0 0.0483122 -0.998832 0 -0.0483122 -0.943313 0 -0.331905 -0.906874 0 -0.421402 -0.861972 0 -0.506957 -0.809015 0 -0.587788 -0.748512 0 -0.663122 -0.681016 0 -0.732269 -0.607162 0 -0.794578 -0.527642 0 -0.849467 -0.44319 0 -0.896428 -0.354605 0 -0.935016 -0.262708 0 -0.964875 -0.168357 0 -0.985726 -0.0724332 0 -0.997373 0.0241614 0 -0.999708 0.120539 0 -0.992709 0.215783 0 -0.976441 0.309018 0 -0.951056 0.399365 0 -0.916792 0.485982 0 -0.873969 0.568065 0 -0.822983 0.644843 0 -0.764315 0.715598 0 -0.698513 0.779676 0 -0.626183 0.83647 0 -0.548013 0.885455 0 -0.464725 0.926176 0 -0.377092 0.958246 0 -0.285946 0.981369 0 -0.19213 0.995332 0 -0.0965147 1 0 0 0.995332 0 0.0965147 0.981369 0 0.19213 0.958246 0 0.285946 0.926176 0 0.377092 0.885455 0 0.464725 0.83647 0 0.548013 0.779676 0 0.626183 0.715597 0 0.698513 0.644845 0 0.764314 0.568065 0 0.822984 0.485983 0 0.873968 0.399363 0 0.916793 0.309019 0 0.951056 0.215783 0 0.976441 0.120536 0 0.992709 0.0241641 0 0.999708 -0.0724359 0 0.997373 -0.168357 0 0.985726 -0.262707 0 0.964876 -0.354606 0 0.935016 -0.443189 0 0.896428 -0.527642 0 0.849467 -0.607162 0 0.794578 -0.681016 0 0.732269 -0.748512 0 0.663122 -0.809015 0 0.587788 -0.861972 0 0.506957 -0.906874 0 0.421402 -0.943313 0 0.331905 -0.970941 0 0.239317 -0.989506 0 0.144492 -0.998832 0 0.0483122 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.999284 0 0.0378437 0.99356 0 0.113308 0.99356 0 0.113308 0.982145 0 0.188123 0.982145 0 0.188123 0.965105 0 0.261865 0.942537 0 0.334101 0.914569 0 0.404429 0.881362 0 0.472442 0.84311 0 0.537741 0.800022 0 0.59997 0.752361 0 0.658751 0.700384 0 0.713766 0.644397 0 0.764691 0.584718 0 
0.811237 0.521692 0 0.853134 0.455675 0 0.890146 0.38705 0 0.922059 0.316208 0 0.94869 0.243554 0 0.969887 0.169505 0 0.985529 0.0944862 0 0.995526 0.0189219 0 0.999821 -0.056744 0 0.998389 -0.132092 0 0.991238 -0.206675 0 0.97841 -0.280083 0 0.959976 -0.35188 0 0.936045 -0.421664 0 0.906752 -0.489036 0 0.872264 -0.5536 0 0.832783 -0.615001 0 0.788527 -0.672872 0 0.739759 -0.726893 0 0.68675 -0.77675 0 0.62981 -0.822155 0 0.569264 -0.862854 0 0.505453 -0.898609 0 0.43875 -0.929219 0 0.369531 -0.954503 0 0.2982 -0.974323 0 0.225153 -0.988561 0 0.150825 -0.997136 0 0.0756306 -1 0 0 -0.997136 0 -0.0756306 -0.988561 0 -0.150825 -0.974323 0 -0.225153 -0.954503 0 -0.2982 -0.929219 0 -0.36953 -0.898609 0 -0.438751 -0.862854 0 -0.505453 -0.822155 0 -0.569263 -0.776749 0 -0.62981 -0.726893 0 -0.68675 -0.672872 0 -0.739758 -0.614999 0 -0.788528 -0.553601 0 -0.832782 -0.489035 0 -0.872264 -0.421664 0 -0.906752 -0.351881 0 -0.936045 -0.280082 0 -0.959976 -0.206676 0 -0.978409 -0.132089 0 -0.991238 -0.056746 0 -0.998389 0.0189239 0 -0.999821 0.0944852 0 -0.995526 0.169505 0 -0.985529 0.243553 0 -0.969888 0.316208 0 -0.94869 0.387051 0 -0.922058 0.455675 0 -0.890146 0.521693 0 -0.853133 0.584718 0 -0.811237 0.644398 0 -0.76469 0.700384 0 -0.713766 0.75236 0 -0.658752 0.800022 0 -0.599971 0.843111 0 -0.53774 0.881362 0 -0.472441 0.914569 0 -0.40443 0.942537 0 -0.334101 0.965105 0 -0.261865 0.982145 0 -0.188123 0.99356 0 -0.113308 0.999284 0 -0.0378437 0.999284 0 0.0378437 0.965105 0 0.261865 0.942537 0 0.334101 0.914569 0 0.404429 0.881362 0 0.472442 0.84311 0 0.537741 0.800022 0 0.59997 0.752361 0 0.658751 0.700384 0 0.713766 0.644397 0 0.764691 0.584718 0 0.811237 0.521692 0 0.853134 0.455675 0 0.890146 0.38705 0 0.922059 0.316208 0 0.94869 0.243554 0 0.969887 0.169505 0 0.985529 0.0944862 0 0.995526 0.0189219 0 0.999821 -0.056744 0 0.998389 -0.132092 0 0.991238 -0.206675 0 0.97841 -0.280083 0 0.959976 -0.35188 0 0.936045 -0.421664 0 0.906752 -0.489036 0 0.872264 -0.5536 0 0.832783 -0.615001 0 0.788527 -0.672872 0 0.739759 -0.726893 0 0.68675 -0.77675 0 0.62981 -0.822155 0 0.569264 -0.862854 0 0.505453 -0.898609 0 0.43875 -0.929219 0 0.369531 -0.954503 0 0.2982 -0.974323 0 0.225153 -0.988561 0 0.150825 -0.997136 0 0.0756306 -1 0 0 -0.997136 0 -0.0756306 -0.988561 0 -0.150825 -0.974323 0 -0.225153 -0.954503 0 -0.2982 -0.929219 0 -0.36953 -0.898609 0 -0.438751 -0.862854 0 -0.505453 -0.822155 0 -0.569263 -0.776749 0 -0.62981 -0.726893 0 -0.68675 -0.672872 0 -0.739758 -0.614999 0 -0.788528 -0.553601 0 -0.832782 -0.489035 0 -0.872264 -0.421664 0 -0.906752 -0.351881 0 -0.936045 -0.280082 0 -0.959976 -0.206676 0 -0.978409 -0.132089 0 -0.991238 -0.056746 0 -0.998389 0.0189239 0 -0.999821 0.0944852 0 -0.995526 0.169505 0 -0.985529 0.243553 0 -0.969888 0.316208 0 -0.94869 0.387051 0 -0.922058 0.455675 0 -0.890146 0.521693 0 -0.853133 0.584718 0 -0.811237 0.644398 0 -0.76469 0.700384 0 -0.713766 0.75236 0 -0.658752 0.800022 0 -0.599971 0.843111 0 -0.53774 0.881362 0 -0.472441 0.914569 0 -0.40443 0.942537 0 -0.334101 0.965105 0 -0.261865 0.982145 0 -0.188123 0.99356 0 -0.113308 0.999284 0 -0.0378437 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 
1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -0.683594 0.729862 0 -0.634391 0.773012 0 -0.683594 0.729862 0 -0.634391 0.773012 0 -0.582477 0.812847 0 -0.582477 0.812847 0 -0.528068 0.849202 0 -0.471395 0.881922 0 -0.528068 0.849202 0 -0.528068 0.849202 0 -0.582477 0.812847 0 -0.582477 0.812847 0 -0.634391 0.773012 0 -0.634391 0.773012 0 -0.683594 0.729862 0 -0.528068 0.849202 0 -0.471395 0.881922 0 -0.412715 0.91086 0 -0.471395 0.881922 0 -0.471395 0.881922 0 -0.412715 0.91086 0 -0.352244 0.935908 0 -0.412715 0.91086 0 -0.412715 0.91086 0 -0.352244 0.935908 0 -0.290292 0.956938 0 -0.352244 0.935908 0 -0.352244 0.935908 0 -0.290292 0.956938 0 -0.227074 0.973878 0 -0.290292 0.956938 0 -0.290292 0.956938 0 -0.227074 0.973878 0 -0.162887 0.986645 0 -0.227074 0.973878 0 -0.227074 0.973878 0 -0.162887 0.986645 0 -0.0980235 0.995184 0 -0.162887 0.986645 0 -0.162887 0.986645 0 -0.0980235 0.995184 0 -0.0327196 0.999465 0 -0.0980235 0.995184 0 -0.0980235 0.995184 0 -0.0327196 0.999465 0 -0.0327196 0.999465 0 -0.707107 0.707107 0 -0.707107 0.707107 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -0.0327196 -0.999464 0.000470076 -0.0362764 -0.999342 0.000355063 -0.050655 -0.998716 7.32984e-05 -0.0651592 -0.997875 -2.35692e-05 -0.0795809 -0.996828 6.6908e-05 -0.0935748 -0.995612 0.000330822 -0.0980165 -0.995185 0.000467256 -0.106598 -0.994302 0.00023619 -0.118338 -0.992973 5.1418e-05 -0.128888 -0.991659 -9.49023e-06 -0.138381 -0.990379 1.82147e-05 -0.146776 -0.98917 0.000107175 -0.153942 -0.98808 0.000229086 -0.162893 -0.986644 0.00042587 -0.164304 -0.98641 0.000395846 -0.160231 -0.987079 0.000513614 -0.22708 -0.973876 0 -0.290285 -0.95694 0 -0.290285 -0.95694 0 -0.352244 -0.935908 0 -0.412715 -0.91086 0 -0.352244 -0.935908 0 -0.352244 -0.935908 0 -0.290285 -0.95694 0 -0.290285 -0.95694 0 -0.22708 -0.973876 0 -0.168327 -0.985731 -0.00028942 -0.179677 -0.983726 0 -0.179677 -0.983726 0 -0.168327 -0.985731 0.00028942 -0.352244 -0.935908 0 -0.412715 -0.91086 0 -0.47139 -0.881925 0 -0.412715 -0.91086 0 -0.412715 -0.91086 0 -0.47139 -0.881925 0 -0.528073 -0.849199 0 -0.47139 -0.881925 0 -0.47139 -0.881925 0 -0.528073 -0.849199 0 -0.582473 -0.81285 0 -0.528073 -0.849199 0 -0.528073 -0.849199 0 -0.582473 -0.81285 0 -0.634395 -0.773009 0 -0.582473 -0.81285 0 -0.582473 -0.81285 0 -0.634395 -0.773009 0 -0.683594 -0.729862 0 -0.634395 -0.773009 0 -0.634395 -0.773009 0 -0.683594 -0.729862 0 -0.683594 -0.729862 0 -0.22708 -0.973876 0 -0.22708 -0.973876 0 -0.162893 -0.986644 -0.000425871 -0.153942 -0.98808 -0.000229087 -0.146776 -0.98917 -0.000107175 -0.138381 
[mesh geometry data omitted: per-vertex normal vectors followed by triangle vertex/normal index lists from an added model asset file; the raw float and index arrays are not reproduced here]
554 1057 556 1057 553 1057 549 1058 553 1058 702 1058 547 1059 702 1059 500 1059 501 1060 500 1060 503 1060 502 1061 503 1061 698 1061 545 1062 698 1062 697 1062 544 1063 697 1063 504 1063 540 1064 504 1064 696 1064 563 1065 696 1065 693 1065 538 1066 693 1066 691 1066 542 1067 691 1067 505 1067 535 1068 505 1068 506 1068 564 1069 506 1069 507 1069 534 1070 507 1070 687 1070 508 1071 687 1071 531 1071 530 1072 531 1072 684 1072 509 1073 684 1073 532 1073 525 1074 532 1074 643 1074 519 1075 643 1075 642 1075 520 1076 642 1076 565 1076 510 1077 565 1077 511 1077 523 1078 511 1078 522 1078 516 1079 522 1079 490 1079 491 1080 516 1080 490 1080 491 1081 512 1081 516 1081 491 1082 513 1082 512 1082 491 1083 339 1083 513 1083 514 1084 486 1084 494 1084 495 1085 514 1085 494 1085 494 1086 515 1086 493 1086 635 1087 494 1087 493 1087 512 1088 340 1088 516 1088 516 1089 340 1089 521 1089 523 1090 521 1090 524 1090 510 1091 524 1091 342 1091 517 1092 510 1092 342 1092 517 1093 520 1093 510 1093 517 1094 518 1094 520 1094 520 1095 518 1095 519 1095 642 1096 520 1096 519 1096 516 1097 521 1097 523 1097 522 1098 516 1098 523 1098 523 1099 524 1099 510 1099 511 1100 523 1100 510 1100 518 1101 349 1101 519 1101 519 1102 349 1102 369 1102 525 1103 369 1103 526 1103 509 1104 526 1104 527 1104 528 1105 509 1105 527 1105 528 1106 530 1106 509 1106 528 1107 529 1107 530 1107 530 1108 529 1108 508 1108 531 1109 530 1109 508 1109 519 1110 369 1110 525 1110 643 1111 519 1111 525 1111 525 1112 526 1112 509 1112 532 1113 525 1113 509 1113 529 1114 533 1114 508 1114 508 1115 533 1115 364 1115 534 1116 364 1116 363 1116 365 1117 534 1117 363 1117 365 1118 564 1118 534 1118 365 1119 366 1119 564 1119 564 1120 366 1120 535 1120 506 1121 564 1121 535 1121 508 1122 364 1122 534 1122 687 1123 508 1123 534 1123 366 1124 536 1124 535 1124 535 1125 536 1125 537 1125 542 1126 537 1126 541 1126 538 1127 541 1127 539 1127 368 1128 538 1128 539 1128 368 1129 563 1129 538 1129 368 1130 367 1130 563 1130 563 1131 367 1131 540 1131 696 1132 563 1132 540 1132 535 1133 537 1133 542 1133 505 1134 535 1134 542 1134 542 1135 541 1135 538 1135 691 1136 542 1136 538 1136 367 1137 543 1137 540 1137 540 1138 543 1138 544 1138 504 1139 540 1139 544 1139 543 1140 447 1140 544 1140 544 1141 447 1141 545 1141 697 1142 544 1142 545 1142 447 1143 446 1143 545 1143 545 1144 446 1144 502 1144 698 1145 545 1145 502 1145 446 1146 445 1146 502 1146 502 1147 445 1147 501 1147 503 1148 502 1148 501 1148 445 1149 546 1149 501 1149 501 1150 546 1150 547 1150 500 1151 501 1151 547 1151 546 1152 548 1152 547 1152 547 1153 548 1153 549 1153 702 1154 547 1154 549 1154 548 1155 444 1155 549 1155 549 1156 444 1156 552 1156 554 1157 552 1157 555 1157 550 1158 555 1158 557 1158 558 1159 557 1159 443 1159 559 1160 443 1160 560 1160 561 1161 560 1161 442 1161 496 1162 442 1162 370 1162 551 1163 496 1163 370 1163 551 1164 634 1164 496 1164 549 1165 552 1165 554 1165 553 1166 549 1166 554 1166 554 1167 555 1167 550 1167 556 1168 554 1168 550 1168 550 1169 557 1169 558 1169 499 1170 550 1170 558 1170 558 1171 443 1171 559 1171 632 1172 558 1172 559 1172 559 1173 560 1173 561 1173 498 1174 559 1174 561 1174 561 1175 442 1175 496 1175 497 1176 561 1176 496 1176 514 1177 562 1177 551 1177 563 1178 693 1178 538 1178 564 1179 507 1179 534 1179 530 1180 684 1180 509 1180 520 1181 565 1181 510 1181 332 1182 576 1182 566 1182 567 1183 566 1183 590 1183 334 1184 590 1184 580 1184 589 1185 580 1185 568 1185 335 1186 568 1186 569 1186 588 1187 569 1187 582 1187 337 1188 582 1188 
570 1188 587 1189 570 1189 571 1189 572 1190 571 1190 575 1190 573 1191 575 1191 574 1191 573 1192 572 1192 575 1192 576 1193 577 1193 566 1193 566 1194 577 1194 759 1194 590 1195 759 1195 578 1195 579 1196 590 1196 578 1196 579 1197 580 1197 590 1197 579 1198 758 1198 580 1198 580 1199 758 1199 585 1199 568 1200 585 1200 764 1200 581 1201 568 1201 764 1201 581 1202 569 1202 568 1202 581 1203 765 1203 569 1203 569 1204 765 1204 582 1204 582 1205 765 1205 583 1205 762 1206 582 1206 583 1206 762 1207 570 1207 582 1207 762 1208 584 1208 570 1208 570 1209 584 1209 586 1209 571 1210 586 1210 767 1210 768 1211 571 1211 767 1211 768 1212 575 1212 571 1212 768 1213 574 1213 575 1213 566 1214 759 1214 590 1214 580 1215 585 1215 568 1215 570 1216 586 1216 571 1216 572 1217 587 1217 571 1217 587 1218 337 1218 570 1218 337 1219 588 1219 582 1219 588 1220 335 1220 569 1220 335 1221 589 1221 568 1221 589 1222 334 1222 580 1222 334 1223 567 1223 590 1223 567 1224 332 1224 566 1224 591 1225 963 1225 677 1225 591 1226 964 1226 963 1226 591 1227 127 1227 964 1227 964 1228 127 1228 965 1228 965 1229 127 1229 592 1229 966 1230 592 1230 593 1230 966 1231 965 1231 592 1231 592 1232 594 1232 593 1232 593 1233 594 1233 967 1233 967 1234 594 1234 597 1234 969 1235 597 1235 595 1235 971 1236 595 1236 596 1236 1019 1237 596 1237 598 1237 1019 1238 971 1238 596 1238 967 1239 597 1239 969 1239 969 1240 595 1240 971 1240 596 1241 131 1241 598 1241 598 1242 131 1242 599 1242 599 1243 131 1243 133 1243 1020 1244 133 1244 973 1244 1020 1245 599 1245 133 1245 133 1246 600 1246 973 1246 973 1247 600 1247 601 1247 601 1248 600 1248 602 1248 603 1249 602 1249 136 1249 605 1250 136 1250 604 1250 605 1251 603 1251 136 1251 601 1252 602 1252 603 1252 136 1253 137 1253 604 1253 604 1254 137 1254 606 1254 606 1255 137 1255 607 1255 608 1256 607 1256 1026 1256 608 1257 606 1257 607 1257 607 1258 609 1258 1026 1258 1026 1259 609 1259 974 1259 974 1260 609 1260 610 1260 610 1261 609 1261 612 1261 611 1262 612 1262 614 1262 613 1263 614 1263 615 1263 613 1264 611 1264 614 1264 610 1265 612 1265 611 1265 614 1266 139 1266 615 1266 615 1267 139 1267 616 1267 616 1268 139 1268 977 1268 977 1269 139 1269 184 1269 617 1270 184 1270 141 1270 978 1271 141 1271 979 1271 978 1272 617 1272 141 1272 977 1273 184 1273 617 1273 141 1274 142 1274 979 1274 979 1275 142 1275 1030 1275 1030 1276 142 1276 618 1276 618 1277 142 1277 185 1277 1031 1278 185 1278 980 1278 1031 1279 618 1279 185 1279 185 1280 187 1280 980 1280 980 1281 187 1281 981 1281 981 1282 187 1282 619 1282 982 1283 619 1283 1033 1283 982 1284 981 1284 619 1284 619 1285 188 1285 1033 1285 1033 1286 188 1286 984 1286 984 1287 188 1287 985 1287 985 1288 188 1288 621 1288 987 1289 621 1289 148 1289 1036 1290 148 1290 620 1290 1036 1291 987 1291 148 1291 985 1292 621 1292 987 1292 148 1293 623 1293 620 1293 620 1294 623 1294 622 1294 622 1295 623 1295 190 1295 624 1296 190 1296 191 1296 192 1297 624 1297 191 1297 192 1298 193 1298 624 1298 624 1299 193 1299 625 1299 626 1300 624 1300 625 1300 626 1301 153 1301 624 1301 624 1302 153 1302 1823 1302 1820 1303 624 1303 1823 1303 1820 1304 627 1304 624 1304 624 1305 627 1305 704 1305 704 1306 627 1306 1816 1306 629 1307 1816 1307 628 1307 1814 1308 629 1308 628 1308 1814 1309 1812 1309 629 1309 629 1310 1812 1310 1809 1310 1808 1311 629 1311 1809 1311 1808 1312 630 1312 629 1312 629 1313 630 1313 556 1313 499 1314 629 1314 556 1314 499 1315 631 1315 629 1315 499 1316 632 1316 631 1316 631 1317 632 1317 633 1317 633 1318 632 1318 498 1318 497 
1319 633 1319 498 1319 497 1320 733 1320 633 1320 497 1321 634 1321 733 1321 733 1322 634 1322 731 1322 731 1323 634 1323 562 1323 495 1324 731 1324 562 1324 495 1325 734 1325 731 1325 495 1326 635 1326 734 1326 734 1327 635 1327 703 1327 703 1328 635 1328 492 1328 636 1329 492 1329 637 1329 490 1330 636 1330 637 1330 490 1331 683 1331 636 1331 490 1332 522 1332 683 1332 683 1333 522 1333 511 1333 735 1334 511 1334 565 1334 642 1335 735 1335 565 1335 642 1336 638 1336 735 1336 642 1337 639 1337 638 1337 642 1338 741 1338 639 1338 642 1339 743 1339 741 1339 642 1340 640 1340 743 1340 642 1341 746 1341 640 1341 642 1342 641 1342 746 1342 642 1343 644 1343 641 1343 642 1344 643 1344 644 1344 644 1345 643 1345 753 1345 753 1346 643 1346 532 1346 755 1347 532 1347 757 1347 755 1348 753 1348 532 1348 622 1349 190 1349 624 1349 1823 1350 153 1350 1824 1350 1824 1351 153 1351 656 1351 1826 1352 656 1352 154 1352 645 1353 1826 1353 154 1353 645 1354 647 1354 1826 1354 645 1355 646 1355 647 1355 647 1356 646 1356 648 1356 648 1357 646 1357 649 1357 1830 1358 649 1358 650 1358 1759 1359 650 1359 651 1359 338 1360 651 1360 933 1360 338 1361 1759 1361 651 1361 338 1362 652 1362 1759 1362 338 1363 1762 1363 652 1363 338 1364 653 1364 1762 1364 338 1365 1779 1365 653 1365 338 1366 1784 1366 1779 1366 338 1367 1785 1367 1784 1367 338 1368 1787 1368 1785 1368 338 1369 1789 1369 1787 1369 338 1370 573 1370 1789 1370 1789 1371 573 1371 654 1371 654 1372 573 1372 574 1372 1793 1373 574 1373 655 1373 1793 1374 654 1374 574 1374 1824 1375 656 1375 1826 1375 648 1376 649 1376 1830 1376 1830 1377 650 1377 1759 1377 657 1378 662 1378 651 1378 657 1379 195 1379 662 1379 662 1380 195 1380 659 1380 658 1381 662 1381 659 1381 658 1382 660 1382 662 1382 662 1383 660 1383 159 1383 196 1384 662 1384 159 1384 196 1385 197 1385 662 1385 662 1386 197 1386 661 1386 199 1387 662 1387 661 1387 199 1388 201 1388 662 1388 662 1389 201 1389 202 1389 203 1390 662 1390 202 1390 203 1391 663 1391 662 1391 662 1392 663 1392 163 1392 936 1393 163 1393 205 1393 937 1394 205 1394 991 1394 937 1395 936 1395 205 1395 662 1396 163 1396 936 1396 205 1397 206 1397 991 1397 991 1398 206 1398 992 1398 992 1399 206 1399 994 1399 994 1400 206 1400 165 1400 995 1401 165 1401 208 1401 940 1402 208 1402 939 1402 940 1403 995 1403 208 1403 994 1404 165 1404 995 1404 208 1405 664 1405 939 1405 939 1406 664 1406 941 1406 941 1407 664 1407 942 1407 942 1408 664 1408 665 1408 944 1409 665 1409 946 1409 944 1410 942 1410 665 1410 665 1411 209 1411 946 1411 946 1412 209 1412 666 1412 666 1413 209 1413 667 1413 997 1414 667 1414 668 1414 997 1415 666 1415 667 1415 667 1416 210 1416 668 1416 668 1417 210 1417 998 1417 998 1418 210 1418 947 1418 947 1419 210 1419 669 1419 682 1420 669 1420 670 1420 675 1421 670 1421 671 1421 211 1422 675 1422 671 1422 211 1423 212 1423 675 1423 675 1424 212 1424 172 1424 672 1425 675 1425 172 1425 672 1426 213 1426 675 1426 675 1427 213 1427 173 1427 673 1428 675 1428 173 1428 673 1429 216 1429 675 1429 675 1430 216 1430 175 1430 674 1431 675 1431 175 1431 674 1432 177 1432 675 1432 675 1433 177 1433 178 1433 217 1434 675 1434 178 1434 217 1435 677 1435 675 1435 675 1436 677 1436 949 1436 949 1437 677 1437 950 1437 950 1438 677 1438 951 1438 951 1439 677 1439 1001 1439 1001 1440 677 1440 954 1440 954 1441 677 1441 1002 1441 1002 1442 677 1442 956 1442 956 1443 677 1443 1004 1443 1004 1444 677 1444 1006 1444 1006 1445 677 1445 676 1445 676 1446 677 1446 957 1446 957 1447 677 1447 678 1447 678 1448 677 1448 1009 1448 1009 1449 
677 1449 960 1449 960 1450 677 1450 1010 1450 1010 1451 677 1451 679 1451 679 1452 677 1452 961 1452 961 1453 677 1453 680 1453 680 1454 677 1454 681 1454 681 1455 677 1455 1014 1455 1014 1456 677 1456 962 1456 962 1457 677 1457 963 1457 947 1458 669 1458 682 1458 682 1459 670 1459 675 1459 683 1460 511 1460 735 1460 532 1461 684 1461 757 1461 757 1462 684 1462 686 1462 686 1463 684 1463 531 1463 685 1464 531 1464 687 1464 760 1465 687 1465 688 1465 760 1466 685 1466 687 1466 686 1467 531 1467 685 1467 687 1468 507 1468 688 1468 688 1469 507 1469 761 1469 761 1470 507 1470 506 1470 689 1471 506 1471 690 1471 689 1472 761 1472 506 1472 506 1473 505 1473 690 1473 690 1474 505 1474 763 1474 763 1475 505 1475 766 1475 766 1476 505 1476 691 1476 694 1477 691 1477 693 1477 692 1478 693 1478 695 1478 692 1479 694 1479 693 1479 766 1480 691 1480 694 1480 693 1481 696 1481 695 1481 695 1482 696 1482 574 1482 574 1483 696 1483 504 1483 697 1484 574 1484 504 1484 697 1485 698 1485 574 1485 574 1486 698 1486 503 1486 500 1487 574 1487 503 1487 500 1488 702 1488 574 1488 574 1489 702 1489 1803 1489 699 1490 574 1490 1803 1490 699 1491 1800 1491 574 1491 574 1492 1800 1492 700 1492 1796 1493 574 1493 700 1493 1796 1494 701 1494 574 1494 574 1495 701 1495 655 1495 702 1496 553 1496 1803 1496 1803 1497 553 1497 1806 1497 1806 1498 553 1498 556 1498 630 1499 1806 1499 556 1499 703 1500 492 1500 636 1500 704 1501 1816 1501 629 1501 662 1502 935 1502 651 1502 651 1503 935 1503 934 1503 933 1504 651 1504 934 1504 704 1505 629 1505 706 1505 705 1506 706 1506 629 1506 705 1507 704 1507 706 1507 705 1508 707 1508 704 1508 705 1509 728 1509 707 1509 707 1510 728 1510 708 1510 709 1511 708 1511 729 1511 730 1512 709 1512 729 1512 730 1513 716 1513 709 1513 730 1514 710 1514 716 1514 716 1515 710 1515 732 1515 717 1516 732 1516 711 1516 714 1517 711 1517 712 1517 713 1518 714 1518 712 1518 713 1519 715 1519 714 1519 713 1520 737 1520 715 1520 715 1521 737 1521 397 1521 714 1522 715 1522 397 1522 728 1523 729 1523 708 1523 716 1524 732 1524 717 1524 717 1525 711 1525 714 1525 397 1526 737 1526 718 1526 395 1527 718 1527 719 1527 721 1528 719 1528 740 1528 738 1529 721 1529 740 1529 738 1530 394 1530 721 1530 738 1531 720 1531 394 1531 738 1532 739 1532 720 1532 720 1533 739 1533 742 1533 392 1534 720 1534 742 1534 392 1535 394 1535 720 1535 737 1536 736 1536 718 1536 718 1537 736 1537 719 1537 395 1538 719 1538 721 1538 394 1539 395 1539 721 1539 395 1540 397 1540 718 1540 709 1541 707 1541 708 1541 576 1542 332 1542 722 1542 722 1543 332 1543 723 1543 723 1544 332 1544 756 1544 756 1545 332 1545 754 1545 754 1546 332 1546 724 1546 724 1547 332 1547 752 1547 752 1548 332 1548 751 1548 751 1549 332 1549 725 1549 750 1550 725 1550 769 1550 749 1551 769 1551 392 1551 748 1552 392 1552 747 1552 748 1553 749 1553 392 1553 751 1554 725 1554 750 1554 750 1555 769 1555 749 1555 742 1556 744 1556 392 1556 392 1557 744 1557 727 1557 726 1558 392 1558 727 1558 726 1559 745 1559 392 1559 392 1560 745 1560 747 1560 629 1561 631 1561 705 1561 705 1562 631 1562 728 1562 728 1563 631 1563 633 1563 729 1564 633 1564 733 1564 730 1565 733 1565 731 1565 710 1566 731 1566 734 1566 732 1567 734 1567 711 1567 732 1568 710 1568 734 1568 728 1569 633 1569 729 1569 729 1570 733 1570 730 1570 730 1571 731 1571 710 1571 734 1572 703 1572 711 1572 711 1573 703 1573 712 1573 712 1574 703 1574 636 1574 713 1575 636 1575 683 1575 737 1576 683 1576 735 1576 736 1577 735 1577 719 1577 736 1578 737 1578 735 1578 712 1579 636 1579 713 1579 713 1580 
683 1580 737 1580 735 1581 638 1581 719 1581 719 1582 638 1582 740 1582 740 1583 638 1583 639 1583 738 1584 639 1584 741 1584 739 1585 741 1585 742 1585 739 1586 738 1586 741 1586 740 1587 639 1587 738 1587 741 1588 743 1588 742 1588 742 1589 743 1589 744 1589 744 1590 743 1590 727 1590 727 1591 743 1591 640 1591 726 1592 640 1592 745 1592 726 1593 727 1593 640 1593 640 1594 746 1594 745 1594 745 1595 746 1595 747 1595 747 1596 746 1596 748 1596 748 1597 746 1597 641 1597 749 1598 641 1598 750 1598 749 1599 748 1599 641 1599 641 1600 644 1600 750 1600 750 1601 644 1601 751 1601 751 1602 644 1602 752 1602 752 1603 644 1603 753 1603 724 1604 753 1604 754 1604 724 1605 752 1605 753 1605 753 1606 755 1606 754 1606 754 1607 755 1607 756 1607 756 1608 755 1608 723 1608 723 1609 755 1609 757 1609 722 1610 757 1610 576 1610 722 1611 723 1611 757 1611 757 1612 686 1612 576 1612 576 1613 686 1613 577 1613 577 1614 686 1614 759 1614 759 1615 686 1615 685 1615 578 1616 685 1616 760 1616 579 1617 760 1617 688 1617 758 1618 688 1618 585 1618 758 1619 579 1619 688 1619 759 1620 685 1620 578 1620 578 1621 760 1621 579 1621 688 1622 761 1622 585 1622 585 1623 761 1623 764 1623 764 1624 761 1624 689 1624 581 1625 689 1625 690 1625 765 1626 690 1626 763 1626 583 1627 763 1627 762 1627 583 1628 765 1628 763 1628 764 1629 689 1629 581 1629 581 1630 690 1630 765 1630 763 1631 766 1631 762 1631 762 1632 766 1632 584 1632 584 1633 766 1633 694 1633 586 1634 694 1634 692 1634 767 1635 692 1635 695 1635 768 1636 695 1636 574 1636 768 1637 767 1637 695 1637 584 1638 694 1638 586 1638 586 1639 692 1639 767 1639 769 1640 725 1640 770 1640 800 1641 770 1641 782 1641 771 1642 782 1642 799 1642 772 1643 799 1643 773 1643 797 1644 773 1644 798 1644 390 1645 798 1645 774 1645 796 1646 774 1646 775 1646 389 1647 775 1647 784 1647 795 1648 784 1648 794 1648 776 1649 794 1649 791 1649 777 1650 791 1650 778 1650 779 1651 778 1651 793 1651 387 1652 793 1652 788 1652 388 1653 788 1653 780 1653 781 1654 780 1654 801 1654 781 1655 388 1655 780 1655 330 1656 782 1656 328 1656 330 1657 799 1657 782 1657 330 1658 783 1658 799 1658 799 1659 783 1659 773 1659 773 1660 783 1660 326 1660 798 1661 326 1661 789 1661 774 1662 789 1662 790 1662 775 1663 790 1663 324 1663 784 1664 324 1664 785 1664 794 1665 785 1665 323 1665 791 1666 323 1666 786 1666 778 1667 786 1667 787 1667 793 1668 787 1668 792 1668 788 1669 792 1669 321 1669 780 1670 321 1670 801 1670 780 1671 788 1671 321 1671 773 1672 326 1672 798 1672 798 1673 789 1673 774 1673 774 1674 790 1674 775 1674 775 1675 324 1675 784 1675 784 1676 785 1676 794 1676 794 1677 323 1677 791 1677 791 1678 786 1678 778 1678 778 1679 787 1679 793 1679 793 1680 792 1680 788 1680 388 1681 387 1681 788 1681 387 1682 779 1682 793 1682 779 1683 777 1683 778 1683 777 1684 776 1684 791 1684 776 1685 795 1685 794 1685 795 1686 389 1686 784 1686 389 1687 796 1687 775 1687 796 1688 390 1688 774 1688 390 1689 797 1689 798 1689 797 1690 772 1690 773 1690 772 1691 771 1691 799 1691 771 1692 800 1692 782 1692 800 1693 769 1693 770 1693 328 1694 782 1694 770 1694 725 1695 328 1695 770 1695 781 1696 801 1696 386 1696 386 1697 801 1697 812 1697 802 1698 803 1698 804 1698 302 1699 804 1699 805 1699 315 1700 805 1700 815 1700 834 1701 815 1701 833 1701 317 1702 833 1702 832 1702 318 1703 832 1703 817 1703 319 1704 817 1704 818 1704 831 1705 818 1705 827 1705 320 1706 827 1706 806 1706 807 1707 806 1707 808 1707 830 1708 808 1708 809 1708 829 1709 809 1709 823 1709 828 1710 823 1710 810 1710 811 1711 810 1711 824 1711 
812 1712 824 1712 386 1712 812 1713 811 1713 824 1713 814 1714 805 1714 813 1714 814 1715 815 1715 805 1715 814 1716 816 1716 815 1716 815 1717 816 1717 833 1717 833 1718 816 1718 378 1718 832 1719 378 1719 826 1719 817 1720 826 1720 819 1720 818 1721 819 1721 820 1721 827 1722 820 1722 383 1722 806 1723 383 1723 821 1723 808 1724 821 1724 384 1724 809 1725 384 1725 822 1725 823 1726 822 1726 385 1726 810 1727 385 1727 825 1727 824 1728 825 1728 386 1728 824 1729 810 1729 825 1729 833 1730 378 1730 832 1730 832 1731 826 1731 817 1731 817 1732 819 1732 818 1732 818 1733 820 1733 827 1733 827 1734 383 1734 806 1734 806 1735 821 1735 808 1735 808 1736 384 1736 809 1736 809 1737 822 1737 823 1737 823 1738 385 1738 810 1738 811 1739 828 1739 810 1739 828 1740 829 1740 823 1740 829 1741 830 1741 809 1741 830 1742 807 1742 808 1742 807 1743 320 1743 806 1743 320 1744 831 1744 827 1744 831 1745 319 1745 818 1745 319 1746 318 1746 817 1746 318 1747 317 1747 832 1747 317 1748 834 1748 833 1748 834 1749 315 1749 815 1749 315 1750 302 1750 805 1750 302 1751 802 1751 804 1751 813 1752 805 1752 804 1752 803 1753 813 1753 804 1753 1833 1754 867 1754 866 1754 376 1755 866 1755 865 1755 374 1756 865 1756 842 1756 373 1757 842 1757 844 1757 864 1758 844 1758 835 1758 380 1759 835 1759 836 1759 863 1760 836 1760 862 1760 371 1761 862 1761 837 1761 861 1762 837 1762 848 1762 860 1763 848 1763 854 1763 403 1764 854 1764 855 1764 859 1765 855 1765 838 1765 857 1766 838 1766 858 1766 856 1767 858 1767 841 1767 839 1768 841 1768 840 1768 839 1769 856 1769 841 1769 304 1770 865 1770 303 1770 304 1771 842 1771 865 1771 304 1772 843 1772 842 1772 842 1773 843 1773 844 1773 844 1774 843 1774 853 1774 835 1775 853 1775 845 1775 836 1776 845 1776 305 1776 862 1777 305 1777 846 1777 837 1778 846 1778 847 1778 848 1779 847 1779 849 1779 854 1780 849 1780 850 1780 855 1781 850 1781 851 1781 838 1782 851 1782 852 1782 858 1783 852 1783 314 1783 841 1784 314 1784 840 1784 841 1785 858 1785 314 1785 844 1786 853 1786 835 1786 835 1787 845 1787 836 1787 836 1788 305 1788 862 1788 862 1789 846 1789 837 1789 837 1790 847 1790 848 1790 848 1791 849 1791 854 1791 854 1792 850 1792 855 1792 855 1793 851 1793 838 1793 838 1794 852 1794 858 1794 856 1795 857 1795 858 1795 857 1796 859 1796 838 1796 859 1797 403 1797 855 1797 403 1798 860 1798 854 1798 860 1799 861 1799 848 1799 861 1800 371 1800 837 1800 371 1801 863 1801 862 1801 863 1802 380 1802 836 1802 380 1803 864 1803 835 1803 864 1804 373 1804 844 1804 373 1805 374 1805 842 1805 374 1806 376 1806 865 1806 376 1807 1833 1807 866 1807 303 1808 865 1808 866 1808 867 1809 303 1809 866 1809 1037 1810 839 1810 931 1810 868 1811 931 1811 1039 1811 868 1812 1037 1812 931 1812 871 1813 1693 1813 840 1813 871 1814 1654 1814 1693 1814 871 1815 869 1815 1654 1815 871 1816 1657 1816 869 1816 871 1817 870 1817 1657 1817 871 1818 1658 1818 870 1818 871 1819 1659 1819 1658 1819 871 1820 1660 1820 1659 1820 871 1821 872 1821 1660 1821 1660 1822 872 1822 1661 1822 1661 1823 872 1823 873 1823 1111 1824 1661 1824 873 1824 1111 1825 1662 1825 1661 1825 1111 1826 874 1826 1662 1826 1662 1827 874 1827 875 1827 920 1828 875 1828 921 1828 876 1829 921 1829 877 1829 1132 1830 876 1830 877 1830 1132 1831 878 1831 876 1831 1132 1832 1108 1832 878 1832 878 1833 1108 1833 879 1833 879 1834 1108 1834 1106 1834 880 1835 879 1835 1106 1835 880 1836 1664 1836 879 1836 880 1837 1105 1837 1664 1837 1664 1838 1105 1838 882 1838 882 1839 1105 1839 881 1839 883 1840 882 1840 881 1840 883 1841 884 1841 882 1841 
883 1842 1128 1842 884 1842 884 1843 1128 1843 1666 1843 1666 1844 1128 1844 1127 1844 886 1845 1666 1845 1127 1845 886 1846 885 1846 1666 1846 886 1847 887 1847 885 1847 885 1848 887 1848 1667 1848 1667 1849 887 1849 1125 1849 1104 1850 1667 1850 1125 1850 1104 1851 1668 1851 1667 1851 1104 1852 1102 1852 1668 1852 1668 1853 1102 1853 1124 1853 1669 1854 1124 1854 1101 1854 922 1855 1101 1855 1099 1855 888 1856 922 1856 1099 1856 888 1857 1670 1857 922 1857 888 1858 1097 1858 1670 1858 1670 1859 1097 1859 889 1859 889 1860 1097 1860 1122 1860 890 1861 889 1861 1122 1861 890 1862 1671 1862 889 1862 890 1863 891 1863 1671 1863 1671 1864 891 1864 1096 1864 1672 1865 1096 1865 1094 1865 892 1866 1672 1866 1094 1866 892 1867 1673 1867 1672 1867 892 1868 893 1868 1673 1868 1673 1869 893 1869 894 1869 894 1870 893 1870 1092 1870 895 1871 894 1871 1092 1871 895 1872 923 1872 894 1872 895 1873 1090 1873 923 1873 923 1874 1090 1874 924 1874 925 1875 924 1875 896 1875 1088 1876 925 1876 896 1876 1088 1877 897 1877 925 1877 1088 1878 898 1878 897 1878 897 1879 898 1879 899 1879 1674 1880 899 1880 900 1880 1086 1881 1674 1881 900 1881 1086 1882 1623 1882 1674 1882 1086 1883 1085 1883 1623 1883 1623 1884 1085 1884 1084 1884 1624 1885 1084 1885 1119 1885 901 1886 1624 1886 1119 1886 901 1887 1626 1887 1624 1887 901 1888 1117 1888 1626 1888 1626 1889 1117 1889 926 1889 902 1890 926 1890 1081 1890 1115 1891 902 1891 1081 1891 1115 1892 903 1892 902 1892 1115 1893 1114 1893 903 1893 903 1894 1114 1894 1080 1894 1079 1895 903 1895 1080 1895 1079 1896 1078 1896 903 1896 903 1897 1078 1897 904 1897 1065 1898 903 1898 904 1898 1065 1899 1075 1899 903 1899 903 1900 1075 1900 1064 1900 1062 1901 903 1901 1064 1901 1062 1902 1060 1902 903 1902 903 1903 1060 1903 1059 1903 905 1904 903 1904 1059 1904 905 1905 1057 1905 903 1905 903 1906 1057 1906 1675 1906 1675 1907 1057 1907 1628 1907 1628 1908 1057 1908 1676 1908 1676 1909 1057 1909 1629 1909 1629 1910 1057 1910 1630 1910 1630 1911 1057 1911 1632 1911 1632 1912 1057 1912 1633 1912 1633 1913 1057 1913 1055 1913 1634 1914 1055 1914 927 1914 928 1915 927 1915 1052 1915 1051 1916 928 1916 1052 1916 1051 1917 929 1917 928 1917 1051 1918 906 1918 929 1918 929 1919 906 1919 907 1919 1636 1920 907 1920 908 1920 930 1921 908 1921 1048 1921 909 1922 930 1922 1048 1922 909 1923 910 1923 930 1923 909 1924 1047 1924 910 1924 910 1925 1047 1925 911 1925 911 1926 1047 1926 1046 1926 912 1927 911 1927 1046 1927 912 1928 1638 1928 911 1928 912 1929 913 1929 1638 1929 1638 1930 913 1930 915 1930 915 1931 913 1931 1044 1931 914 1932 915 1932 1044 1932 914 1933 916 1933 915 1933 914 1934 1071 1934 916 1934 916 1935 1071 1935 917 1935 917 1936 1071 1936 1043 1936 919 1937 917 1937 1043 1937 919 1938 1639 1938 917 1938 919 1939 1683 1939 1639 1939 919 1940 1640 1940 1683 1940 919 1941 1641 1941 1640 1941 919 1942 1643 1942 1641 1942 919 1943 1645 1943 1643 1943 919 1944 1647 1944 1645 1944 919 1945 1648 1945 1647 1945 919 1946 1649 1946 1648 1946 919 1947 918 1947 1649 1947 919 1948 1687 1948 918 1948 919 1949 1689 1949 1687 1949 919 1950 931 1950 1689 1950 919 1951 1069 1951 931 1951 931 1952 1069 1952 1068 1952 1040 1953 931 1953 1068 1953 1040 1954 1039 1954 931 1954 1662 1955 875 1955 920 1955 920 1956 921 1956 876 1956 1668 1957 1124 1957 1669 1957 1669 1958 1101 1958 922 1958 1671 1959 1096 1959 1672 1959 923 1960 924 1960 925 1960 897 1961 899 1961 1674 1961 1623 1962 1084 1962 1624 1962 1626 1963 926 1963 902 1963 1633 1964 1055 1964 1634 1964 1634 1965 927 1965 928 1965 929 
1966 907 1966 1636 1966 1636 1967 908 1967 930 1967 1693 1968 1651 1968 840 1968 840 1969 1651 1969 1650 1969 931 1970 840 1970 1650 1970 931 1971 839 1971 840 1971 423 1972 338 1972 932 1972 932 1973 338 1973 933 1973 451 1974 933 1974 934 1974 989 1975 934 1975 935 1975 424 1976 935 1976 662 1976 990 1977 662 1977 936 1977 426 1978 936 1978 937 1978 452 1979 937 1979 991 1979 427 1980 991 1980 992 1980 993 1981 992 1981 994 1981 428 1982 994 1982 995 1982 938 1983 995 1983 940 1983 939 1984 938 1984 940 1984 939 1985 454 1985 938 1985 939 1986 941 1986 454 1986 454 1987 941 1987 457 1987 457 1988 941 1988 942 1988 943 1989 942 1989 944 1989 460 1990 944 1990 946 1990 945 1991 946 1991 666 1991 996 1992 666 1992 997 1992 462 1993 997 1993 668 1993 464 1994 668 1994 998 1994 463 1995 998 1995 947 1995 465 1996 947 1996 682 1996 948 1997 682 1997 675 1997 466 1998 675 1998 949 1998 999 1999 949 1999 950 1999 467 2000 950 2000 951 2000 1000 2001 951 2001 1001 2001 952 2002 1001 2002 954 2002 953 2003 954 2003 1002 2003 1003 2004 1002 2004 956 2004 955 2005 956 2005 1004 2005 1005 2006 1004 2006 1006 2006 468 2007 1006 2007 676 2007 469 2008 676 2008 957 2008 1007 2009 957 2009 678 2009 1008 2010 678 2010 1009 2010 958 2011 1009 2011 960 2011 959 2012 960 2012 1010 2012 470 2013 1010 2013 679 2013 1011 2014 679 2014 961 2014 1012 2015 961 2015 680 2015 1013 2016 680 2016 681 2016 471 2017 681 2017 1014 2017 1015 2018 1014 2018 962 2018 472 2019 962 2019 963 2019 473 2020 963 2020 964 2020 1016 2021 964 2021 965 2021 1017 2022 965 2022 966 2022 474 2023 966 2023 593 2023 475 2024 593 2024 967 2024 968 2025 967 2025 969 2025 1018 2026 969 2026 971 2026 970 2027 971 2027 1019 2027 476 2028 1019 2028 598 2028 972 2029 598 2029 599 2029 477 2030 599 2030 1020 2030 1021 2031 1020 2031 973 2031 478 2032 973 2032 601 2032 1022 2033 601 2033 603 2033 479 2034 603 2034 605 2034 1023 2035 605 2035 604 2035 480 2036 604 2036 606 2036 1024 2037 606 2037 608 2037 1025 2038 608 2038 1026 2038 1027 2039 1026 2039 974 2039 481 2040 974 2040 610 2040 1028 2041 610 2041 611 2041 975 2042 611 2042 613 2042 976 2043 613 2043 615 2043 440 2044 615 2044 616 2044 441 2045 616 2045 977 2045 439 2046 977 2046 617 2046 438 2047 617 2047 978 2047 437 2048 978 2048 979 2048 1029 2049 979 2049 1030 2049 434 2050 1030 2050 618 2050 433 2051 618 2051 1031 2051 429 2052 1031 2052 980 2052 453 2053 980 2053 981 2053 1032 2054 981 2054 982 2054 983 2055 982 2055 1033 2055 1034 2056 1033 2056 984 2056 1035 2057 984 2057 985 2057 425 2058 985 2058 987 2058 986 2059 987 2059 1036 2059 988 2060 1036 2060 620 2060 450 2061 620 2061 622 2061 422 2062 622 2062 624 2062 449 2063 422 2063 624 2063 932 2064 933 2064 451 2064 451 2065 934 2065 989 2065 989 2066 935 2066 424 2066 424 2067 662 2067 990 2067 990 2068 936 2068 426 2068 426 2069 937 2069 452 2069 452 2070 991 2070 427 2070 427 2071 992 2071 993 2071 993 2072 994 2072 428 2072 428 2073 995 2073 938 2073 457 2074 942 2074 943 2074 943 2075 944 2075 460 2075 460 2076 946 2076 945 2076 945 2077 666 2077 996 2077 996 2078 997 2078 462 2078 462 2079 668 2079 464 2079 464 2080 998 2080 463 2080 463 2081 947 2081 465 2081 465 2082 682 2082 948 2082 948 2083 675 2083 466 2083 466 2084 949 2084 999 2084 999 2085 950 2085 467 2085 467 2086 951 2086 1000 2086 1000 2087 1001 2087 952 2087 952 2088 954 2088 953 2088 953 2089 1002 2089 1003 2089 1003 2090 956 2090 955 2090 955 2091 1004 2091 1005 2091 1005 2092 1006 2092 468 2092 468 2093 676 2093 469 2093 469 2094 957 2094 1007 2094 1007 
2095 678 2095 1008 2095 1008 2096 1009 2096 958 2096 958 2097 960 2097 959 2097 959 2098 1010 2098 470 2098 470 2099 679 2099 1011 2099 1011 2100 961 2100 1012 2100 1012 2101 680 2101 1013 2101 1013 2102 681 2102 471 2102 471 2103 1014 2103 1015 2103 1015 2104 962 2104 472 2104 472 2105 963 2105 473 2105 473 2106 964 2106 1016 2106 1016 2107 965 2107 1017 2107 1017 2108 966 2108 474 2108 474 2109 593 2109 475 2109 475 2110 967 2110 968 2110 968 2111 969 2111 1018 2111 1018 2112 971 2112 970 2112 970 2113 1019 2113 476 2113 476 2114 598 2114 972 2114 972 2115 599 2115 477 2115 477 2116 1020 2116 1021 2116 1021 2117 973 2117 478 2117 478 2118 601 2118 1022 2118 1022 2119 603 2119 479 2119 479 2120 605 2120 1023 2120 1023 2121 604 2121 480 2121 480 2122 606 2122 1024 2122 1024 2123 608 2123 1025 2123 1025 2124 1026 2124 1027 2124 1027 2125 974 2125 481 2125 481 2126 610 2126 1028 2126 1028 2127 611 2127 975 2127 975 2128 613 2128 976 2128 976 2129 615 2129 440 2129 440 2130 616 2130 441 2130 441 2131 977 2131 439 2131 439 2132 617 2132 438 2132 438 2133 978 2133 437 2133 437 2134 979 2134 1029 2134 1029 2135 1030 2135 434 2135 434 2136 618 2136 433 2136 433 2137 1031 2137 429 2137 429 2138 980 2138 453 2138 453 2139 981 2139 1032 2139 1032 2140 982 2140 983 2140 983 2141 1033 2141 1034 2141 1034 2142 984 2142 1035 2142 1035 2143 985 2143 425 2143 425 2144 987 2144 986 2144 986 2145 1036 2145 988 2145 988 2146 620 2146 450 2146 450 2147 622 2147 422 2147 1037 2148 868 2148 404 2148 404 2149 868 2149 1038 2149 1038 2150 868 2150 1039 2150 1139 2151 1039 2151 1040 2151 1041 2152 1040 2152 1068 2152 1042 2153 1068 2153 1069 2153 1070 2154 1069 2154 919 2154 1141 2155 919 2155 1043 2155 1149 2156 1043 2156 1071 2156 1072 2157 1071 2157 914 2157 1152 2158 914 2158 1044 2158 1153 2159 1044 2159 913 2159 1154 2160 913 2160 912 2160 1073 2161 912 2161 1046 2161 1045 2162 1046 2162 1047 2162 1159 2163 1047 2163 909 2163 1161 2164 909 2164 1048 2164 1162 2165 1048 2165 908 2165 1049 2166 908 2166 907 2166 1166 2167 907 2167 906 2167 1050 2168 906 2168 1051 2168 1182 2169 1051 2169 1052 2169 1053 2170 1052 2170 927 2170 1054 2171 927 2171 1055 2171 1056 2172 1055 2172 1057 2172 1074 2173 1057 2173 905 2173 1188 2174 905 2174 1059 2174 1058 2175 1059 2175 1060 2175 1061 2176 1060 2176 1062 2176 1187 2177 1062 2177 1064 2177 1063 2178 1064 2178 1075 2178 1076 2179 1075 2179 1065 2179 1077 2180 1065 2180 904 2180 1066 2181 904 2181 1078 2181 1067 2182 1078 2182 1186 2182 1067 2183 1066 2183 1078 2183 1038 2184 1039 2184 1139 2184 1139 2185 1040 2185 1041 2185 1041 2186 1068 2186 1042 2186 1042 2187 1069 2187 1070 2187 1070 2188 919 2188 1141 2188 1141 2189 1043 2189 1149 2189 1149 2190 1071 2190 1072 2190 1072 2191 914 2191 1152 2191 1152 2192 1044 2192 1153 2192 1153 2193 913 2193 1154 2193 1154 2194 912 2194 1073 2194 1073 2195 1046 2195 1045 2195 1045 2196 1047 2196 1159 2196 1159 2197 909 2197 1161 2197 1161 2198 1048 2198 1162 2198 1162 2199 908 2199 1049 2199 1049 2200 907 2200 1166 2200 1166 2201 906 2201 1050 2201 1050 2202 1051 2202 1182 2202 1182 2203 1052 2203 1053 2203 1053 2204 927 2204 1054 2204 1054 2205 1055 2205 1056 2205 1056 2206 1057 2206 1074 2206 1074 2207 905 2207 1188 2207 1188 2208 1059 2208 1058 2208 1058 2209 1060 2209 1061 2209 1061 2210 1062 2210 1187 2210 1187 2211 1064 2211 1063 2211 1063 2212 1075 2212 1076 2212 1076 2213 1065 2213 1077 2213 1077 2214 904 2214 1066 2214 1078 2215 1079 2215 1186 2215 1186 2216 1079 2216 1112 2216 1112 2217 1079 2217 1080 2217 1113 2218 1080 
2218 1114 2218 1185 2219 1114 2219 1115 2219 1116 2220 1115 2220 1081 2220 1082 2221 1081 2221 926 2221 1184 2222 926 2222 1117 2222 1118 2223 1117 2223 901 2223 1183 2224 901 2224 1119 2224 1083 2225 1119 2225 1084 2225 1181 2226 1084 2226 1085 2226 1180 2227 1085 2227 1086 2227 1179 2228 1086 2228 900 2228 1120 2229 900 2229 899 2229 1178 2230 899 2230 898 2230 1087 2231 898 2231 1088 2231 1177 2232 1088 2232 896 2232 1176 2233 896 2233 924 2233 1089 2234 924 2234 1090 2234 1091 2235 1090 2235 895 2235 1175 2236 895 2236 1092 2236 1121 2237 1092 2237 893 2237 892 2238 1121 2238 893 2238 892 2239 1093 2239 1121 2239 892 2240 1094 2240 1093 2240 1093 2241 1094 2241 1095 2241 1095 2242 1094 2242 1096 2242 1174 2243 1096 2243 891 2243 1172 2244 891 2244 890 2244 1173 2245 890 2245 1122 2245 1123 2246 1122 2246 1097 2246 1171 2247 1097 2247 888 2247 1170 2248 888 2248 1099 2248 1098 2249 1099 2249 1101 2249 1100 2250 1101 2250 1124 2250 1169 2251 1124 2251 1102 2251 1103 2252 1102 2252 1104 2252 1189 2253 1104 2253 1125 2253 1190 2254 1125 2254 887 2254 1191 2255 887 2255 886 2255 1126 2256 886 2256 1127 2256 1192 2257 1127 2257 1128 2257 1129 2258 1128 2258 883 2258 1130 2259 883 2259 881 2259 1193 2260 881 2260 1105 2260 1131 2261 1105 2261 880 2261 1196 2262 880 2262 1106 2262 1107 2263 1106 2263 1108 2263 1109 2264 1108 2264 1132 2264 1133 2265 1132 2265 877 2265 1134 2266 877 2266 921 2266 1143 2267 921 2267 875 2267 1110 2268 875 2268 874 2268 1135 2269 874 2269 1111 2269 1140 2270 1111 2270 873 2270 1138 2271 873 2271 872 2271 1137 2272 872 2272 871 2272 1147 2273 1137 2273 871 2273 1112 2274 1080 2274 1113 2274 1113 2275 1114 2275 1185 2275 1185 2276 1115 2276 1116 2276 1116 2277 1081 2277 1082 2277 1082 2278 926 2278 1184 2278 1184 2279 1117 2279 1118 2279 1118 2280 901 2280 1183 2280 1183 2281 1119 2281 1083 2281 1083 2282 1084 2282 1181 2282 1181 2283 1085 2283 1180 2283 1180 2284 1086 2284 1179 2284 1179 2285 900 2285 1120 2285 1120 2286 899 2286 1178 2286 1178 2287 898 2287 1087 2287 1087 2288 1088 2288 1177 2288 1177 2289 896 2289 1176 2289 1176 2290 924 2290 1089 2290 1089 2291 1090 2291 1091 2291 1091 2292 895 2292 1175 2292 1175 2293 1092 2293 1121 2293 1095 2294 1096 2294 1174 2294 1174 2295 891 2295 1172 2295 1172 2296 890 2296 1173 2296 1173 2297 1122 2297 1123 2297 1123 2298 1097 2298 1171 2298 1171 2299 888 2299 1170 2299 1170 2300 1099 2300 1098 2300 1098 2301 1101 2301 1100 2301 1100 2302 1124 2302 1169 2302 1169 2303 1102 2303 1103 2303 1103 2304 1104 2304 1189 2304 1189 2305 1125 2305 1190 2305 1190 2306 887 2306 1191 2306 1191 2307 886 2307 1126 2307 1126 2308 1127 2308 1192 2308 1192 2309 1128 2309 1129 2309 1129 2310 883 2310 1130 2310 1130 2311 881 2311 1193 2311 1193 2312 1105 2312 1131 2312 1131 2313 880 2313 1196 2313 1196 2314 1106 2314 1107 2314 1107 2315 1108 2315 1109 2315 1109 2316 1132 2316 1133 2316 1133 2317 877 2317 1134 2317 1134 2318 921 2318 1143 2318 1143 2319 875 2319 1110 2319 1110 2320 874 2320 1135 2320 1135 2321 1111 2321 1140 2321 1140 2322 873 2322 1138 2322 1138 2323 872 2323 1137 2323 313 2324 1136 2324 1147 2324 1147 2325 1136 2325 404 2325 1137 2326 404 2326 1038 2326 1138 2327 1038 2327 1139 2327 1140 2328 1139 2328 1041 2328 1135 2329 1041 2329 1042 2329 1110 2330 1042 2330 1070 2330 1143 2331 1070 2331 1141 2331 1142 2332 1141 2332 1690 2332 1142 2333 1143 2333 1141 2333 1142 2334 1691 2334 1143 2334 1143 2335 1691 2335 1692 2335 1652 2336 1143 2336 1692 2336 1652 2337 1653 2337 1143 2337 1143 2338 1653 2338 1655 2338 1656 2339 1143 
2339 1655 2339 1656 2340 1694 2340 1143 2340 1143 2341 1694 2341 1695 2341 1696 2342 1143 2342 1695 2342 1696 2343 1697 2343 1143 2343 1143 2344 1697 2344 1144 2344 1145 2345 1143 2345 1144 2345 1145 2346 1146 2346 1143 2346 1143 2347 1146 2347 1134 2347 1134 2348 1146 2348 1663 2348 1133 2349 1663 2349 1109 2349 1133 2350 1134 2350 1663 2350 1147 2351 404 2351 1137 2351 1137 2352 1038 2352 1138 2352 1138 2353 1139 2353 1140 2353 1140 2354 1041 2354 1135 2354 1135 2355 1042 2355 1110 2355 1110 2356 1070 2356 1143 2356 1690 2357 1141 2357 1688 2357 1688 2358 1141 2358 1149 2358 1148 2359 1149 2359 1686 2359 1148 2360 1688 2360 1149 2360 1072 2361 1646 2361 1149 2361 1072 2362 1150 2362 1646 2362 1072 2363 1644 2363 1150 2363 1072 2364 1642 2364 1644 2364 1072 2365 1152 2365 1642 2365 1642 2366 1152 2366 1151 2366 1151 2367 1152 2367 1684 2367 1684 2368 1152 2368 1153 2368 1682 2369 1153 2369 1681 2369 1682 2370 1684 2370 1153 2370 1153 2371 1154 2371 1681 2371 1681 2372 1154 2372 1155 2372 1155 2373 1154 2373 1680 2373 1680 2374 1154 2374 1073 2374 1156 2375 1073 2375 1157 2375 1156 2376 1680 2376 1073 2376 1073 2377 1045 2377 1157 2377 1157 2378 1045 2378 1679 2378 1679 2379 1045 2379 1637 2379 1637 2380 1045 2380 1159 2380 1158 2381 1159 2381 1161 2381 1160 2382 1161 2382 1678 2382 1160 2383 1158 2383 1161 2383 1637 2384 1159 2384 1158 2384 1161 2385 1162 2385 1678 2385 1678 2386 1162 2386 1635 2386 1635 2387 1162 2387 1163 2387 1163 2388 1162 2388 1049 2388 1631 2389 1049 2389 1164 2389 1631 2390 1163 2390 1049 2390 1049 2391 1166 2391 1164 2391 1164 2392 1166 2392 1677 2392 1677 2393 1166 2393 1165 2393 1165 2394 1166 2394 1167 2394 1167 2395 1166 2395 1050 2395 1627 2396 1050 2396 1200 2396 1627 2397 1167 2397 1050 2397 1182 2398 1168 2398 1050 2398 1182 2399 1710 2399 1168 2399 1182 2400 1103 2400 1710 2400 1182 2401 1169 2401 1103 2401 1182 2402 1100 2402 1169 2402 1182 2403 1098 2403 1100 2403 1182 2404 1170 2404 1098 2404 1182 2405 1171 2405 1170 2405 1182 2406 1123 2406 1171 2406 1182 2407 1173 2407 1123 2407 1182 2408 1172 2408 1173 2408 1182 2409 1174 2409 1172 2409 1182 2410 1095 2410 1174 2410 1182 2411 1093 2411 1095 2411 1182 2412 1121 2412 1093 2412 1182 2413 1175 2413 1121 2413 1182 2414 1091 2414 1175 2414 1182 2415 1089 2415 1091 2415 1182 2416 1176 2416 1089 2416 1182 2417 1177 2417 1176 2417 1182 2418 1087 2418 1177 2418 1182 2419 1178 2419 1087 2419 1182 2420 1120 2420 1178 2420 1182 2421 1179 2421 1120 2421 1182 2422 1180 2422 1179 2422 1182 2423 1181 2423 1180 2423 1182 2424 1083 2424 1181 2424 1182 2425 1183 2425 1083 2425 1182 2426 1118 2426 1183 2426 1182 2427 1184 2427 1118 2427 1182 2428 1082 2428 1184 2428 1182 2429 1116 2429 1082 2429 1182 2430 1185 2430 1116 2430 1182 2431 1113 2431 1185 2431 1182 2432 1112 2432 1113 2432 1182 2433 1186 2433 1112 2433 1182 2434 1067 2434 1186 2434 1182 2435 1066 2435 1067 2435 1182 2436 1077 2436 1066 2436 1182 2437 1076 2437 1077 2437 1182 2438 1063 2438 1076 2438 1182 2439 1187 2439 1063 2439 1182 2440 1061 2440 1187 2440 1182 2441 1058 2441 1061 2441 1182 2442 1188 2442 1058 2442 1182 2443 1074 2443 1188 2443 1182 2444 1056 2444 1074 2444 1182 2445 1054 2445 1056 2445 1182 2446 1053 2446 1054 2446 1189 2447 1700 2447 1103 2447 1189 2448 1190 2448 1700 2448 1700 2449 1190 2449 1665 2449 1665 2450 1190 2450 1191 2450 1699 2451 1191 2451 1126 2451 1192 2452 1699 2452 1126 2452 1192 2453 1698 2453 1699 2453 1192 2454 1129 2454 1698 2454 1698 2455 1129 2455 1130 2455 1195 2456 1130 2456 1193 2456 1131 2457 1195 2457 1193 2457 
1131 2458 1194 2458 1195 2458 1131 2459 1196 2459 1194 2459 1194 2460 1196 2460 1107 2460 1197 2461 1107 2461 1109 2461 1663 2462 1197 2462 1109 2462 1665 2463 1191 2463 1699 2463 1698 2464 1130 2464 1195 2464 1194 2465 1107 2465 1197 2465 1168 2466 1625 2466 1050 2466 1050 2467 1625 2467 1198 2467 1199 2468 1050 2468 1198 2468 1199 2469 1200 2469 1050 2469 1646 2470 1685 2470 1149 2470 1149 2471 1685 2471 1686 2471 1700 2472 1201 2472 1103 2472 1103 2473 1201 2473 1701 2473 1702 2474 1103 2474 1701 2474 1702 2475 1703 2475 1103 2475 1103 2476 1703 2476 1704 2476 1202 2477 1103 2477 1704 2477 1202 2478 1203 2478 1103 2478 1103 2479 1203 2479 1705 2479 1706 2480 1103 2480 1705 2480 1706 2481 1707 2481 1103 2481 1103 2482 1707 2482 1708 2482 1204 2483 1103 2483 1708 2483 1204 2484 1709 2484 1103 2484 1103 2485 1709 2485 1710 2485 1206 2486 1205 2486 1431 2486 1206 2487 1208 2487 1205 2487 1206 2488 1207 2488 1208 2488 1208 2489 1207 2489 1238 2489 1238 2490 1207 2490 1453 2490 1531 2491 1453 2491 1239 2491 1532 2492 1239 2492 1451 2492 1209 2493 1451 2493 1452 2493 1240 2494 1452 2494 1450 2494 1241 2495 1450 2495 1210 2495 1242 2496 1210 2496 1211 2496 1534 2497 1211 2497 1449 2497 1243 2498 1449 2498 1448 2498 1244 2499 1448 2499 1447 2499 1535 2500 1447 2500 1446 2500 1212 2501 1446 2501 1445 2501 1245 2502 1445 2502 1444 2502 1536 2503 1444 2503 1213 2503 1564 2504 1213 2504 1214 2504 1565 2505 1214 2505 1246 2505 1566 2506 1246 2506 1442 2506 1215 2507 1442 2507 1216 2507 1571 2508 1216 2508 1441 2508 1217 2509 1441 2509 1440 2509 1567 2510 1440 2510 1218 2510 1219 2511 1218 2511 1439 2511 1220 2512 1439 2512 1438 2512 1247 2513 1438 2513 1222 2513 1221 2514 1222 2514 1437 2514 1223 2515 1437 2515 1224 2515 1554 2516 1224 2516 1225 2516 1248 2517 1225 2517 1226 2517 1556 2518 1226 2518 1227 2518 1228 2519 1227 2519 1249 2519 1557 2520 1249 2520 1250 2520 1251 2521 1250 2521 1229 2521 1558 2522 1229 2522 1436 2522 1559 2523 1436 2523 1230 2523 1560 2524 1230 2524 1435 2524 1231 2525 1435 2525 1233 2525 1232 2526 1233 2526 1252 2526 1253 2527 1252 2527 1433 2527 1561 2528 1433 2528 1234 2528 1254 2529 1234 2529 1235 2529 1562 2530 1235 2530 1432 2530 1563 2531 1432 2531 1236 2531 1255 2532 1236 2532 1256 2532 1530 2533 1256 2533 1237 2533 1528 2534 1237 2534 1257 2534 1529 2535 1257 2535 1431 2535 1205 2536 1529 2536 1431 2536 1238 2537 1453 2537 1531 2537 1531 2538 1239 2538 1532 2538 1532 2539 1451 2539 1209 2539 1209 2540 1452 2540 1240 2540 1240 2541 1450 2541 1241 2541 1241 2542 1210 2542 1242 2542 1242 2543 1211 2543 1534 2543 1534 2544 1449 2544 1243 2544 1243 2545 1448 2545 1244 2545 1244 2546 1447 2546 1535 2546 1535 2547 1446 2547 1212 2547 1212 2548 1445 2548 1245 2548 1245 2549 1444 2549 1536 2549 1536 2550 1213 2550 1564 2550 1564 2551 1214 2551 1565 2551 1565 2552 1246 2552 1566 2552 1566 2553 1442 2553 1215 2553 1215 2554 1216 2554 1571 2554 1571 2555 1441 2555 1217 2555 1217 2556 1440 2556 1567 2556 1567 2557 1218 2557 1219 2557 1219 2558 1439 2558 1220 2558 1220 2559 1438 2559 1247 2559 1247 2560 1222 2560 1221 2560 1221 2561 1437 2561 1223 2561 1223 2562 1224 2562 1554 2562 1554 2563 1225 2563 1248 2563 1248 2564 1226 2564 1556 2564 1556 2565 1227 2565 1228 2565 1228 2566 1249 2566 1557 2566 1557 2567 1250 2567 1251 2567 1251 2568 1229 2568 1558 2568 1558 2569 1436 2569 1559 2569 1559 2570 1230 2570 1560 2570 1560 2571 1435 2571 1231 2571 1231 2572 1233 2572 1232 2572 1232 2573 1252 2573 1253 2573 1253 2574 1433 2574 1561 2574 1561 2575 1234 2575 1254 2575 1254 2576 1235 
2576 1562 2576 1562 2577 1432 2577 1563 2577 1563 2578 1236 2578 1255 2578 1255 2579 1256 2579 1530 2579 1530 2580 1237 2580 1528 2580 1528 2581 1257 2581 1529 2581 1258 2582 1568 2582 1280 2582 1258 2583 1572 2583 1568 2583 1258 2584 1260 2584 1572 2584 1572 2585 1260 2585 1259 2585 1259 2586 1260 2586 1281 2586 1261 2587 1281 2587 1263 2587 1262 2588 1263 2588 1282 2588 1570 2589 1282 2589 1264 2589 1569 2590 1264 2590 1477 2590 1283 2591 1477 2591 1476 2591 1284 2592 1476 2592 1285 2592 1286 2593 1285 2593 1287 2593 1537 2594 1287 2594 1475 2594 1288 2595 1475 2595 1473 2595 1289 2596 1473 2596 1474 2596 1265 2597 1474 2597 1472 2597 1290 2598 1472 2598 1291 2598 1538 2599 1291 2599 1470 2599 1539 2600 1470 2600 1266 2600 1540 2601 1266 2601 1292 2601 1541 2602 1292 2602 1469 2602 1293 2603 1469 2603 1267 2603 1294 2604 1267 2604 1467 2604 1295 2605 1467 2605 1468 2605 1542 2606 1468 2606 1296 2606 1268 2607 1296 2607 1297 2607 1298 2608 1297 2608 1270 2608 1269 2609 1270 2609 1466 2609 1299 2610 1466 2610 1465 2610 1543 2611 1465 2611 1464 2611 1271 2612 1464 2612 1300 2612 1272 2613 1300 2613 1273 2613 1301 2614 1273 2614 1462 2614 1302 2615 1462 2615 1303 2615 1304 2616 1303 2616 1274 2616 1544 2617 1274 2617 1461 2617 1305 2618 1461 2618 1275 2618 1546 2619 1275 2619 1276 2619 1547 2620 1276 2620 1460 2620 1548 2621 1460 2621 1459 2621 1306 2622 1459 2622 1307 2622 1308 2623 1307 2623 1458 2623 1549 2624 1458 2624 1309 2624 1310 2625 1309 2625 1311 2625 1550 2626 1311 2626 1277 2626 1312 2627 1277 2627 1456 2627 1278 2628 1456 2628 1279 2628 1551 2629 1279 2629 1455 2629 1552 2630 1455 2630 1454 2630 1553 2631 1454 2631 1280 2631 1568 2632 1553 2632 1280 2632 1259 2633 1281 2633 1261 2633 1261 2634 1263 2634 1262 2634 1262 2635 1282 2635 1570 2635 1570 2636 1264 2636 1569 2636 1569 2637 1477 2637 1283 2637 1283 2638 1476 2638 1284 2638 1284 2639 1285 2639 1286 2639 1286 2640 1287 2640 1537 2640 1537 2641 1475 2641 1288 2641 1288 2642 1473 2642 1289 2642 1289 2643 1474 2643 1265 2643 1265 2644 1472 2644 1290 2644 1290 2645 1291 2645 1538 2645 1538 2646 1470 2646 1539 2646 1539 2647 1266 2647 1540 2647 1540 2648 1292 2648 1541 2648 1541 2649 1469 2649 1293 2649 1293 2650 1267 2650 1294 2650 1294 2651 1467 2651 1295 2651 1295 2652 1468 2652 1542 2652 1542 2653 1296 2653 1268 2653 1268 2654 1297 2654 1298 2654 1298 2655 1270 2655 1269 2655 1269 2656 1466 2656 1299 2656 1299 2657 1465 2657 1543 2657 1543 2658 1464 2658 1271 2658 1271 2659 1300 2659 1272 2659 1272 2660 1273 2660 1301 2660 1301 2661 1462 2661 1302 2661 1302 2662 1303 2662 1304 2662 1304 2663 1274 2663 1544 2663 1544 2664 1461 2664 1305 2664 1305 2665 1275 2665 1546 2665 1546 2666 1276 2666 1547 2666 1547 2667 1460 2667 1548 2667 1548 2668 1459 2668 1306 2668 1306 2669 1307 2669 1308 2669 1308 2670 1458 2670 1549 2670 1549 2671 1309 2671 1310 2671 1310 2672 1311 2672 1550 2672 1550 2673 1277 2673 1312 2673 1312 2674 1456 2674 1278 2674 1278 2675 1279 2675 1551 2675 1551 2676 1455 2676 1552 2676 1552 2677 1454 2677 1553 2677 1314 2678 1313 2678 1479 2678 1314 2679 1587 2679 1313 2679 1314 2680 1315 2680 1587 2680 1587 2681 1315 2681 1586 2681 1586 2682 1315 2682 1498 2682 1585 2683 1498 2683 1341 2683 1584 2684 1341 2684 1317 2684 1316 2685 1317 2685 1497 2685 1318 2686 1497 2686 1319 2686 1583 2687 1319 2687 1320 2687 1582 2688 1320 2688 1495 2688 1581 2689 1495 2689 1321 2689 1342 2690 1321 2690 1322 2690 1343 2691 1322 2691 1344 2691 1323 2692 1344 2692 1345 2692 1578 2693 1345 2693 1324 2693 1346 2694 1324 2694 1494 2694 
1347 2695 1494 2695 1493 2695 1348 2696 1493 2696 1349 2696 1577 2697 1349 2697 1491 2697 1576 2698 1491 2698 1490 2698 1575 2699 1490 2699 1489 2699 1325 2700 1489 2700 1326 2700 1327 2701 1326 2701 1350 2701 1328 2702 1350 2702 1487 2702 1574 2703 1487 2703 1351 2703 1352 2704 1351 2704 1353 2704 1602 2705 1353 2705 1354 2705 1355 2706 1354 2706 1356 2706 1603 2707 1356 2707 1329 2707 1357 2708 1329 2708 1486 2708 1614 2709 1486 2709 1485 2709 1330 2710 1485 2710 1331 2710 1358 2711 1331 2711 1359 2711 1332 2712 1359 2712 1333 2712 1616 2713 1333 2713 1360 2713 1361 2714 1360 2714 1334 2714 1617 2715 1334 2715 1362 2715 1363 2716 1362 2716 1364 2716 1618 2717 1364 2717 1365 2717 1335 2718 1365 2718 1336 2718 1620 2719 1336 2719 1484 2719 1366 2720 1484 2720 1337 2720 1367 2721 1337 2721 1338 2721 1368 2722 1338 2722 1339 2722 1621 2723 1339 2723 1482 2723 1622 2724 1482 2724 1481 2724 1369 2725 1481 2725 1480 2725 1370 2726 1480 2726 1340 2726 1371 2727 1340 2727 1479 2727 1313 2728 1371 2728 1479 2728 1586 2729 1498 2729 1585 2729 1585 2730 1341 2730 1584 2730 1584 2731 1317 2731 1316 2731 1316 2732 1497 2732 1318 2732 1318 2733 1319 2733 1583 2733 1583 2734 1320 2734 1582 2734 1582 2735 1495 2735 1581 2735 1581 2736 1321 2736 1342 2736 1342 2737 1322 2737 1343 2737 1343 2738 1344 2738 1323 2738 1323 2739 1345 2739 1578 2739 1578 2740 1324 2740 1346 2740 1346 2741 1494 2741 1347 2741 1347 2742 1493 2742 1348 2742 1348 2743 1349 2743 1577 2743 1577 2744 1491 2744 1576 2744 1576 2745 1490 2745 1575 2745 1575 2746 1489 2746 1325 2746 1325 2747 1326 2747 1327 2747 1327 2748 1350 2748 1328 2748 1328 2749 1487 2749 1574 2749 1574 2750 1351 2750 1352 2750 1352 2751 1353 2751 1602 2751 1602 2752 1354 2752 1355 2752 1355 2753 1356 2753 1603 2753 1603 2754 1329 2754 1357 2754 1357 2755 1486 2755 1614 2755 1614 2756 1485 2756 1330 2756 1330 2757 1331 2757 1358 2757 1358 2758 1359 2758 1332 2758 1332 2759 1333 2759 1616 2759 1616 2760 1360 2760 1361 2760 1361 2761 1334 2761 1617 2761 1617 2762 1362 2762 1363 2762 1363 2763 1364 2763 1618 2763 1618 2764 1365 2764 1335 2764 1335 2765 1336 2765 1620 2765 1620 2766 1484 2766 1366 2766 1366 2767 1337 2767 1367 2767 1367 2768 1338 2768 1368 2768 1368 2769 1339 2769 1621 2769 1621 2770 1482 2770 1622 2770 1622 2771 1481 2771 1369 2771 1369 2772 1480 2772 1370 2772 1370 2773 1340 2773 1371 2773 1372 2774 1600 2774 1399 2774 1372 2775 1373 2775 1600 2775 1372 2776 1506 2776 1373 2776 1373 2777 1506 2777 1374 2777 1374 2778 1506 2778 1507 2778 1401 2779 1507 2779 1402 2779 1403 2780 1402 2780 1404 2780 1598 2781 1404 2781 1375 2781 1597 2782 1375 2782 1519 2782 1405 2783 1519 2783 1376 2783 1406 2784 1376 2784 1377 2784 1407 2785 1377 2785 1518 2785 1595 2786 1518 2786 1378 2786 1596 2787 1378 2787 1379 2787 1593 2788 1379 2788 1408 2788 1592 2789 1408 2789 1380 2789 1590 2790 1380 2790 1381 2790 1591 2791 1381 2791 1517 2791 1382 2792 1517 2792 1383 2792 1409 2793 1383 2793 1516 2793 1589 2794 1516 2794 1515 2794 1410 2795 1515 2795 1384 2795 1385 2796 1384 2796 1386 2796 1612 2797 1386 2797 1411 2797 1412 2798 1411 2798 1413 2798 1414 2799 1413 2799 1513 2799 1611 2800 1513 2800 1512 2800 1387 2801 1512 2801 1388 2801 1610 2802 1388 2802 1389 2802 1390 2803 1389 2803 1391 2803 1415 2804 1391 2804 1511 2804 1609 2805 1511 2805 1392 2805 1416 2806 1392 2806 1393 2806 1608 2807 1393 2807 1510 2807 1607 2808 1510 2808 1394 2808 1395 2809 1394 2809 1509 2809 1605 2810 1509 2810 1508 2810 1606 2811 1508 2811 1417 2811 1604 2812 1417 2812 1418 2812 1419 2813 1418 
[COLLADA triangle index data (&lt;p&gt; elements) omitted — raw mesh indices, not human-reviewable]

+ [COLLADA asset metadata: author "Blender User"; authoring tool Blender 2.80.75 (commit date 2019-07-29, commit time 14:47, hash f6cb5f54494e); created/modified 2019-10-18T17:26:06; up axis Z_UP]
+ [COLLADA mesh source arrays omitted: vertex position and normal float data, plus all-zero texture coordinates]
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + + + + + + + + + + + + + + +

0 0 0 2 0 1 3 0 2 3 1 3 4 1 4 0 1 5 1 2 6 6 2 7 5 2 8 1 3 9 7 3 10 2 3 11 2 4 12 8 4 13 3 4 14 3 5 15 9 5 16 4 5 17 0 6 18 4 6 19 10 6 20 11 7 21 6 7 22 12 7 23 5 8 24 13 8 25 1 8 26 14 9 27 6 9 28 0 9 29 7 10 30 1 10 31 15 10 32 2 11 33 7 11 34 16 11 35 2 12 36 17 12 37 8 12 38 4 13 39 9 13 40 18 13 41 19 14 42 0 14 43 10 14 44 4 15 45 15 15 46 10 15 47 20 16 48 12 16 49 21 16 50 5 17 51 11 17 52 22 17 53 23 18 54 12 18 55 6 18 56 5 19 57 22 19 58 13 19 59 13 20 60 24 20 61 1 20 62 19 21 63 14 21 64 0 21 65 23 22 66 6 22 67 14 22 68 1 23 69 24 23 70 15 23 71 2 24 72 16 24 73 25 24 74 16 25 75 7 25 76 26 25 77 17 26 78 27 26 79 8 26 80 18 27 81 7 27 82 4 27 83 18 28 84 9 28 85 28 28 86 19 29 87 15 29 88 24 29 89 29 30 90 21 30 91 30 30 92 31 31 93 11 31 94 20 31 95 21 32 96 12 32 97 32 32 98 22 33 99 11 33 100 33 33 101 32 34 102 12 34 103 23 34 104 23 35 105 13 35 106 22 35 107 14 36 108 24 36 109 13 36 110 25 37 111 16 37 112 34 37 113 26 38 114 35 38 115 16 38 116 36 39 117 27 39 118 17 39 119 37 40 120 8 40 121 27 40 122 38 41 123 18 41 124 28 41 125 40 42 126 9 42 127 39 42 128 30 43 129 42 43 130 29 43 131 20 44 132 29 44 133 43 44 134 21 45 135 44 45 136 30 45 137 45 46 138 31 46 139 20 46 140 46 47 141 11 47 142 31 47 143 21 48 144 32 48 145 47 48 146 33 49 147 23 49 148 22 49 149 33 50 150 11 50 151 48 50 152 34 51 153 49 51 154 25 51 155 17 52 156 52 52 157 36 52 158 53 53 159 37 53 160 27 53 161 54 54 162 8 54 163 37 54 164 38 55 165 50 55 166 18 55 167 39 56 168 54 56 169 37 56 170 55 57 171 40 57 172 39 57 173 40 58 174 58 58 175 28 58 176 43 59 177 29 59 178 42 59 179 44 60 180 41 60 181 30 60 182 41 61 183 60 61 184 42 61 185 45 62 186 20 62 187 43 62 188 21 63 189 47 63 190 44 63 191 61 64 192 31 64 193 45 64 194 62 65 195 46 65 196 31 65 197 64 66 198 11 66 199 46 66 200 48 67 201 32 67 202 33 67 203 48 68 204 11 68 205 65 68 206 66 69 207 49 69 208 34 69 209 49 70 210 67 70 211 25 70 212 68 71 213 36 71 214 52 71 215 51 72 216 17 72 217 69 72 218 55 73 219 37 73 220 53 73 221 56 74 222 84 74 223 72 74 224 56 75 225 57 75 226 40 75 227 58 76 228 57 76 229 70 76 230 60 77 231 43 77 232 42 77 233 74 78 234 41 78 235 44 78 236 74 79 237 59 79 238 41 79 239 75 80 240 59 80 241 76 80 242 43 81 243 61 81 244 45 81 245 47 82 246 65 82 247 44 82 248 78 83 249 62 83 250 61 83 251 63 84 252 62 84 253 78 84 254 46 85 255 79 85 256 64 85 257 80 86 258 11 86 259 64 86 260 65 87 261 11 87 262 81 87 263 34 88 264 82 88 265 66 88 266 66 89 267 72 89 268 49 89 269 67 90 270 49 90 271 68 90 272 25 91 273 67 91 274 69 91 275 51 92 276 38 92 277 70 92 278 68 93 279 52 93 280 83 93 281 51 94 282 69 94 283 82 94 284 52 95 285 70 95 286 71 95 287 70 96 288 57 96 289 71 96 290 71 97 291 57 97 292 73 97 293 60 98 294 75 98 295 43 98 296 81 99 297 44 99 298 65 99 299 85 100 300 59 100 301 74 100 302 76 101 303 87 101 304 75 101 305 85 102 306 76 102 307 59 102 308 77 103 309 88 103 310 89 103 311 77 104 312 78 104 313 61 104 314 90 105 315 63 105 316 78 105 317 64 106 318 91 106 319 80 106 320 90 107 321 79 107 322 63 107 323 92 108 324 11 108 325 80 108 326 81 109 327 11 109 328 93 109 329 66 110 330 82 110 331 94 110 332 66 111 333 94 111 334 72 111 335 49 89 336 72 89 337 84 89 338 67 112 339 68 112 340 94 112 341 68 113 342 49 113 343 84 113 344 67 114 345 82 114 346 69 114 347 94 112 348 68 112 349 83 112 350 43 115 351 75 115 352 87 115 353 93 116 354 74 116 355 81 116 356 95 117 357 86 117 358 76 117 359 86 118 360 97 118 361 87 118 362 95 119 363 76 119 364 85 119 365 77 120 366 89 120 367 
78 120 368 98 121 369 88 121 370 43 121 371 99 122 372 89 122 373 88 122 374 98 123 375 90 123 376 78 123 377 80 124 378 102 124 379 92 124 380 79 125 381 103 125 382 91 125 383 90 126 384 103 126 385 79 126 386 104 127 387 11 127 388 92 127 389 93 128 390 11 128 391 104 128 392 94 129 393 82 129 394 67 129 395 72 130 396 94 130 397 83 130 398 97 131 399 43 131 400 87 131 401 104 132 402 85 132 403 93 132 404 102 133 405 86 133 406 95 133 407 102 134 408 96 134 409 86 134 410 96 135 411 105 135 412 97 135 413 78 136 414 89 136 415 100 136 416 98 137 417 43 137 418 101 137 419 98 138 420 99 138 421 88 138 422 100 139 423 106 139 424 107 139 425 101 140 426 103 140 427 90 140 428 91 141 429 96 141 430 102 141 431 92 142 432 95 142 433 104 142 434 91 143 435 103 143 436 96 143 437 97 144 438 105 144 439 43 144 440 107 145 441 78 145 442 100 145 443 43 146 444 105 146 445 101 146 446 98 147 447 106 147 448 99 147 449 109 148 450 18 148 451 50 148 452 35 149 453 26 149 454 112 149 455 109 150 456 110 150 457 18 150 458 114 151 459 26 151 460 111 151 461 118 152 462 35 152 463 112 152 464 112 153 465 26 153 466 114 153 467 109 154 468 116 154 469 110 154 470 117 155 471 114 155 472 111 155 473 113 156 474 109 156 475 50 156 476 112 157 477 119 157 478 118 157 479 120 158 480 109 158 481 115 158 482 109 159 483 122 159 484 116 159 485 117 160 486 111 160 487 123 160 488 114 161 489 125 161 490 124 161 491 119 162 492 114 162 493 124 162 494 109 163 495 121 163 496 128 163 497 109 164 498 128 164 499 122 164 500 129 165 501 123 165 502 130 165 503 123 166 504 111 166 505 131 166 506 125 167 507 117 167 508 129 167 509 137 168 510 120 168 511 136 168 512 128 169 513 121 169 514 138 169 515 130 170 516 123 170 517 139 170 518 139 171 519 131 171 520 140 171 521 131 172 522 111 172 523 141 172 524 138 173 525 121 173 526 137 173 527 140 174 528 131 174 529 143 174 530 144 175 531 131 175 532 141 175 533 147 176 534 136 176 535 146 176 536 147 177 537 138 177 538 137 177 539 143 178 540 148 178 541 149 178 542 144 179 543 148 179 544 131 179 545 145 180 546 127 180 547 142 180 548 152 181 549 145 181 550 142 181 551 147 182 552 153 182 553 138 182 554 149 183 555 148 183 556 154 183 557 144 184 558 155 184 559 148 184 560 151 185 561 145 185 562 162 185 563 153 186 564 146 186 565 157 186 566 138 187 567 153 187 568 158 187 569 154 188 570 159 188 571 160 188 572 148 189 573 155 189 574 159 189 575 161 190 576 150 190 577 132 190 578 142 191 579 150 191 580 156 191 581 151 192 582 163 192 583 134 192 584 152 193 585 142 193 586 156 193 587 168 194 588 138 194 589 158 194 590 158 195 591 157 195 592 169 195 593 160 196 594 159 196 595 171 196 596 159 197 597 155 197 598 167 197 599 172 198 600 156 198 601 161 198 602 173 199 603 162 199 604 145 199 605 152 200 606 156 200 607 164 200 608 181 201 609 162 201 610 174 201 611 168 202 612 170 202 613 138 202 614 171 203 615 178 203 616 179 203 617 166 204 618 159 204 619 167 204 620 180 205 621 156 205 622 172 205 623 176 206 624 162 206 625 173 206 626 175 207 627 174 207 628 162 207 629 18 208 630 111 208 631 26 208 632 156 209 633 165 209 634 164 209 635 174 210 636 177 210 637 181 210 638 168 211 639 174 211 640 170 211 641 166 212 642 178 212 643 159 212 644 179 213 645 178 213 646 180 213 647 178 214 648 165 214 649 156 214 650 176 215 651 175 215 652 162 215 653 170 216 654 174 216 655 175 216 656 166 217 657 165 217 658 178 217 659 169 218 660 136 218 661 113 218 662 169 219 663 168 219 664 158 219 665 173 220 666 164 220 667 176 220 668 164 221 
669 175 221 670 176 221 671 165 222 672 170 222 673 175 222 674 138 223 675 166 223 676 167 223 677 128 224 678 167 224 679 155 224 680 128 225 681 144 225 682 122 225 683 116 226 684 144 226 685 141 226 686 141 227 687 110 227 688 116 227 689 108 228 690 113 228 691 38 228 692 133 229 693 126 229 694 127 229 695 119 230 696 125 230 697 129 230 698 72 231 699 83 231 700 52 231 701 58 232 702 38 232 703 28 232 704 8 233 705 9 233 706 3 233 707 84 234 708 55 234 709 27 234 710 2 235 711 69 235 712 17 235 713 16 236 714 82 236 715 34 236 716 183 237 717 182 237 718 187 237 719 189 238 720 193 238 721 185 238 722 183 239 723 187 239 724 185 239 725 186 240 726 190 240 727 182 240 728 187 237 729 182 237 730 191 237 731 192 241 732 184 241 733 188 241 734 185 242 735 193 242 736 188 242 737 190 243 738 186 243 739 184 243 740 185 244 741 194 244 742 189 244 743 184 238 744 185 238 745 188 238 746 187 245 747 195 245 748 185 245 749 182 246 750 190 246 751 191 246 752 187 237 753 191 237 754 196 237 755 192 247 756 188 247 757 197 247 758 198 248 759 184 248 760 192 248 761 197 249 762 188 249 763 193 249 764 190 250 765 184 250 766 199 250 767 185 251 768 200 251 769 194 251 770 201 252 771 189 252 772 194 252 773 202 253 774 193 253 775 189 253 776 196 254 777 195 254 778 187 254 779 185 238 780 195 238 781 203 238 782 191 255 783 190 255 784 199 255 785 196 237 786 191 237 787 204 237 788 192 256 789 197 256 790 205 256 791 192 237 792 206 237 793 198 237 794 198 257 795 207 257 796 184 257 797 197 258 798 193 258 799 202 258 800 208 259 801 199 259 802 184 259 803 185 238 804 209 238 805 200 238 806 210 260 807 194 260 808 200 260 809 201 261 810 194 261 811 210 261 812 202 262 813 189 262 814 201 262 815 196 263 816 211 263 817 195 263 818 185 264 819 203 264 820 212 264 821 203 238 822 195 238 823 213 238 824 191 265 825 199 265 826 204 265 827 196 237 828 204 237 829 214 237 830 192 266 831 205 266 832 206 266 833 205 267 834 197 267 835 202 267 836 198 237 837 206 237 838 215 237 839 215 268 840 207 268 841 198 268 842 216 269 843 184 269 844 207 269 845 208 270 846 184 270 847 217 270 848 204 271 849 199 271 850 208 271 851 185 272 852 218 272 853 209 272 854 219 273 855 200 273 856 209 273 857 210 274 858 200 274 859 219 274 860 201 275 861 210 275 862 205 275 863 201 276 864 205 276 865 202 276 866 214 277 867 211 277 868 196 277 869 213 278 870 195 278 871 211 278 872 185 279 873 212 279 874 220 279 875 221 280 876 212 280 877 203 280 878 222 281 879 203 281 880 213 281 881 214 237 882 204 237 883 223 237 884 205 237 885 224 237 886 206 237 887 215 237 888 206 237 889 225 237 890 215 282 891 226 282 892 207 282 893 216 283 894 207 283 895 226 283 896 216 284 897 227 284 898 184 284 899 223 285 900 208 285 901 217 285 902 184 286 903 227 286 904 217 286 905 204 287 906 208 287 907 223 287 908 185 288 909 220 288 910 218 288 911 228 289 912 209 289 913 218 289 914 219 290 915 209 290 916 228 290 917 205 291 918 210 291 919 219 291 920 214 292 921 229 292 922 211 292 923 229 238 924 213 238 925 211 238 926 230 293 927 220 293 928 212 293 929 221 294 930 203 294 931 222 294 932 230 295 933 212 295 934 221 295 935 222 296 936 213 296 937 231 296 938 214 237 939 223 237 940 232 237 941 233 237 942 224 237 943 205 237 944 224 297 945 216 297 946 206 297 947 234 237 948 215 237 949 225 237 950 206 298 951 235 298 952 225 298 953 234 299 954 226 299 955 215 299 956 216 300 957 226 300 958 236 300 959 224 301 960 227 301 961 216 301 962 223 302 963 217 302 964 237 302 965 237 303 966 217 303 
967 227 303 968 238 304 969 218 304 970 220 304 971 228 305 972 218 305 973 238 305 974 228 237 975 205 237 976 219 237 977 232 306 978 229 306 979 214 306 980 229 307 981 239 307 982 213 307 983 238 308 984 220 308 985 230 308 986 221 237 987 222 237 988 205 237 989 221 237 990 205 237 991 230 237 992 205 309 993 222 309 994 231 309 995 231 310 996 213 310 997 239 310 998 232 237 999 223 237 1000 237 237 1001 205 311 1002 240 311 1003 233 311 1004 233 237 1005 237 237 1006 224 237 1007 206 298 1008 216 298 1009 235 298 1010 234 237 1011 225 237 1012 241 237 1013 225 312 1014 235 312 1015 242 312 1016 234 313 1017 236 313 1018 226 313 1019 216 314 1020 236 314 1021 243 314 1022 237 315 1023 227 315 1024 224 315 1025 228 237 1026 238 237 1027 205 237 1028 232 316 1029 244 316 1030 229 316 1031 229 238 1032 244 238 1033 239 238 1034 205 317 1035 238 317 1036 230 317 1037 245 237 1038 205 237 1039 231 237 1040 231 318 1041 239 318 1042 245 318 1043 232 237 1044 237 237 1045 233 237 1046 205 319 1047 246 319 1048 240 319 1049 233 320 1050 240 320 1051 244 320 1052 216 321 1053 247 321 1054 235 321 1055 241 322 1056 236 322 1057 234 322 1058 225 237 1059 248 237 1060 241 237 1061 225 323 1062 242 323 1063 249 323 1064 247 324 1065 242 324 1066 235 324 1067 216 325 1068 243 325 1069 250 325 1070 241 326 1071 243 326 1072 236 326 1073 233 327 1074 244 327 1075 232 327 1076 239 328 1077 244 328 1078 240 328 1079 245 329 1080 251 329 1081 205 329 1082 245 330 1083 239 330 1084 252 330 1085 253 331 1086 246 331 1087 205 331 1088 252 332 1089 240 332 1090 246 332 1091 216 333 1092 254 333 1093 247 333 1094 255 237 1095 248 237 1096 225 237 1097 248 334 1098 243 334 1099 241 334 1100 249 237 1101 255 237 1102 225 237 1103 249 335 1104 242 335 1105 256 335 1106 257 336 1107 242 336 1108 247 336 1109 216 337 1110 250 337 1111 258 337 1112 248 338 1113 250 338 1114 243 338 1115 239 339 1116 240 339 1117 252 339 1118 245 340 1119 252 340 1120 251 340 1121 205 341 1122 251 341 1123 253 341 1124 253 342 1125 259 342 1126 246 342 1127 252 238 1128 246 238 1129 260 238 1130 216 343 1131 261 343 1132 254 343 1133 262 344 1134 247 344 1135 254 344 1136 255 345 1137 250 345 1138 248 345 1139 249 346 1140 263 346 1141 255 346 1142 249 347 1143 256 347 1144 264 347 1145 256 348 1146 242 348 1147 257 348 1148 265 349 1149 257 349 1150 247 349 1151 216 350 1152 258 350 1153 266 350 1154 255 351 1155 258 351 1156 250 351 1157 251 352 1158 252 352 1159 260 352 1160 253 353 1161 251 353 1162 267 353 1163 268 354 1164 259 354 1165 253 354 1166 260 238 1167 246 238 1168 259 238 1169 216 355 1170 269 355 1171 261 355 1172 270 356 1173 254 356 1174 261 356 1175 262 357 1176 254 357 1177 270 357 1178 265 358 1179 247 358 1180 262 358 1181 249 359 1182 271 359 1183 263 359 1184 263 360 1185 258 360 1186 255 360 1187 249 361 1188 264 361 1189 272 361 1190 264 362 1191 256 362 1192 273 362 1193 256 363 1194 257 363 1195 274 363 1196 275 364 1197 257 364 1198 265 364 1199 216 365 1200 266 365 1201 276 365 1202 263 366 1203 266 366 1204 258 366 1205 251 367 1206 260 367 1207 267 367 1208 268 237 1209 253 237 1210 267 237 1211 268 368 1212 277 368 1213 259 368 1214 277 238 1215 260 238 1216 259 238 1217 216 369 1218 278 369 1219 269 369 1220 279 370 1221 261 370 1222 269 370 1223 270 371 1224 261 371 1225 279 371 1226 270 372 1227 280 372 1228 262 372 1229 262 373 1230 281 373 1231 265 373 1232 249 237 1233 272 237 1234 271 237 1235 271 374 1236 266 374 1237 263 374 1238 272 375 1239 264 375 1240 282 375 1241 264 362 1242 273 362 
1243 280 362 1244 256 376 1245 274 376 1246 273 376 1247 275 377 1248 274 377 1249 257 377 1250 265 378 1251 281 378 1252 275 378 1253 216 379 1254 276 379 1255 278 379 1256 271 380 1257 276 380 1258 266 380 1259 267 381 1260 260 381 1261 283 381 1262 268 382 1263 267 382 1264 284 382 1265 284 383 1266 277 383 1267 268 383 1268 277 384 1269 283 384 1270 260 384 1271 282 385 1272 269 385 1273 278 385 1274 279 386 1275 269 386 1276 282 386 1277 270 387 1278 279 387 1279 280 387 1280 280 388 1281 281 388 1282 262 388 1283 272 389 1284 276 389 1285 271 389 1286 282 390 1287 278 390 1288 272 390 1289 282 391 1290 264 391 1291 279 391 1292 279 392 1293 264 392 1294 280 392 1295 280 393 1296 273 393 1297 285 393 1298 274 394 1299 285 394 1300 273 394 1301 286 395 1302 274 395 1303 275 395 1304 275 396 1305 281 396 1306 287 396 1307 272 397 1308 278 397 1309 276 397 1310 267 398 1311 283 398 1312 288 398 1313 267 237 1314 288 237 1315 284 237 1316 284 399 1317 289 399 1318 277 399 1319 277 400 1320 289 400 1321 283 400 1322 280 401 1323 285 401 1324 281 401 1325 290 402 1326 285 402 1327 274 402 1328 286 403 1329 275 403 1330 287 403 1331 286 404 1332 290 404 1333 274 404 1334 281 405 1335 291 405 1336 287 405 1337 288 406 1338 283 406 1339 292 406 1340 288 407 1341 293 407 1342 284 407 1343 293 408 1344 289 408 1345 284 408 1346 283 409 1347 289 409 1348 292 409 1349 281 410 1350 285 410 1351 291 410 1352 291 411 1353 285 411 1354 290 411 1355 286 412 1356 287 412 1357 294 412 1358 294 413 1359 290 413 1360 286 413 1361 287 414 1362 291 414 1363 295 414 1364 288 415 1365 292 415 1366 296 415 1367 296 416 1368 293 416 1369 288 416 1370 293 417 1371 297 417 1372 289 417 1373 292 418 1374 289 418 1375 297 418 1376 291 419 1377 290 419 1378 298 419 1379 287 420 1380 299 420 1381 294 420 1382 294 421 1383 298 421 1384 290 421 1385 287 422 1386 295 422 1387 299 422 1388 291 423 1389 298 423 1390 295 423 1391 296 424 1392 292 424 1393 300 424 1394 296 425 1395 301 425 1396 293 425 1397 301 426 1398 297 426 1399 293 426 1400 302 238 1401 292 238 1402 297 238 1403 294 427 1404 299 427 1405 303 427 1406 303 428 1407 298 428 1408 294 428 1409 299 429 1410 295 429 1411 304 429 1412 298 430 1413 304 430 1414 295 430 1415 296 431 1416 300 431 1417 305 431 1418 302 432 1419 300 432 1420 292 432 1421 306 433 1422 301 433 1423 296 433 1424 301 434 1425 302 434 1426 297 434 1427 303 435 1428 299 435 1429 307 435 1430 303 436 1431 308 436 1432 298 436 1433 299 437 1434 304 437 1435 309 437 1436 308 438 1437 304 438 1438 298 438 1439 306 439 1440 296 439 1441 305 439 1442 305 440 1443 300 440 1444 310 440 1445 302 441 1446 311 441 1447 300 441 1448 306 442 1449 302 442 1450 301 442 1451 307 443 1452 308 443 1453 303 443 1454 307 444 1455 299 444 1456 309 444 1457 309 445 1458 304 445 1459 312 445 1460 312 446 1461 304 446 1462 308 446 1463 306 447 1464 305 447 1465 313 447 1466 305 448 1467 310 448 1468 314 448 1469 300 238 1470 311 238 1471 310 238 1472 306 449 1473 311 449 1474 302 449 1475 307 450 1476 315 450 1477 308 450 1478 307 451 1479 309 451 1480 316 451 1481 309 452 1482 312 452 1483 317 452 1484 312 453 1485 308 453 1486 315 453 1487 313 454 1488 311 454 1489 306 454 1490 305 455 1491 318 455 1492 313 455 1493 314 456 1494 318 456 1495 305 456 1496 314 457 1497 310 457 1498 319 457 1499 310 238 1500 311 238 1501 320 238 1502 316 458 1503 315 458 1504 307 458 1505 316 459 1506 309 459 1507 317 459 1508 317 460 1509 312 460 1510 321 460 1511 312 461 1512 315 461 1513 321 461 1514 313 462 1515 320 462 1516 
311 462 1517 318 463 1518 320 463 1519 313 463 1520 314 464 1521 322 464 1522 318 464 1523 314 465 1524 319 465 1525 323 465 1526 324 238 1527 319 238 1528 310 238 1529 324 466 1530 310 466 1531 320 466 1532 316 467 1533 325 467 1534 315 467 1535 326 468 1536 316 468 1537 317 468 1538 317 469 1539 321 469 1540 327 469 1541 315 470 1542 328 470 1543 321 470 1544 318 471 1545 324 471 1546 320 471 1547 329 472 1548 322 472 1549 314 472 1550 322 473 1551 324 473 1552 318 473 1553 329 474 1554 314 474 1555 323 474 1556 323 475 1557 319 475 1558 330 475 1559 324 476 1560 331 476 1561 319 476 1562 326 477 1563 325 477 1564 316 477 1565 325 478 1566 328 478 1567 315 478 1568 326 479 1569 317 479 1570 332 479 1571 317 480 1572 327 480 1573 332 480 1574 327 481 1575 321 481 1576 328 481 1577 329 482 1578 331 482 1579 322 482 1580 322 483 1581 331 483 1582 324 483 1583 329 484 1584 323 484 1585 333 484 1586 323 485 1587 330 485 1588 334 485 1589 319 486 1590 331 486 1591 330 486 1592 326 487 1593 335 487 1594 325 487 1595 325 488 1596 336 488 1597 328 488 1598 332 489 1599 335 489 1600 326 489 1601 332 490 1602 327 490 1603 337 490 1604 327 491 1605 328 491 1606 338 491 1607 329 492 1608 339 492 1609 331 492 1610 333 493 1611 339 493 1612 329 493 1613 323 494 1614 340 494 1615 333 494 1616 334 495 1617 340 495 1618 323 495 1619 334 496 1620 330 496 1621 341 496 1622 330 238 1623 331 238 1624 339 238 1625 335 497 1626 336 497 1627 325 497 1628 338 498 1629 328 498 1630 336 498 1631 332 499 1632 342 499 1633 335 499 1634 337 500 1635 342 500 1636 332 500 1637 337 501 1638 327 501 1639 343 501 1640 343 502 1641 327 502 1642 338 502 1643 333 493 1644 344 493 1645 339 493 1646 340 503 1647 344 503 1648 333 503 1649 334 504 1650 345 504 1651 340 504 1652 334 505 1653 341 505 1654 346 505 1655 344 506 1656 341 506 1657 330 506 1658 344 238 1659 330 238 1660 339 238 1661 335 507 1662 347 507 1663 336 507 1664 338 508 1665 336 508 1666 348 508 1667 349 509 1668 335 509 1669 342 509 1670 337 510 1671 350 510 1672 342 510 1673 343 511 1674 350 511 1675 337 511 1676 351 512 1677 343 512 1678 338 512 1679 340 513 1680 352 513 1681 344 513 1682 346 237 1683 345 237 1684 334 237 1685 345 514 1686 352 514 1687 340 514 1688 346 515 1689 341 515 1690 353 515 1691 344 238 1692 352 238 1693 341 238 1694 349 516 1695 347 516 1696 335 516 1697 348 517 1698 336 517 1699 347 517 1700 338 518 1701 348 518 1702 354 518 1703 349 519 1704 342 519 1705 355 519 1706 350 520 1707 356 520 1708 342 520 1709 343 521 1710 357 521 1711 350 521 1712 351 522 1713 338 522 1714 354 522 1715 351 523 1716 357 523 1717 343 523 1718 358 524 1719 345 524 1720 346 524 1721 345 525 1722 359 525 1723 352 525 1724 346 526 1725 353 526 1726 360 526 1727 341 527 1728 352 527 1729 353 527 1730 361 528 1731 347 528 1732 349 528 1733 348 529 1734 347 529 1735 361 529 1736 354 237 1737 348 237 1738 362 237 1739 363 530 1740 349 530 1741 355 530 1742 342 531 1743 356 531 1744 355 531 1745 350 532 1746 364 532 1747 356 532 1748 357 533 1749 364 533 1750 350 533 1751 354 534 1752 365 534 1753 351 534 1754 351 535 1755 365 535 1756 357 535 1757 358 237 1758 346 237 1759 366 237 1760 358 536 1761 359 536 1762 345 536 1763 353 537 1764 352 537 1765 359 537 1766 346 237 1767 360 237 1768 366 237 1769 360 538 1770 353 538 1771 367 538 1772 361 539 1773 349 539 1774 363 539 1775 368 237 1776 348 237 1777 361 237 1778 362 540 1779 369 540 1780 354 540 1781 362 237 1782 348 237 1783 370 237 1784 363 541 1785 355 541 1786 371 541 1787 371 542 1788 355 542 1789 356 
542 1790 372 543 1791 356 543 1792 364 543 1793 357 544 1794 373 544 1795 364 544 1796 354 545 1797 369 545 1798 365 545 1799 365 546 1800 373 546 1801 357 546 1802 366 547 1803 374 547 1804 358 547 1805 358 548 1806 374 548 1807 359 548 1808 374 238 1809 353 238 1810 359 238 1811 366 549 1812 360 549 1813 375 549 1814 360 550 1815 367 550 1816 375 550 1817 374 551 1818 367 551 1819 353 551 1820 363 237 1821 376 237 1822 361 237 1823 377 552 1824 368 552 1825 361 552 1826 370 553 1827 348 553 1828 368 553 1829 362 554 1830 378 554 1831 369 554 1832 370 555 1833 378 555 1834 362 555 1835 363 237 1836 371 237 1837 372 237 1838 371 556 1839 356 556 1840 372 556 1841 372 557 1842 364 557 1843 379 557 1844 379 558 1845 364 558 1846 373 558 1847 369 559 1848 373 559 1849 365 559 1850 366 560 1851 380 560 1852 374 560 1853 381 237 1854 366 237 1855 375 237 1856 375 561 1857 367 561 1858 382 561 1859 374 238 1860 380 238 1861 367 238 1862 363 237 1863 383 237 1864 376 237 1865 376 237 1866 384 237 1867 361 237 1868 377 237 1869 361 237 1870 385 237 1871 377 562 1872 386 562 1873 368 562 1874 368 563 1875 387 563 1876 370 563 1877 378 564 1878 373 564 1879 369 564 1880 370 565 1881 387 565 1882 378 565 1883 363 566 1884 372 566 1885 379 566 1886 379 567 1887 373 567 1888 388 567 1889 389 568 1890 380 568 1891 366 568 1892 375 569 1893 382 569 1894 381 569 1895 381 237 1896 390 237 1897 366 237 1898 367 238 1899 391 238 1900 382 238 1901 367 570 1902 380 570 1903 391 570 1904 363 237 1905 366 237 1906 383 237 1907 376 571 1908 383 571 1909 392 571 1910 393 572 1911 384 572 1912 376 572 1913 361 573 1914 384 573 1915 394 573 1916 385 574 1917 395 574 1918 377 574 1919 396 575 1920 385 575 1921 361 575 1922 377 576 1923 395 576 1924 386 576 1925 368 577 1926 386 577 1927 387 577 1928 387 578 1929 373 578 1930 378 578 1931 363 579 1932 379 579 1933 388 579 1934 388 580 1935 373 580 1936 397 580 1937 363 581 1938 389 581 1939 366 581 1940 389 582 1941 391 582 1942 380 582 1943 381 583 1944 382 583 1945 398 583 1946 381 584 1947 398 584 1948 390 584 1949 366 585 1950 390 585 1951 399 585 1952 382 238 1953 391 238 1954 400 238 1955 366 586 1956 401 586 1957 383 586 1958 393 587 1959 376 587 1960 392 587 1961 383 588 1962 402 588 1963 392 588 1964 393 589 1965 403 589 1966 384 589 1967 396 237 1968 361 237 1969 394 237 1970 384 590 1971 403 590 1972 394 590 1973 385 591 1974 404 591 1975 395 591 1976 396 592 1977 404 592 1978 385 592 1979 395 593 1980 373 593 1981 386 593 1982 386 594 1983 373 594 1984 387 594 1985 363 595 1986 388 595 1987 405 595 1988 388 596 1989 397 596 1990 405 596 1991 397 597 1992 373 597 1993 404 597 1994 363 598 1995 406 598 1996 389 598 1997 406 599 1998 391 599 1999 389 599 2000 382 238 2001 400 238 2002 398 238 2003 390 600 2004 398 600 2005 407 600 2006 408 237 2007 366 237 2008 399 237 2009 390 601 2010 407 601 2011 399 601 2012 406 599 2013 400 599 2014 391 599 2015 409 237 2016 401 237 2017 366 237 2018 401 602 2019 402 602 2020 383 602 2021 392 238 2022 410 238 2023 393 238 2024 411 603 2025 392 603 2026 402 603 2027 412 604 2028 403 604 2029 393 604 2030 394 605 2031 413 605 2032 396 605 2033 394 606 2034 403 606 2035 413 606 2036 404 607 2037 373 607 2038 395 607 2039 396 608 2040 413 608 2041 404 608 2042 363 609 2043 405 609 2044 414 609 2045 405 610 2046 397 610 2047 415 610 2048 397 611 2049 404 611 2050 415 611 2051 363 612 2052 416 612 2053 406 612 2054 398 613 2055 400 613 2056 417 613 2057 418 614 2058 407 614 2059 398 614 2060 399 615 2061 419 615 2062 408 615 
2063 408 616 2064 420 616 2065 366 616 2066 399 617 2067 407 617 2068 419 617 2069 416 618 2070 400 618 2071 406 618 2072 409 237 2073 366 237 2074 421 237 2075 409 619 2076 422 619 2077 401 619 2078 401 620 2079 422 620 2080 402 620 2081 392 621 2082 423 621 2083 410 621 2084 393 622 2085 410 622 2086 424 622 2087 411 623 2088 402 623 2089 425 623 2090 411 238 2091 423 238 2092 392 238 2093 412 238 2094 393 238 2095 424 238 2096 412 238 2097 426 238 2098 403 238 2099 403 238 2100 427 238 2101 413 238 2102 428 624 2103 404 624 2104 413 624 2105 363 625 2106 414 625 2107 429 625 2108 405 626 2109 415 626 2110 414 626 2111 404 627 2112 430 627 2113 415 627 2114 363 628 2115 431 628 2116 416 628 2117 418 629 2118 398 629 2119 417 629 2120 416 630 2121 417 630 2122 400 630 2123 418 238 2124 432 238 2125 407 238 2126 408 631 2127 419 631 2128 433 631 2129 408 632 2130 433 632 2131 420 632 2132 366 633 2133 420 633 2134 434 633 2135 407 238 2136 432 238 2137 419 238 2138 421 634 2139 435 634 2140 409 634 2141 366 635 2142 436 635 2143 421 635 2144 409 636 2145 435 636 2146 422 636 2147 422 238 2148 437 238 2149 402 238 2150 438 637 2151 410 637 2152 423 637 2153 439 638 2154 424 638 2155 410 638 2156 440 639 2157 411 639 2158 425 639 2159 402 640 2160 437 640 2161 425 640 2162 441 641 2163 423 641 2164 411 641 2165 442 642 2166 412 642 2167 424 642 2168 443 643 2169 426 643 2170 412 643 2171 403 644 2172 426 644 2173 427 644 2174 413 645 2175 427 645 2176 444 645 2177 428 238 2178 413 238 2179 444 238 2180 428 646 2181 445 646 2182 404 646 2183 363 647 2184 429 647 2185 446 647 2186 414 648 2187 430 648 2188 429 648 2189 414 649 2190 415 649 2191 430 649 2192 447 238 2193 430 238 2194 404 238 2195 363 650 2196 448 650 2197 431 650 2198 431 651 2199 417 651 2200 416 651 2201 431 652 2202 418 652 2203 417 652 2204 448 653 2205 432 653 2206 418 653 2207 449 238 2208 433 238 2209 419 238 2210 420 654 2211 433 654 2212 450 654 2213 451 655 2214 366 655 2215 434 655 2216 420 656 2217 450 656 2218 434 656 2219 419 657 2220 432 657 2221 452 657 2222 421 658 2223 453 658 2224 435 658 2225 451 237 2226 436 237 2227 366 237 2228 436 659 2229 453 659 2230 421 659 2231 454 660 2232 422 660 2233 435 660 2234 422 661 2235 455 661 2236 437 661 2237 438 662 2238 423 662 2239 441 662 2240 439 663 2241 410 663 2242 438 663 2243 442 664 2244 424 664 2245 439 664 2246 440 665 2247 425 665 2248 456 665 2249 441 666 2250 411 666 2251 440 666 2252 456 665 2253 425 665 2254 437 665 2255 443 667 2256 412 667 2257 442 667 2258 457 668 2259 426 668 2260 443 668 2261 457 669 2262 427 669 2263 426 669 2264 458 670 2265 444 670 2266 427 670 2267 459 671 2268 428 671 2269 444 671 2270 460 672 2271 445 672 2272 428 672 2273 404 673 2274 445 673 2275 461 673 2276 363 674 2277 446 674 2278 462 674 2279 429 675 2280 447 675 2281 446 675 2282 429 676 2283 430 676 2284 447 676 2285 447 238 2286 404 238 2287 463 238 2288 363 677 2289 464 677 2290 448 677 2291 448 678 2292 418 678 2293 431 678 2294 464 679 2295 432 679 2296 448 679 2297 449 238 2298 419 238 2299 452 238 2300 449 238 2301 465 238 2302 433 238 2303 433 238 2304 466 238 2305 450 238 2306 434 680 2307 467 680 2308 451 680 2309 434 681 2310 450 681 2311 467 681 2312 464 682 2313 452 682 2314 432 682 2315 453 238 2316 468 238 2317 435 238 2318 451 683 2319 469 683 2320 436 683 2321 436 684 2322 469 684 2323 453 684 2324 454 238 2325 435 238 2326 470 238 2327 454 685 2328 455 685 2329 422 685 2330 471 686 2331 437 686 2332 455 686 2333 363 687 2334 438 687 2335 441 687 2336 
363 688 2337 439 688 2338 438 688 2339 363 689 2340 442 689 2341 439 689 2342 363 690 2343 440 690 2344 456 690 2345 363 691 2346 441 691 2347 440 691 2348 456 692 2349 437 692 2350 471 692 2351 363 693 2352 443 693 2353 442 693 2354 363 694 2355 457 694 2356 443 694 2357 458 695 2358 427 695 2359 457 695 2360 459 670 2361 444 670 2362 458 670 2363 460 696 2364 428 696 2365 459 696 2366 472 697 2367 445 697 2368 460 697 2369 473 238 2370 404 238 2371 461 238 2372 472 698 2373 461 698 2374 445 698 2375 363 699 2376 462 699 2377 474 699 2378 446 700 2379 463 700 2380 462 700 2381 446 700 2382 447 700 2383 463 700 2384 404 701 2385 475 701 2386 463 701 2387 363 702 2388 476 702 2389 464 702 2390 476 703 2391 449 703 2392 452 703 2393 477 704 2394 465 704 2395 449 704 2396 433 705 2397 465 705 2398 466 705 2399 450 706 2400 466 706 2401 478 706 2402 451 707 2403 467 707 2404 469 707 2405 479 238 2406 467 238 2407 450 238 2408 476 708 2409 452 708 2410 464 708 2411 453 709 2412 480 709 2413 468 709 2414 435 710 2415 468 710 2416 470 710 2417 481 711 2418 453 711 2419 469 711 2420 482 712 2421 454 712 2422 470 712 2423 483 713 2424 455 713 2425 454 713 2426 471 714 2427 455 714 2428 483 714 2429 363 715 2430 456 715 2431 471 715 2432 363 716 2433 458 716 2434 457 716 2435 363 717 2436 459 717 2437 458 717 2438 363 718 2439 460 718 2440 459 718 2441 363 719 2442 472 719 2443 460 719 2444 484 720 2445 473 720 2446 461 720 2447 473 238 2448 485 238 2449 404 238 2450 484 721 2451 461 721 2452 472 721 2453 363 722 2454 474 722 2455 486 722 2456 462 723 2457 475 723 2458 474 723 2459 462 724 2460 463 724 2461 475 724 2462 487 238 2463 475 238 2464 404 238 2465 363 237 2466 477 237 2467 476 237 2468 477 704 2469 449 704 2470 476 704 2471 488 725 2472 465 725 2473 477 725 2474 488 726 2475 466 726 2476 465 726 2477 479 238 2478 450 238 2479 478 238 2480 489 727 2481 478 727 2482 466 727 2483 467 238 2484 490 238 2485 469 238 2486 479 238 2487 490 238 2488 467 238 2489 481 238 2490 480 238 2491 453 238 2492 491 728 2493 468 728 2494 480 728 2495 492 729 2496 470 729 2497 468 729 2498 481 730 2499 469 730 2500 493 730 2501 482 731 2502 470 731 2503 492 731 2504 483 712 2505 454 712 2506 482 712 2507 363 732 2508 471 732 2509 483 732 2510 363 733 2511 484 733 2512 472 733 2513 494 734 2514 473 734 2515 484 734 2516 485 735 2517 473 735 2518 494 735 2519 404 736 2520 485 736 2521 495 736 2522 363 737 2523 486 737 2524 496 737 2525 474 738 2526 487 738 2527 486 738 2528 474 739 2529 475 739 2530 487 739 2531 487 740 2532 404 740 2533 495 740 2534 363 741 2535 488 741 2536 477 741 2537 489 742 2538 466 742 2539 488 742 2540 479 743 2541 478 743 2542 497 743 2543 497 744 2544 478 744 2545 489 744 2546 469 745 2547 490 745 2548 493 745 2549 479 746 2550 498 746 2551 490 746 2552 499 747 2553 480 747 2554 481 747 2555 491 748 2556 480 748 2557 499 748 2558 492 749 2559 468 749 2560 491 749 2561 500 750 2562 481 750 2563 493 750 2564 363 751 2565 482 751 2566 492 751 2567 363 752 2568 483 752 2569 482 752 2570 363 753 2571 494 753 2572 484 753 2573 501 754 2574 485 754 2575 494 754 2576 496 755 2577 495 755 2578 485 755 2579 363 756 2580 496 756 2581 501 756 2582 486 757 2583 495 757 2584 496 757 2585 486 758 2586 487 758 2587 495 758 2588 363 759 2589 489 759 2590 488 759 2591 479 760 2592 497 760 2593 498 760 2594 363 761 2595 497 761 2596 489 761 2597 498 762 2598 493 762 2599 490 762 2600 499 750 2601 481 750 2602 500 750 2603 363 763 2604 491 763 2605 499 763 2606 363 764 2607 492 764 2608 491 764 2609 500 
765 2610 493 765 2611 498 765 2612 363 766 2613 501 766 2614 494 766 2615 501 767 2616 496 767 2617 485 767 2618 363 768 2619 498 768 2620 497 768 2621 363 769 2622 499 769 2623 500 769 2624 363 770 2625 500 770 2626 498 770 2627 78 771 2628 183 771 2629 98 771 2630 107 772 2631 182 772 2632 78 772 2633 107 773 2634 185 773 2635 186 773 2636 106 774 2637 183 774 2638 185 774 2639 0 775 2640 1 775 2641 2 775 2642 1 776 2643 0 776 2644 6 776 2645 11 777 2646 5 777 2647 6 777 2648 4 778 2649 7 778 2650 15 778 2651 20 779 2652 11 779 2653 12 779 2654 18 780 2655 26 780 2656 7 780 2657 19 781 2658 10 781 2659 15 781 2660 29 782 2661 20 782 2662 21 782 2663 23 783 2664 14 783 2665 13 783 2666 14 784 2667 19 784 2668 24 784 2669 40 785 2670 28 785 2671 9 785 2672 30 786 2673 41 786 2674 42 786 2675 33 787 2676 32 787 2677 23 787 2678 17 788 2679 51 788 2680 52 788 2681 39 789 2682 9 789 2683 54 789 2684 55 790 2685 56 790 2686 40 790 2687 40 791 2688 57 791 2689 58 791 2690 41 792 2691 59 792 2692 60 792 2693 61 793 2694 62 793 2695 31 793 2696 62 794 2697 63 794 2698 46 794 2699 48 795 2700 47 795 2701 32 795 2702 55 796 2703 39 796 2704 37 796 2705 56 797 2706 55 797 2707 84 797 2708 56 798 2709 73 798 2710 57 798 2711 75 799 2712 60 799 2713 59 799 2714 43 800 2715 77 800 2716 61 800 2717 47 801 2718 48 801 2719 65 801 2720 46 802 2721 63 802 2722 79 802 2723 51 803 2724 16 803 2725 38 803 2726 52 804 2727 51 804 2728 70 804 2729 81 805 2730 74 805 2731 44 805 2732 76 806 2733 86 806 2734 87 806 2735 77 807 2736 43 807 2737 88 807 2738 64 808 2739 79 808 2740 91 808 2741 93 809 2742 85 809 2743 74 809 2744 86 810 2745 96 810 2746 97 810 2747 99 811 2748 100 811 2749 89 811 2750 98 812 2751 101 812 2752 90 812 2753 80 813 2754 91 813 2755 102 813 2756 104 814 2757 95 814 2758 85 814 2759 96 815 2760 103 815 2761 105 815 2762 100 816 2763 99 816 2764 106 816 2765 101 817 2766 105 817 2767 103 817 2768 92 818 2769 102 818 2770 95 818 2771 118 819 2772 108 819 2773 35 819 2774 113 820 2775 115 820 2776 109 820 2777 112 821 2778 114 821 2779 119 821 2780 120 822 2781 121 822 2782 109 822 2783 114 823 2784 117 823 2785 125 823 2786 129 824 2787 117 824 2788 123 824 2789 137 825 2790 121 825 2791 120 825 2792 139 826 2793 123 826 2794 131 826 2795 147 827 2796 137 827 2797 136 827 2798 143 828 2799 131 828 2800 148 828 2801 145 829 2802 133 829 2803 127 829 2804 152 830 2805 173 830 2806 145 830 2807 151 831 2808 133 831 2809 145 831 2810 153 832 2811 147 832 2812 146 832 2813 154 833 2814 148 833 2815 159 833 2816 161 834 2817 156 834 2818 150 834 2819 142 835 2820 127 835 2821 150 835 2822 151 836 2823 162 836 2824 163 836 2825 158 837 2826 153 837 2827 157 837 2828 181 838 2829 163 838 2830 162 838 2831 171 839 2832 159 839 2833 178 839 2834 180 840 2835 178 840 2836 156 840 2837 18 841 2838 110 841 2839 111 841 2840 174 842 2841 168 842 2842 177 842 2843 113 843 2844 135 843 2845 169 843 2846 135 844 2847 134 844 2848 163 844 2849 169 845 2850 135 845 2851 177 845 2852 163 846 2853 181 846 2854 135 846 2855 181 847 2856 177 847 2857 135 847 2858 169 848 2859 157 848 2860 136 848 2861 157 849 2862 146 849 2863 136 849 2864 136 850 2865 120 850 2866 113 850 2867 120 851 2868 115 851 2869 113 851 2870 169 852 2871 177 852 2872 168 852 2873 173 853 2874 152 853 2875 164 853 2876 164 854 2877 165 854 2878 175 854 2879 165 855 2880 166 855 2881 170 855 2882 138 856 2883 170 856 2884 166 856 2885 128 857 2886 138 857 2887 167 857 2888 128 858 2889 155 858 2890 144 858 2891 116 859 2892 122 859 2893 144 
859 2894 141 860 2895 111 860 2896 110 860 2897 38 861 2898 16 861 2899 108 861 2900 16 862 2901 35 862 2902 108 862 2903 108 863 2904 126 863 2905 113 863 2906 126 864 2907 135 864 2908 113 864 2909 113 865 2910 50 865 2911 38 865 2912 133 866 2913 151 866 2914 135 866 2915 151 867 2916 134 867 2917 135 867 2918 132 868 2919 150 868 2920 126 868 2921 150 869 2922 127 869 2923 126 869 2924 133 870 2925 135 870 2926 126 870 2927 126 871 2928 108 871 2929 130 871 2930 108 872 2931 118 872 2932 119 872 2933 119 873 2934 124 873 2935 125 873 2936 108 874 2937 119 874 2938 129 874 2939 130 875 2940 108 875 2941 129 875 2942 161 876 2943 132 876 2944 126 876 2945 180 877 2946 172 877 2947 126 877 2948 172 878 2949 161 878 2950 126 878 2951 171 879 2952 179 879 2953 126 879 2954 179 880 2955 180 880 2956 126 880 2957 154 881 2958 160 881 2959 130 881 2960 160 882 2961 171 882 2962 126 882 2963 130 883 2964 160 883 2965 126 883 2966 143 884 2967 149 884 2968 130 884 2969 149 885 2970 154 885 2971 130 885 2972 139 886 2973 140 886 2974 130 886 2975 140 887 2976 143 887 2977 130 887 2978 52 888 2979 71 888 2980 72 888 2981 71 889 2982 73 889 2983 56 889 2984 72 890 2985 71 890 2986 56 890 2987 58 891 2988 70 891 2989 38 891 2990 8 892 2991 54 892 2992 9 892 2993 53 889 2994 27 889 2995 55 889 2996 27 893 2997 36 893 2998 84 893 2999 36 894 3000 68 894 3001 84 894 3002 2 895 3003 25 895 3004 69 895 3005 16 896 3006 51 896 3007 82 896 3008 184 238 3009 186 238 3010 185 238 3011 78 897 3012 182 897 3013 183 897 3014 107 898 3015 186 898 3016 182 898 3017 107 899 3018 106 899 3019 185 899 3020 106 900 3021 98 900 3022 183 900 3023

+
+
+
+
+ + + + 0.001659 9.88787e-11 0 0.0111 9.88841e-11 -0.00165891 1.45027e-10 -0.0297 0 -1.45027e-10 -0.00165891 3e-4 0 0 0 1 + + + + + + + +
\ No newline at end of file
diff --git a/models/rg_robot/meshes/ActiveHinge_FrameV1.dae b/models/rg_robot/meshes/ActiveHinge_FrameV1.dae
new file mode 100644
index 0000000000..387e9ff21b
--- /dev/null
+++ b/models/rg_robot/meshes/ActiveHinge_FrameV1.dae
@@ -0,0 +1,63 @@
+ VCGLab
+ VCGLib | MeshLab
+ Y_UP
+ do sep. 17 12:07:25 2015
+ do sep. 17 12:07:25 2015
[COLLADA mesh data omitted: vertex position arrays for the ActiveHinge_FrameV1 geometry]
-0.0177877 -0.005 -0.00587842 -0.0178637 -0.005 -0.00562351 -0.0179231 -0.005 -0.0055 -0.0179457 0 -0.00562351 -0.0179231 0 -0.0066123 -0.0175875 0 -0.00637388 -0.0176955 0 -0.00684315 -0.0174641 -0.005 -0.00706543 -0.0173259 0 -0.00684315 -0.0174641 0 -0.00706543 -0.0173259 0.005 -0.00727819 -0.0171734 0 -0.00748053 -0.0170074 0 -0.00748053 -0.0170074 0.005 -0.00767157 -0.0168284 0.005 -0.00587842 -0.0178637 0 -0.00536525 -0.0179658 -0.005 -0.00529391 -0.0179745 -0.000404493 -0.00533893 -0.0179692 -0.000367599 -0.00524382 -0.0179799 -0.000436521 -0.00518977 -0.017985 -0.000462589 -0.00484315 -0.018 -0.00047476 -0.00490107 -0.0179996 -0.000490115 -0.00549666 -0.0179463 -5.76878e-05 -0.00484315 -0.018 0.00047476 -0.00514235 0.0146194 0.005 -0.0061289 0.0177877 0.005 -0.00612229 0.0140778 0.005 -0.00634638 0.0139092 0.005 -0.00684315 0.0174641 0.005 -0.00656067 0.0137282 0.005 -0.00767157 0.0168284 0.005 -0.00753362 0.0125891 0.005 -0.00562351 0.0179231 0.005 -0.00564753 0.0143761 0.005 -0.00587842 0.0178637 0.005 -0.00748053 0.0170074 0.005 -0.00801942 0.0115803 0.005 -0.00817464 0.0110415 0.005 -0.00826856 0.0104888 0.005 -0.0083 0.00992893 0.005 -0.00826856 -0.0104888 0.005 -0.00829213 -0.0102093 0.005 -0.0079194 -0.0118424 0.005 -0.00767612 -0.0123475 0.005 -0.00727819 -0.0171734 0.005 -0.00588579 -0.0144142 0.005 -0.00684315 -0.0174641 0.005 -0.00575715 -0.0145321 0.005 -0.00561873 -0.0146383 0.005 -0.0066123 -0.0175875 0.005 -0.00547157 -0.0147321 0.005 -0.00637388 -0.0176955 0.005 -0.0061289 -0.0177877 0.005 -0.00562351 -0.0179231 0.005 -0.00510476 -0.0179914 0.005 -0.0105 0.002 0.005 -0.00434154 0.0148746 0.005 0.000346688 0.018 0.005 -0.00203376 -0.018 0.000180621 -0.00203376 -0.018 -0.000180621 -0.00207489 -0.018 -0.000263216 -0.0021305 -0.018 -0.000336848 -0.00245387 -0.018 -0.000497867 -0.00263683 -0.018 -0.000480913 -0.00296624 -0.018 -0.000180621 -0.00299149 -0.018 -9.18746e-05 -0.003 -0.018 0 -0.00296624 -0.018 0.000180621 -0.0028695 -0.018 0.000336848 -0.00280132 -0.018 0.000399009 -0.00272287 -0.018 0.000447582 -0.00254613 -0.018 0.000497867 -0.00245387 -0.018 0.000497867 -0.00484315 -0.018 0.005 -0.0021305 -0.018 0.000336848 0.000609339 -0.018 0.00520699 0.000346688 -0.018 -0.005 0.00145617 -0.018 -0.00574326 0.00175557 -0.018 0.00589221 0.00206225 -0.018 -0.00602556 0.00269412 -0.018 -0.0062441 0.00334511 -0.018 -0.00639658 0.00367562 -0.018 0.00644751 0.00378332 -0.018 0.000221066 0.004125 -0.018 0.000649519 0.00403238 -0.018 0.000586374 0.00400831 -0.018 0.00648138 0.0043423 -0.018 0.00649809 0.00504979 -0.018 0.000510129 0.00524162 -0.018 0.000111782 0.00501066 -0.018 0.00647991 0.00534325 -0.018 0.00644507 0.00567361 -0.018 0.00639317 0.00632414 -0.018 0.00623879 0.00600086 -0.018 0.00632435 -0.00476121 -0.018 0.000439291 -0.00462254 -0.018 0.000327906 -0.0045 -0.018 0 -0.00457023 -0.018 -0.000255541 -0.00462254 -0.018 -0.000327906 -0.00468688 -0.018 -0.000389814 -0.00484315 -0.018 -0.005 0.000346688 -0.018 0.005 0.000882288 -0.018 0.0054002 0.000882288 -0.018 -0.0054002 0.00237537 -0.018 -0.00614296 0.00334511 -0.018 0.00639658 0.00400831 -0.018 -0.00648138 0.00422599 -0.018 -0.000698155 0.004125 -0.018 -0.000649519 0.0043423 -0.018 -0.00649809 0.00496762 -0.018 -0.000586374 0.00504979 -0.018 -0.000510129 0.00501066 -0.018 -0.00647991 0.00521668 -0.018 -0.000221066 0.00600086 -0.018 -0.00632435 0.00664259 -0.018 -0.00613672 0.00632414 -0.018 -0.00623879 0.00695537 -0.018 -0.0060184 0.00756063 -0.018 -0.00573433 0.0081335 -0.018 -0.00538959 0.00979985 -0.018 
-0.00376319 0.00998638 -0.018 -0.00348563 0.0106927 -0.018 -0.00197496 0.0109656 -0.018 -0.000667716 0.011 -0.018 0 0.0109914 -0.018 0.000334301 0.0108628 -0.018 0.00132837 0.0107861 -0.018 0.00165385 0.0103154 -0.018 0.0029036 0.00998638 -0.018 0.00348563 0.00959929 -0.018 0.00403078 0.00915825 -0.018 0.00453329 0.00866793 -0.018 0.00498783 0.00756063 -0.018 0.00573433 0.00433311 -0.018 0.000731196 0.00378332 -0.018 -0.000221066 -0.00490107 -0.0179996 0.000490115 -0.00495943 -0.0179983 0.000498351 -0.00501678 -0.0179962 0.000499718 -0.00513296 -0.0179895 0.000481997 -0.00518977 -0.017985 0.000462589 -0.00522287 -0.0165 0.000447582 -0.00536839 -0.015 0.000338064 -0.00530132 -0.0165 0.000399009 -0.00522139 -0.015 0.000448314 -0.00513683 -0.0165 0.000480913 -0.00504613 -0.0165 0.000497867 -0.00504449 -0.015 0.000498016 -0.00477713 -0.0165 0.000447582 -0.00477566 -0.015 0.000446844 -0.00469737 -0.015 0.000398013 -0.00457403 -0.015 0.000261813 -0.00450882 -0.015 -9.34949e-05 -0.00453376 -0.0165 -0.000180621 -0.00457489 -0.0165 -0.000263216 -0.00453436 -0.015 -0.000182157 -0.0047 -0.015 -0.0004 -0.00504778 -0.015 -0.000497712 -0.00513842 -0.015 -0.000480459 -0.00522434 -0.015 -0.000446844 -0.0053695 -0.0165 -0.000336848 -0.00546624 -0.0165 -0.000180621 -0.00546624 -0.0165 0.000180621 -0.00546564 -0.015 0.000182157 -0.00533893 -0.0179692 0.000367599 -0.00529391 -0.0179745 0.000404493 -0.00495387 -0.0165 0.000497867 -0.00507477 -0.0179933 0.000494377 -0.0053695 -0.0165 0.000336848 -0.00548634 -0.0179479 0.000116055 -0.00549666 -0.0179463 5.76878e-05 -0.0055 -0.0165 0 -0.00549149 -0.0165 9.18746e-05 -0.00541429 -0.017959 0.000279937 -0.00542424 -0.015 0.000264617 -0.00542511 -0.0165 0.000263216 -0.00544472 -0.0179545 0.000228531 -0.00549149 -0.0165 -9.18746e-05 -0.00548634 -0.0179479 -0.000116055 -0.00546894 -0.0179507 -0.000173488 -0.00544472 -0.0179545 -0.000228531 -0.00541429 -0.017959 -0.000279937 -0.00542511 -0.0165 -0.000263216 -0.00537061 -0.015 -0.000335628 -0.00546683 -0.015 -0.000179082 -0.00537865 -0.017964 -0.000326534 -0.00530132 -0.0165 -0.000399009 -0.00513683 -0.0165 -0.000480913 -0.00513296 -0.0179895 -0.000481997 -0.00507477 -0.0179933 -0.000494377 -0.00495387 -0.0165 -0.000497867 -0.00495943 -0.0179983 -0.000498351 -0.00477713 -0.0165 -0.000447582 -0.00501678 -0.0179962 -0.000499718 -0.00504613 -0.0165 -0.000497867 -0.00476121 -0.018 -0.000439291 -0.00469868 -0.0165 -0.000399009 -0.0046305 -0.0165 -0.000336848 -0.00453164 -0.018 -0.000175027 -0.00450851 -0.0165 -9.18746e-05 -0.00450797 -0.018 -8.89316e-05 -0.0045 -0.0165 0 -0.00453376 -0.0165 0.000180621 -0.00486317 -0.0165 0.000480913 -0.00450797 -0.018 8.89316e-05 -0.00450821 -0.015 9.02538e-05 -0.00450851 -0.0165 9.18746e-05 -0.00453164 -0.018 0.000175027 -0.00453317 -0.015 0.000179082 -0.00457023 -0.018 0.000255541 -0.00457489 -0.0165 0.000263216 -0.0046305 -0.0165 0.000336848 -0.00468688 -0.018 0.000389814 -0.00469868 -0.0165 0.000399009 -0.00495222 -0.015 0.000497712 -0.00486317 -0.0165 -0.000480913 -0.00522287 -0.0165 -0.000447582 -0.00549118 -0.015 9.34949e-05 -0.00585274 -0.0144465 -0.00373205 -0.00575715 -0.0145321 -0.005 -0.00540416 -0.0147693 -0.00373205 -0.00523694 -0.0148478 -0.00373205 -0.00488438 -0.0149569 -0.00373205 -0.00470206 -0.0149867 -0.00373205 -0.00464588 -0.0149924 -0.005 -0.00447157 -0.015 -0.005 -0.00447157 -0.015 -0.000848979 -0.00451777 -0.0149995 -0.00373205 -0.00588579 -0.0144142 -0.000464093 -0.00583485 -0.0144634 -0.000550482 -0.0057144 -0.014567 -0.000699741 -0.00564493 -0.0146196 -0.000764241 
-0.00556343 -0.0146757 -0.00373205 -0.00529622 -0.0148221 -0.00095512 -0.00506319 -0.0149105 -0.00373205 -0.00509424 -0.0149006 -0.00099555 -0.00488673 -0.0149564 -0.000993564 -0.00548298 -0.0147254 -0.000875633 -0.00477961 -0.0149761 -0.000975412 -0.00481887 -0.0149696 -0.005 -0.00515561 -0.0148794 -0.005 -0.00547157 -0.0147321 -0.005 -0.00571368 -0.0145675 -0.00373205 0.00749141 -0.015 0.000226886 0.00742297 -0.015 0.000675468 0.0107861 -0.015 0.00165385 0.00736351 -0.015 0.000894594 0.00719583 -0.015 0.00131625 0.00708856 -0.015 0.00151636 0.00728766 -0.015 0.0011086 0.0101584 -0.015 0.00319885 0.00998638 -0.015 0.00348563 0.00668068 -0.015 0.00206025 0.00938524 -0.015 0.00428771 0.00651862 -0.015 0.00221928 0.00915825 -0.015 0.00453329 0.00866793 -0.015 0.00498783 0.00891894 -0.015 0.00476687 0.00840588 -0.015 0.00519558 0.0059671 -0.015 0.00261679 0.0081335 -0.015 0.00538959 0.005765 -0.015 0.00272025 0.00726165 -0.015 0.00588415 0.00695537 -0.015 0.0060184 0.00555564 -0.015 0.00280813 0.00664259 -0.015 0.00613672 0.00534024 -0.015 0.00287993 0.00632414 -0.015 0.00623879 0.00600086 -0.015 0.00632435 0.00534325 -0.015 0.00644507 0.00400831 -0.015 0.00648138 0.00376934 -0.015 0.00290966 0.000882288 -0.015 0.0054002 0.00333885 -0.015 0.00276618 0.000609339 -0.015 0.00520699 0.00293493 -0.015 0.0025594 0.000346688 -0.015 0.005 0.00209992 -0.015 0.0017999 0.00197067 -0.015 0.00161323 -0.00245387 -0.015 0.000497867 -0.00263683 -0.015 0.000480913 -0.00447157 -0.015 0.000848979 -0.00296624 -0.015 0.000180621 -0.00458513 -0.015 0.000909879 -0.00462939 -0.015 0.000335628 -0.00470557 -0.015 0.000955672 -0.00486158 -0.015 0.000480459 -0.00513525 -0.015 0.000481361 -0.00533899 -0.015 0.000940789 -0.0053 -0.015 0.0004 -0.00566886 -0.015 0.000743389 -0.0057589 -0.015 0.000651213 -0.00594849 -0.015 0.000316801 -0.00599792 -0.015 6.44264e-05 -0.0055 -0.015 1.6489e-06 -0.00549179 -0.015 -9.02538e-05 -0.00599792 -0.015 -6.44264e-05 0.00160469 -0.015 0.000785594 0.00155357 -0.015 0.000564374 -0.00207489 -0.015 0.000263216 -0.00203376 -0.015 0.000180621 0.00151932 -0.015 0.000339923 0.00150215 -0.015 0.000113524 0.00150215 -0.015 -0.000113524 -0.00200851 -0.015 -9.18746e-05 -0.00207489 -0.015 -0.000263216 -0.00254613 -0.015 -0.000497867 -0.00272287 -0.015 -0.000447582 0.00175629 -0.015 0.00121329 0.00151932 -0.015 -0.000339923 0.00167239 -0.015 -0.00100231 0.00160469 -0.015 -0.000785594 0.00175629 -0.015 -0.00121329 0.00224292 -0.015 -0.00197626 0.00145617 -0.015 -0.00574326 0.00313297 -0.015 -0.00267044 0.00444323 -0.015 -0.00299946 0.00467024 -0.015 -0.00299517 0.00567361 -0.015 -0.00639317 0.00512003 -0.015 -0.00293523 0.00632414 -0.015 -0.00623879 0.00555564 -0.015 -0.00280813 0.005765 -0.015 -0.00272025 0.0059671 -0.015 -0.00261679 0.00668068 -0.015 -0.00206025 0.00708856 -0.015 -0.00151636 0.00736351 -0.015 -0.000894594 0.00756063 -0.015 -0.00573433 0.00998638 -0.015 -0.00348563 0.0075 -0.015 0 0.0103154 -0.015 -0.0029036 0.0107861 -0.015 -0.00165385 0.0109227 -0.015 -0.000999364 0.0109656 -0.015 -0.000667716 0.00726165 -0.015 -0.00588415 -0.00545715 -0.015 0.000889389 -0.00542597 -0.015 -0.000261813 -0.0057589 -0.015 -0.000651213 -0.00583633 -0.015 -0.000548224 -0.00530263 -0.015 -0.000398013 -0.00556772 -0.015 -0.000823223 -0.00533899 -0.015 -0.000940789 -0.00521521 -0.015 -0.000976568 -0.00495551 -0.015 -0.000498016 -0.00470557 -0.015 -0.000955672 -0.00486476 -0.015 -0.000481361 -0.0048309 -0.015 -0.000985598 -0.00458513 -0.015 -0.000909879 -0.00477861 -0.015 -0.000448314 -0.00463161 -0.015 
-0.000338064 -0.00457576 -0.015 -0.000264617 -0.00299149 -0.015 -9.18746e-05 -0.00292511 -0.015 -0.000263216 -0.00280132 -0.015 -0.000399009 -0.0045 -0.015 -1.6489e-06 -0.00521521 -0.015 0.000976568 -0.00447157 -0.015 0.005 -0.0045696 -0.0149976 0.000902639 -0.00451777 -0.0149995 0.001 -0.00464588 -0.0149924 0.005 -0.00470206 -0.0149867 0.001 -0.00481887 -0.0149696 0.005 -0.00499157 -0.0149312 0.000999964 -0.00519637 -0.014864 0.000980529 -0.00529622 -0.0148221 0.00095512 -0.00539218 -0.0147755 0.000919888 -0.00531681 -0.0148126 0.005 -0.00540416 -0.0147693 0.001 -0.00498921 -0.0149319 0.005 -0.00515561 -0.0148794 0.005 -0.00556343 -0.0146757 0.001 -0.00564493 -0.0146196 0.000764241 -0.00585274 -0.0144465 0.001 -0.00571433 -0.014567 0.001 -0.00591216 -0.0143878 -0.000409845 -0.00593521 -0.0143648 -0.000354106 -0.00598367 -0.0143163 -0.000179993 -0.00683553 -0.0134645 -0.005 -0.00595485 -0.0143451 0.000297079 -0.00593521 -0.0143648 0.000354106 -0.00467303 -0.0149898 0.000945035 -0.00477961 -0.0149761 0.000975412 -0.00488673 -0.0149564 0.000993564 -0.00495903 -0.015 0.00099916 -0.00509424 -0.0149006 0.00099555 -0.0048309 -0.015 0.000985598 -0.00508785 -0.015 0.000996134 -0.00556772 -0.015 0.000823223 -0.00556754 -0.014673 0.000823344 -0.00548298 -0.0147254 0.000875633 -0.00577716 -0.0145151 0.000629308 -0.00583485 -0.0144634 0.000550482 -0.0057144 -0.014567 0.000699741 -0.00583633 -0.015 0.000548224 -0.00588579 -0.0144142 0.000464093 -0.00589988 -0.015 0.000436133 -0.00591216 -0.0143878 0.000409845 -0.00597103 -0.014329 0.00023897 -0.00598135 -0.015 0.000192209 -0.00598367 -0.0143163 0.000179993 -0.00599273 -0.0143073 0.00012036 -0.00599818 -0.0143018 6.02899e-05 -0.006 -0.0143 0 -0.00599818 -0.0143018 -6.02899e-05 -0.00599273 -0.0143073 -0.00012036 -0.00598135 -0.015 -0.000192209 -0.00597103 -0.014329 -0.00023897 -0.00594849 -0.015 -0.000316801 -0.00595485 -0.0143451 -0.000297079 -0.00589988 -0.015 -0.000436133 -0.00556754 -0.014673 -0.000823344 -0.00577716 -0.0145151 -0.000629308 -0.00566886 -0.015 -0.000743389 -0.00545715 -0.015 -0.000889389 -0.00499157 -0.0149312 -0.000999964 -0.00508785 -0.015 -0.000996134 -0.00539218 -0.0147755 -0.000919888 -0.00519637 -0.014864 -0.000980529 -0.00495903 -0.015 -0.00099916 -0.00467303 -0.0149898 -0.000945035 -0.0045696 -0.0149976 -0.000902639 -0.00683553 -0.0134645 0.005 -0.00687425 -0.0134253 0 -0.00720916 -0.0130464 0.005 -0.0073778 -0.0128223 0.005 -0.00743169 -0.0127448 0 -0.00773317 -0.0122413 0 -0.00786124 -0.0119771 0 -0.00810459 -0.0113131 0.005 -0.00817464 -0.0110415 0.005 -0.00821267 -0.0108593 0 -0.00822936 -0.0107665 0.005 -0.0082997 -0.00998398 0 -0.0083 -0.00992893 0.005 -0.00707328 -0.0132095 0 -0.0073778 -0.0128223 -0.005 -0.00797358 -0.0117059 0 -0.00801942 -0.0115803 -0.005 -0.00817464 -0.0110415 -0.005 -0.00822936 -0.0107665 -0.005 -0.00828785 -0.0102773 0 -0.00767612 -0.0123475 -0.005 -0.00780484 -0.0120984 -0.005 -0.0081496 -0.0111461 0 -0.00826856 -0.0104888 -0.005 -0.00825881 -0.0105694 0 -0.00806981 -0.0114286 0 -0.00801942 -0.0115803 0.005 -0.00780484 -0.0120984 0.005 -0.00753362 -0.0125891 0.005 -0.00758982 -0.0124975 0 -0.00725931 -0.0129824 0 -0.00702821 -0.0132607 0.005 -0.0083 -0.00992893 -0.005 -0.00683553 0.0134645 -0.005 -0.00683553 0.0134645 0.005 -0.00687425 0.0134253 0 -0.00707328 0.0132095 0 -0.00806981 0.0114286 0 -0.00810459 0.0113131 -0.005 -0.0081496 0.0111461 0 -0.00821267 0.0108593 0 -0.00828785 0.0102773 0 -0.00829213 0.0102093 -0.005 -0.0083 0.00992893 -0.005 -0.00702821 0.0132607 0.005 -0.00720916 0.0130464 
0.005 -0.00725931 0.0129824 0 -0.0073778 0.0128223 0.005 -0.00773317 0.0122413 0 -0.00786124 0.0119771 0 -0.00780484 0.0120984 0.005 -0.0079194 0.0118424 0.005 -0.00810459 0.0113131 0.005 -0.00822936 0.0107665 0.005 -0.00825881 0.0105694 0 -0.0082997 0.00998398 0 -0.00829213 0.0102093 0.005 -0.00767612 0.0123475 0.005 -0.00797358 0.0117059 0 -0.00826856 0.0104888 -0.005 -0.00822936 0.0107665 -0.005 -0.00817464 0.0110415 -0.005 -0.0079194 0.0118424 -0.005 -0.00758982 0.0124975 0 -0.00743169 0.0127448 0 -0.0073778 0.0128223 -0.005 -0.00579748 0.0142898 0 -0.00554129 0.0144332 0 -0.00500592 0.0146736 0 -0.00386943 0.0149588 0 -0.00322893 0.015 0.005 -0.00322893 0.015 -0.005 -0.00328398 0.0149997 0 -0.00628244 0.0139593 0 -0.00612229 0.0140778 -0.005 -0.00604481 0.0141317 0 -0.00564753 0.0143761 -0.005 -0.00514235 0.0146194 -0.005 -0.00488033 0.0147194 -0.005 -0.00472857 0.0147698 0 -0.00461311 0.0148046 -0.005 -0.00434154 0.0148746 -0.005 -0.00406646 0.0149294 -0.005 -0.00378875 0.0149686 -0.005 -0.00588909 0.0142336 -0.005 -0.00444606 0.0148496 0 -0.00415935 0.0149127 0 -0.00350928 0.0149921 0.005 -0.00378875 0.0149686 0.005 -0.00357731 0.0149879 0 -0.00406646 0.0149294 0.005 -0.00461311 0.0148046 0.005 -0.00488033 0.0147194 0.005 -0.00527713 0.0145612 0 -0.00539835 0.0145048 0.005 -0.00588909 0.0142336 0.005 -0.00650954 0.0137733 0 -0.00672533 0.0135742 0 -0.00676447 0.0135355 -0.005 0.000609339 0.015 0.00520699 0.00251824 0.015 -0.000833743 0.002696 0.015 -0.00116964 0.000346688 0.015 -0.005 0.000609339 0.015 -0.00520699 0.000882288 0.015 -0.0054002 0.00145617 0.015 -0.00574326 0.00175557 0.015 -0.00589221 0.0037103 0.015 -0.00199972 0.00237537 0.015 -0.00614296 0.00389013 0.015 -0.00206169 0.00407473 0.015 -0.00210752 0.00367562 0.015 -0.00644751 0.0043423 0.015 -0.00649809 0.00445244 0.015 -0.00214947 0.00467671 0.015 -0.0064976 0.00464259 0.015 -0.00214527 0.00501808 0.015 -0.00208665 0.00567361 0.015 -0.00639317 0.00600086 0.015 -0.00632435 0.0081335 0.015 -0.00538959 0.00600337 0.015 -0.001537 0.00891894 0.015 -0.00476687 0.00915825 0.015 -0.00453329 0.00979985 0.015 -0.00376319 0.00998638 0.015 -0.00348563 0.00644294 0.015 -0.000920587 0.0103154 0.015 -0.0029036 0.0106927 0.015 -0.00197496 0.00661641 0.015 -0.000378547 0.0108628 0.015 -0.00132837 0.0109227 0.015 -0.000999364 0.0109656 0.015 -0.000667716 0.0109227 0.015 0.000999364 0.00657467 0.015 0.000564115 0.0065167 0.015 0.000745267 0.00979985 0.015 0.00376319 0.0081335 0.015 0.00538959 0.00632414 0.015 0.00623879 0.00600086 0.015 0.00632435 0.00567361 0.015 0.00639317 0.00501066 0.015 0.00647991 0.00464259 0.015 0.00214527 0.00445244 0.015 0.00214947 0.00400831 0.015 0.00648138 0.00367562 0.015 0.00644751 0.00301765 0.015 0.00632872 0.00407473 0.015 0.00210752 0.00389013 0.015 0.00206169 0.0037103 0.015 0.00199972 0.00245231 0.015 0.000655333 0.00206225 0.015 0.00602556 0.00353666 0.015 -0.0019221 0.00206225 0.015 -0.00602556 0.00570927 0.015 -0.00177769 0.0065167 0.015 -0.000745267 0.0105829 0.015 -0.00229085 0.00657467 0.015 -0.000564115 0.0107861 0.015 0.00165385 0.00726165 0.015 0.00588415 0.00537738 0.015 0.00196283 0.00520047 0.015 0.00203269 0.00483164 0.015 0.00212427 0.0023521 0.015 9.51018e-05 0.000609339 -0.018 -0.00520699 0.000609339 -0.015 -0.00520699 0.000882288 -0.015 -0.0054002 0.00116481 -0.015 -0.00557912 0.00175557 -0.015 -0.00589221 0.00206225 -0.015 -0.00602556 0.00367562 -0.018 -0.00644751 0.00400831 -0.015 -0.00648138 0.00367562 -0.015 -0.00644751 0.0043423 -0.015 -0.00649809 0.00467671 -0.015 -0.0064976 
0.00467671 -0.018 -0.0064976 0.00501066 -0.015 -0.00647991 0.00534325 -0.018 -0.00644507 0.00534325 -0.015 -0.00644507 0.00695537 -0.015 -0.0060184 0.00726165 -0.018 -0.00588415 0.0078515 -0.015 -0.00556933 0.0081335 -0.015 -0.00538959 0.00840588 -0.015 -0.00519559 0.00866793 -0.018 -0.00498783 0.00891894 -0.018 -0.00476687 0.00891894 -0.015 -0.00476687 0.00938524 -0.018 -0.00428771 0.00938524 -0.015 -0.00428771 0.0101584 -0.015 -0.00319885 0.0104571 -0.018 -0.00260066 0.0105829 -0.018 -0.00229085 0.0105829 -0.015 -0.00229085 0.0108628 -0.015 -0.00132837 0.011 -0.015 0 0.0109914 -0.015 0.000334301 0.0109656 -0.015 0.000667716 0.0109227 -0.015 0.000999364 0.0108628 -0.015 0.00132837 0.0106927 -0.015 0.00197496 0.0106927 -0.018 0.00197496 0.0105829 -0.015 0.00229085 0.0104571 -0.018 0.00260066 0.0104571 -0.015 0.00260066 0.0101584 -0.018 0.00319885 0.00959929 -0.015 0.00403078 0.00756063 -0.015 0.00573433 0.00695537 -0.018 0.0060184 0.00664259 -0.018 0.00613672 0.00567361 -0.015 0.00639317 0.00501066 -0.015 0.00647991 0.00467671 -0.015 0.0064976 0.00334511 -0.015 0.00639658 0.00301765 -0.015 0.00632872 0.00269412 -0.015 0.0062441 0.00269412 -0.018 0.0062441 0.00206225 -0.015 0.00602556 0.00175557 -0.015 0.00589221 0.00145617 -0.018 0.00574326 0.00145617 -0.015 0.00574326 0.00116481 -0.018 0.00557912 0.00116481 -0.018 -0.00557912 0.00175557 -0.018 -0.00589221 0.00237537 -0.015 -0.00614296 0.00269412 -0.015 -0.0062441 0.00301765 -0.018 -0.00632872 0.00301765 -0.015 -0.00632872 0.00334511 -0.015 -0.00639658 0.00567361 -0.018 -0.00639317 0.00600086 -0.015 -0.00632435 0.00664259 -0.015 -0.00613672 0.0078515 -0.018 -0.00556933 0.00840588 -0.018 -0.00519559 0.00866793 -0.015 -0.00498783 0.00915825 -0.015 -0.00453329 0.00915825 -0.018 -0.00453329 0.00959929 -0.015 -0.00403078 0.00959929 -0.018 -0.00403078 0.00979985 -0.015 -0.00376319 0.0101584 -0.018 -0.00319885 0.0103154 -0.018 -0.0029036 0.0104571 -0.015 -0.00260066 0.0106927 -0.015 -0.00197496 0.0107861 -0.018 -0.00165385 0.0108628 -0.018 -0.00132837 0.0109227 -0.018 -0.000999364 0.0109914 -0.015 -0.000334301 0.0109914 -0.018 -0.000334301 0.0109656 -0.018 0.000667716 0.0109227 -0.018 0.000999364 0.0105829 -0.018 0.00229085 0.0103154 -0.015 0.0029036 0.00979985 -0.015 0.00376319 0.00979985 -0.018 0.00376319 0.00938524 -0.018 0.00428771 0.00891894 -0.018 0.00476687 0.00840588 -0.018 0.00519558 0.0081335 -0.018 0.00538959 0.0078515 -0.015 0.00556933 0.0078515 -0.018 0.00556933 0.00726165 -0.018 0.00588415 0.00467671 -0.018 0.0064976 0.0043423 -0.015 0.00649809 0.00367562 -0.015 0.00644751 0.00301765 -0.018 0.00632872 0.00237537 -0.015 0.00614296 0.00237537 -0.018 0.00614296 0.00206225 -0.018 0.00602556 0.00116481 -0.015 0.00557912 0.000346688 0.015 0.005 0.000609339 0.018 0.00520699 0.000882288 0.015 0.0054002 0.00116481 0.015 0.00557912 0.00116481 0.018 0.00557912 0.00145617 0.018 0.00574326 0.00237537 0.015 0.00614296 0.00334511 0.015 0.00639658 0.0043423 0.018 0.00649809 0.0043423 0.015 0.00649809 0.00467671 0.015 0.0064976 0.00534325 0.015 0.00644507 0.00567361 0.018 0.00639317 0.00632414 0.018 0.00623879 0.00664259 0.015 0.00613672 0.00695537 0.015 0.0060184 0.00695537 0.018 0.0060184 0.00726165 0.018 0.00588415 0.00756063 0.015 0.00573433 0.00756063 0.018 0.00573433 0.0078515 0.015 0.00556933 0.00840588 0.018 0.00519558 0.00840588 0.015 0.00519558 0.00866793 0.015 0.00498783 0.00866793 0.018 0.00498783 0.00891894 0.015 0.00476687 0.00915825 0.018 0.00453329 0.00915825 0.015 0.00453329 0.00959929 0.015 0.00403078 0.00979985 0.018 0.00376319 
0.00998638 0.018 0.00348563 0.00145617 0.015 0.00574326 0.00175557 0.015 0.00589221 0.00175557 0.018 0.00589221 0.00269412 0.015 0.0062441 0.00269412 0.018 0.0062441 0.00400831 0.018 0.00648138 0.0078515 0.018 0.00556933 0.00938524 0.015 0.00428771 0.00938524 0.018 0.00428771 0.00959929 0.018 0.00403078 0.00998638 0.015 0.00348563 0.0101584 0.015 0.00319885 0.0103154 0.015 0.0029036 0.0106927 0.015 0.00197496 0.0107861 0.018 0.00165385 0.0109914 0.018 0.000334301 0.0109914 0.015 0.000334301 0.011 0.015 0 0.0109914 0.015 -0.000334301 0.0107861 0.018 -0.00165385 0.0107861 0.015 -0.00165385 0.0104571 0.018 -0.00260066 0.0104571 0.015 -0.00260066 0.0103154 0.018 -0.0029036 0.0101584 0.015 -0.00319885 0.00979985 0.018 -0.00376319 0.00959929 0.015 -0.00403078 0.00959929 0.018 -0.00403078 0.00938524 0.015 -0.00428771 0.00840588 0.015 -0.00519559 0.0078515 0.018 -0.00556933 0.0078515 0.015 -0.00556933 0.00756063 0.018 -0.00573433 0.00756063 0.015 -0.00573433 0.00695537 0.015 -0.0060184 0.00695537 0.018 -0.0060184 0.00664259 0.015 -0.00613672 0.00400831 0.015 -0.00648138 0.00334511 0.015 -0.00639658 0.00334511 0.018 -0.00639658 0.00301765 0.015 -0.00632872 0.00301765 0.018 -0.00632872 0.00175557 0.018 -0.00589221 0.00116481 0.015 -0.00557912 0.0103154 0.018 0.0029036 0.0104571 0.018 0.00260066 0.0104571 0.015 0.00260066 0.0105829 0.015 0.00229085 0.0106927 0.018 0.00197496 0.0108628 0.015 0.00132837 0.0109227 0.018 0.000999364 0.0109656 0.015 0.000667716 0.0109227 0.018 -0.000999364 0.00998638 0.018 -0.00348563 0.00866793 0.015 -0.00498783 0.00866793 0.018 -0.00498783 0.00726165 0.015 -0.00588415 0.00632414 0.015 -0.00623879 0.00567361 0.018 -0.00639317 0.00534325 0.015 -0.00644507 0.00501066 0.015 -0.00647991 0.00501066 0.018 -0.00647991 0.00467671 0.018 -0.0064976 0.00400831 0.018 -0.00648138 0.00269412 0.015 -0.0062441 0.00269412 0.018 -0.0062441 0.00237537 0.018 -0.00614296 0.00145617 0.018 -0.00574326 -0.00484315 0.018 0.005 0.000609339 0.018 -0.00520699 0.000882288 0.018 -0.0054002 0.000882288 0.018 0.0054002 0.00116481 0.018 -0.00557912 0.00206225 0.018 0.00602556 0.0023521 0.018 -9.51018e-05 0.00206225 0.018 -0.00602556 0.00337055 0.018 -0.00182944 0.00353666 0.018 -0.0019221 0.0037103 0.018 -0.00199972 0.000346688 0.018 -0.005 0.0024024 0.018 0.000471793 0.00237537 0.018 0.00614296 0.002696 0.018 0.00116964 0.00306609 0.018 0.001602 0.00301765 0.018 0.00632872 0.00334511 0.018 0.00639658 0.00367562 0.018 0.00644751 0.0037103 0.018 0.00199972 0.00407473 0.018 0.00210752 0.00426265 0.018 0.00213686 0.00483164 0.018 0.00212427 0.00467671 0.018 0.0064976 0.00501808 0.018 0.00208665 0.00501066 0.018 0.00647991 0.00534325 0.018 0.00644507 0.00554742 0.018 0.00187761 0.00586165 0.018 0.00166386 0.00613333 0.018 0.00139812 0.00600086 0.018 0.00632435 0.0062505 0.018 0.0012483 0.00664159 0.018 0.000190017 0.00726165 0.018 -0.00588415 0.0081335 0.018 -0.00538959 0.00840588 0.018 -0.00519559 0.00915825 0.018 -0.00453329 0.00891894 0.018 -0.00476687 0.00938524 0.018 -0.00428771 0.0101584 0.018 -0.00319885 0.0105829 0.018 -0.00229085 0.0106927 0.018 -0.00197496 0.0108628 0.018 -0.00132837 0.0109656 0.018 -0.000667716 0.0109914 0.018 -0.000334301 0.011 0.018 0 0.00664259 0.018 0.00613672 0.0109656 0.018 0.000667716 0.0108628 0.018 0.00132837 0.0105829 0.018 0.00229085 0.0101584 0.018 0.00319885 0.00891894 0.018 0.00476687 0.0081335 0.018 0.00538959 0.00664259 0.018 -0.00613672 0.00632414 0.018 -0.00623879 0.00600086 0.018 -0.00632435 0.00534325 0.018 -0.00644507 0.0043423 0.018 -0.00649809 0.00426265 
0.018 -0.00213686 0.00445244 0.018 -0.00214947 0.00367562 0.018 -0.00644751 0.00407473 0.018 -0.00210752 0.00657467 0.018 0.000564115 0.0065167 0.018 0.000745267 0.00644294 0.018 0.000920587 0.00537738 0.018 -0.00196283 0.00613333 0.018 -0.00139812 0.0062505 0.018 -0.0012483 0.00661641 0.018 -0.000378547 -0.011 -0.0126305 0.0134914 -0.0095 -0.0126305 0.0134914 -0.0095 -0.0127588 0.0134659 -0.011 -0.0127588 0.0134659 -0.011 -0.0132071 0.0132071 -0.0095 -0.013366 0.013 -0.0095 -0.0134239 0.0128827 -0.011 -0.0134659 0.0122412 -0.0095 -0.013366 0.012 -0.0095 -0.0132934 0.0118912 -0.011 -0.013 0.011634 -0.0095 -0.013 0.011634 -0.011 -0.0127588 0.0115341 -0.0095 -0.0126305 0.0115086 -0.011 -0.0125 0.0115 -0.011 -0.0123695 0.0115086 -0.011 -0.0121173 0.0115761 -0.0095 -0.0122412 0.0115341 -0.011 -0.012 0.011634 -0.0095 -0.012 0.011634 -0.0095 -0.0118912 0.0117066 -0.0095 -0.0117929 0.0117929 -0.0095 -0.0117066 0.0118912 -0.011 -0.011634 0.012 -0.0095 -0.0115341 0.0122412 -0.0095 -0.0115 0.0125 -0.011 -0.0115341 0.0127588 -0.011 -0.0115761 0.0128827 -0.0095 -0.0115341 0.0127588 -0.0095 -0.0117066 0.0131088 -0.0095 -0.0117929 0.0132071 -0.0095 -0.012 0.013366 -0.0095 -0.0122412 0.0134659 -0.011 -0.0128827 0.0134239 -0.0095 -0.013 0.013366 -0.011 -0.0132934 0.0131088 -0.011 -0.013366 0.013 -0.011 -0.0134239 0.0128827 -0.011 -0.0134914 0.0126305 -0.011 -0.0135 0.0125 -0.011 -0.0134239 0.0121173 -0.0095 -0.0132071 0.0117929 -0.011 -0.0122412 0.0115341 -0.011 -0.0117929 0.0117929 -0.0095 -0.011634 0.012 -0.0095 -0.0115761 0.0121173 -0.011 -0.0115341 0.0122412 -0.0095 -0.0115761 0.0128827 -0.011 -0.011634 0.013 -0.011 -0.0117929 0.0132071 -0.011 -0.0121173 0.0134239 -0.0095 -0.0121173 0.0134239 -0.0095 -0.0123695 0.0134914 -0.0095 -0.0126305 -0.0115086 -0.011 -0.0128827 -0.0115761 -0.0095 -0.0127588 -0.0115341 -0.011 -0.013 -0.011634 -0.011 -0.0131088 -0.0117066 -0.0095 -0.013 -0.011634 -0.0095 -0.0132071 -0.0117929 -0.011 -0.0134659 -0.0127588 -0.0095 -0.0132934 -0.0131088 -0.0095 -0.013 -0.013366 -0.011 -0.0125 -0.0135 -0.011 -0.0122412 -0.0134659 -0.0095 -0.0123695 -0.0134914 -0.011 -0.0118912 -0.0132934 -0.011 -0.0117929 -0.0132071 -0.0095 -0.0117929 -0.0132071 -0.0095 -0.0115761 -0.0128827 -0.0095 -0.0115086 -0.0126305 -0.0095 -0.0115 -0.0125 -0.0095 -0.0118912 -0.0117066 -0.011 -0.0121173 -0.0115761 -0.0095 -0.0121173 -0.0115761 -0.0095 -0.0125 -0.0115 -0.0095 -0.0128827 -0.0115761 -0.0095 -0.0131088 -0.0117066 -0.011 -0.013366 -0.012 -0.011 -0.0134239 -0.0121173 -0.0095 -0.0134239 -0.0121173 -0.011 -0.0134659 -0.0122412 -0.0095 -0.0134659 -0.0122412 -0.011 -0.0135 -0.0125 -0.011 -0.0134914 -0.0126305 -0.011 -0.0134239 -0.0128827 -0.0095 -0.0134239 -0.0128827 -0.0095 -0.0132071 -0.0132071 -0.011 -0.013 -0.013366 -0.011 -0.0128827 -0.0134239 -0.011 -0.0127588 -0.0134659 -0.0095 -0.0126305 -0.0134914 -0.0095 -0.0125 -0.0135 -0.011 -0.0123695 -0.0134914 -0.011 -0.0121173 -0.0134239 -0.0095 -0.0118912 -0.0132934 -0.011 -0.0117066 -0.0131088 -0.011 -0.011634 -0.013 -0.0095 -0.011634 -0.013 -0.011 -0.0115761 -0.0128827 -0.011 -0.0115086 -0.0126305 -0.011 -0.0115761 -0.0121173 -0.0095 -0.0115761 -0.0121173 -0.011 -0.011634 -0.012 -0.0095 -0.0117066 -0.0118912 -0.011 -0.0117929 -0.0117929 -0.0095 -0.0117929 -0.0117929 -0.011 -0.012 -0.011634 -0.011 0.0123695 0.0134914 -0.0095 0.0123695 0.0134914 -0.0095 0.0122412 0.0134659 -0.011 0.0117929 0.0132071 -0.0095 0.0118912 0.0132934 -0.011 0.0117066 0.0131088 -0.0095 0.0117066 0.0131088 -0.0095 0.011634 0.013 -0.0095 0.0115341 0.0127588 -0.0095 0.0115086 
0.0126305 -0.011 0.0115086 0.0123695 -0.0095 0.0115341 0.0122412 -0.011 0.0121173 0.0115761 -0.0095 0.0121173 0.0115761 -0.011 0.0122412 0.0115341 -0.011 0.0123695 0.0115086 -0.0095 0.013 0.011634 -0.011 0.0132934 0.0118912 -0.0095 0.0132934 0.0118912 -0.011 0.0134239 0.0121173 -0.0095 0.0134239 0.0121173 -0.0095 0.0134914 0.0123695 -0.011 0.0134239 0.0128827 -0.0095 0.0134239 0.0128827 -0.0095 0.0132934 0.0131088 -0.0095 0.0132071 0.0132071 -0.0095 0.0131088 0.0132934 -0.0095 0.0126305 0.0134914 -0.0095 0.012 0.013366 -0.011 0.0115086 0.0126305 -0.011 0.0115 0.0125 -0.0095 0.0115 0.0125 -0.0095 0.0115086 0.0123695 -0.011 0.0115761 0.0121173 -0.011 0.011634 0.012 -0.011 0.0117066 0.0118912 -0.0095 0.0117066 0.0118912 -0.0095 0.0122412 0.0115341 -0.0095 0.0125 0.0115 -0.011 0.0126305 0.0115086 -0.0095 0.0126305 0.0115086 -0.0095 0.0127588 0.0115341 -0.011 0.0128827 0.0115761 -0.0095 0.0128827 0.0115761 -0.011 0.0131088 0.0117066 -0.011 0.013366 0.012 -0.0095 0.013366 0.012 -0.0095 0.0134659 0.0122412 -0.011 0.0134914 0.0123695 -0.0095 0.0135 0.0125 -0.011 0.0134914 0.0126305 -0.0095 0.0134914 0.0126305 -0.0095 0.0134659 0.0127588 -0.011 0.0132934 0.0131088 -0.011 0.0132071 0.0132071 -0.011 0.0131088 0.0132934 -0.011 0.0127588 0.0134659 -0.011 0.0126305 0.0134914 -0.011 0.0125 0.0135 -0.0095 0.0123695 -0.0115086 -0.011 0.0122412 -0.0115341 -0.011 0.0121173 -0.0115761 -0.0095 0.0117929 -0.0117929 -0.0095 0.011634 -0.012 -0.0095 0.0115761 -0.0121173 -0.0095 0.0115086 -0.0123695 -0.0095 0.0115 -0.0125 -0.0095 0.0115341 -0.0127588 -0.0095 0.0115761 -0.0128827 -0.011 0.0117066 -0.0131088 -0.0095 0.0117066 -0.0131088 -0.0095 0.012 -0.013366 -0.011 0.0121173 -0.0134239 -0.0095 0.0121173 -0.0134239 -0.011 0.0127588 -0.0134659 -0.0095 0.0127588 -0.0134659 -0.0095 0.0128827 -0.0134239 -0.011 0.013 -0.013366 -0.0095 0.013 -0.013366 -0.0095 0.0132071 -0.0132071 -0.0095 0.0132934 -0.0131088 -0.0095 0.0134239 -0.0128827 -0.011 0.0134659 -0.0127588 -0.0095 0.0132934 -0.0118912 -0.011 0.0131088 -0.0117066 -0.0095 0.0132071 -0.0117929 -0.0095 0.0125 -0.0115 -0.011 0.0125 -0.0115 -0.011 0.012 -0.011634 -0.0095 0.012 -0.011634 -0.011 0.0118912 -0.0117066 -0.0095 0.0118912 -0.0117066 -0.011 0.011634 -0.012 -0.011 0.0115761 -0.0121173 -0.011 0.0115341 -0.0122412 -0.0095 0.0115086 -0.0126305 -0.011 0.0117929 -0.0132071 -0.011 0.012 -0.013366 -0.0095 0.0122412 -0.0134659 -0.011 0.0123695 -0.0134914 -0.0095 0.0123695 -0.0134914 -0.011 0.0125 -0.0135 -0.011 0.0131088 -0.0132934 -0.011 0.0132934 -0.0131088 -0.0095 0.0135 -0.0125 -0.0095 0.0134914 -0.0123695 -0.011 0.0134659 -0.0122412 -0.011 0.0134239 -0.0121173 -0.011 0.013366 -0.012 -0.0095 0.013366 -0.012 -0.011 0.013 -0.011634 -0.011 0.0128827 -0.0115761 -0.0095 0.0128827 -0.0115761 -0.011 0.0126305 -0.0115086 -0.0105 -0.0045 -0.009 -0.0105 -0.0045 -0.012 -0.0105 0.0045 -0.012 -0.0105 0.0045 -0.009 -0.0095 -0.0125 0.0135 -0.0095 -0.0118912 0.0132934 -0.0095 -0.011634 0.013 -0.0095 -0.003 0.0155 -0.0095 -0.0115086 0.0126305 -0.0095 -0.0115086 0.0123695 -0.0095 -0.0121173 0.0115761 -0.0095 -0.0123695 0.0115086 -0.0095 -0.0125 0.0115 -0.0095 -0.0127588 0.0115341 -0.0095 -0.0128827 0.0115761 -0.0095 -0.0131088 0.0117066 -0.0095 -0.015 0.005 -0.0095 -0.0134239 0.0121173 -0.0095 -0.0134659 0.0122412 -0.0095 -0.0134914 0.0123695 -0.0095 -0.0135 0.0125 -0.0095 -0.0134914 0.0126305 -0.0095 -0.0134659 0.0127588 -0.0095 -0.0132934 0.0131088 -0.0095 -0.0131088 0.0132934 -0.0095 -0.0132071 0.0132071 -0.0095 -0.0128827 0.0134239 -0.0095 -0.0123695 -0.0115086 -0.0095 -0.0122412 
-0.0115341 -0.0095 -0.012 -0.011634 -0.0095 -0.0055 -0.008 -0.0095 -0.011634 -0.012 -0.0095 -0.0115341 -0.0122412 -0.0095 -0.0115086 -0.0123695 -0.0095 -0.0115341 -0.0127588 -0.0095 -0.0117066 -0.0131088 -0.0095 -0.0055 -0.013 -0.0095 -0.012 -0.013366 -0.0095 -0.0121173 -0.0134239 -0.0095 -0.0122412 -0.0134659 -0.0095 -0.0128827 -0.0134239 -0.0095 -0.0127588 -0.0134659 -0.0095 -0.0131088 -0.0132934 -0.0095 -0.013366 -0.013 -0.0095 -0.0155 -0.0155 -0.0095 -0.0134659 -0.0127588 -0.0095 -0.0135 -0.0125 -0.0095 -0.0134914 -0.0126305 -0.0095 -0.0134914 -0.0123695 -0.0095 -0.013366 -0.012 -0.0095 -0.0132934 -0.0118912 -0.0095 -0.0155 0.0155 -0.0095 0.0125 0.0135 -0.0095 0.0127588 0.0134659 -0.0095 0.0128827 0.0134239 -0.0095 0.013 0.013366 -0.0095 0.0155 0.0155 -0.0095 0.013366 0.013 -0.0095 0.0132071 0.0117929 -0.0095 0.0131088 0.0117066 -0.0095 0.0123695 0.0115086 -0.0095 0.015 0.005 -0.0095 0.012 0.011634 -0.0095 0.0118912 0.0117066 -0.0095 0.0117929 0.0117929 -0.0095 0.003 0.005 -0.0095 0.011634 0.012 -0.0095 0.0115761 0.0121173 -0.0095 0.0115761 0.0128827 -0.0095 0.003 0.0155 -0.0095 0.0117929 0.0132071 -0.0095 0.0121173 0.0134239 -0.0095 0.0134659 -0.0122412 -0.0095 0.0134239 -0.0121173 -0.0095 0.0131088 -0.0117066 -0.0095 0.013 -0.011634 -0.0095 0.0126305 -0.0115086 -0.0095 0.0127588 -0.0115341 -0.0095 0.015 -0.005 -0.0095 0.0122412 -0.0115341 -0.0095 0.0121173 -0.0115761 -0.0095 0.0134914 -0.0126305 -0.0095 0.0134659 -0.0127588 -0.0095 0.013366 -0.013 -0.0095 0.0131088 -0.0132934 -0.0095 0.0126305 -0.0134914 -0.0095 0.0125 -0.0135 -0.0095 0.0055 -0.013 -0.0095 0.0118912 -0.0132934 -0.0095 0.0117929 -0.0132071 -0.0095 0.011634 -0.013 -0.0095 0.0115341 -0.0122412 -0.0095 0.0117066 -0.0118912 -0.0095 0.0055 -0.008 -0.0105 -0.002 0.0165 -0.0105 -0.002 0.005 -0.0095 -0.003 0.005 -0.011 -0.002 0.017 -0.0105 0.002 0.0165 -0.0095 0.0155 -0.0155 -0.011 -0.017 -0.017 -0.011 -0.0123695 0.0134914 -0.011 -0.0125 0.0135 -0.011 -0.0122412 0.0134659 -0.011 -0.013 0.013366 -0.011 -0.0131088 0.0132934 -0.011 -0.017 0.017 -0.011 -0.0134659 0.0127588 -0.011 -0.0134914 0.0123695 -0.011 -0.013366 0.012 -0.011 -0.0134914 -0.0123695 -0.011 -0.013366 -0.013 -0.011 -0.0132934 -0.0131088 -0.011 -0.0132071 -0.0132071 -0.011 -0.0131088 -0.0132934 -0.011 -0.0126305 -0.0134914 -0.011 -0.012 -0.013366 -0.011 -0.0115341 -0.0127588 -0.011 -0.0045 -0.012 -0.011 -0.0115 -0.0125 -0.011 -0.0115086 -0.0123695 -0.011 -0.0115341 -0.0122412 -0.011 -0.0117066 -0.0118912 -0.011 -0.0118912 -0.0117066 -0.011 -0.0122412 -0.0115341 -0.011 -0.0123695 -0.0115086 -0.011 -0.0125 -0.0115 -0.011 -0.0118912 0.0117066 -0.011 -0.002 0.005 -0.011 -0.0117066 0.0118912 -0.011 -0.0115761 0.0121173 -0.011 -0.0115086 0.0123695 -0.011 -0.0115 0.0125 -0.011 -0.0115086 0.0126305 -0.011 -0.0117066 0.0131088 -0.011 -0.0118912 0.0132934 -0.011 -0.012 0.013366 -0.011 -0.0132934 0.0118912 -0.011 -0.0132071 0.0117929 -0.011 -0.0131088 0.0117066 -0.011 -0.0126305 0.0115086 -0.011 -0.0126305 -0.0115086 -0.011 -0.0132934 -0.0118912 -0.011 -0.0132071 -0.0117929 -0.011 -0.0128827 0.0115761 -0.011 -0.0127588 -0.0115341 -0.011 -0.0045 -0.009 -0.011 0.0125 0.0115 -0.011 0.012 0.011634 -0.011 0.0118912 0.0117066 -0.011 0.0117929 0.0117929 -0.011 0.0115341 0.0122412 -0.011 0.002 0.005 -0.011 0.002 0.017 -0.011 0.0115341 0.0127588 -0.011 0.0115761 0.0128827 -0.011 0.011634 0.013 -0.011 0.0118912 0.0132934 -0.011 0.012 0.013366 -0.011 0.0121173 0.0134239 -0.011 0.0122412 0.0134659 -0.011 0.017 -0.017 -0.011 0.0118912 -0.0132934 -0.011 0.0115761 -0.0128827 -0.011 
0.011634 -0.013 -0.011 0.0115341 -0.0127588 -0.011 0.0115086 -0.0126305 -0.011 0.0045 -0.012 -0.011 0.0115086 -0.0123695 -0.011 0.0115 -0.0125 -0.011 0.0117066 -0.0118912 -0.011 0.0117929 -0.0117929 -0.011 0.0045 -0.009 -0.011 0.0123695 -0.0115086 -0.011 0.0127588 -0.0115341 -0.011 0.0127588 0.0115341 -0.011 0.013 0.011634 -0.011 0.0134914 -0.0123695 -0.011 0.0134914 -0.0126305 -0.011 0.0135 -0.0125 -0.011 0.0134239 -0.0128827 -0.011 0.013366 -0.013 -0.011 0.0132071 -0.0132071 -0.011 0.0128827 -0.0134239 -0.011 0.0126305 -0.0134914 -0.011 0.0122412 -0.0134659 -0.011 0.0132071 -0.0117929 -0.011 0.0132071 0.0117929 -0.011 0.0132934 -0.0118912 -0.011 0.0134659 0.0122412 -0.011 0.0135 0.0125 -0.011 0.0134659 0.0127588 -0.011 0.017 0.017 -0.011 0.013366 0.013 -0.011 0.013 0.013366 -0.011 0.0128827 0.0134239 0.00665 0.015 0 0.00664159 0.015 0.000190017 0.00661641 0.018 0.000378547 0.00661641 0.015 0.000378547 0.00635398 0.018 0.0010887 0.00635398 0.015 0.0010887 0.00613333 0.015 0.00139812 0.00600337 0.015 0.001537 0.00570927 0.018 0.00177769 0.00586165 0.015 0.00166386 0.00570927 0.015 0.00177769 0.00554742 0.015 0.00187761 0.00537738 0.018 0.00196283 0.00501808 0.015 0.00208665 0.00464259 0.018 0.00214527 0.00426265 0.015 0.00213686 0.00353666 0.015 0.0019221 0.00321328 0.015 0.00172246 0.00306609 0.015 0.001602 0.00293011 0.018 0.001469 0.00293011 0.015 0.001469 0.00280643 0.018 0.0013245 0.00280643 0.015 0.0013245 0.00259968 0.018 0.00100563 0.002696 0.015 0.00116964 0.00259968 0.015 0.00100563 0.00251824 0.015 0.000833743 0.0023521 0.015 -9.51018e-05 0.00236891 0.015 -0.000284561 0.00245231 0.018 -0.000655333 0.00251824 0.018 -0.000833743 0.00245231 0.015 -0.000655333 0.00259968 0.018 -0.00100563 0.002696 0.018 -0.00116964 0.00259968 0.015 -0.00100563 0.00280643 0.015 -0.0013245 0.00293011 0.015 -0.001469 0.00306609 0.015 -0.001602 0.00321328 0.015 -0.00172246 0.00337055 0.015 -0.00182944 0.00389013 0.018 -0.00206169 0.00426265 0.015 -0.00213686 0.00501808 0.018 -0.00208665 0.00483164 0.015 -0.00212427 0.00520047 0.015 -0.00203269 0.00537738 0.015 -0.00196283 0.00554742 0.015 -0.00187761 0.00586165 0.015 -0.00166386 0.00613333 0.015 -0.00139812 0.0062505 0.015 -0.0012483 0.00635398 0.015 -0.0010887 0.00664159 0.015 -0.000190017 0.00644294 0.015 0.000920587 0.0062505 0.015 0.0012483 0.00600337 0.018 0.001537 0.00520047 0.018 0.00203269 0.00445244 0.018 0.00214947 0.00389013 0.018 0.00206169 0.00353666 0.018 0.0019221 0.00337055 0.018 0.00182944 0.00337055 0.015 0.00182944 0.00321328 0.018 0.00172246 0.00251824 0.018 0.000833743 0.00245231 0.018 0.000655333 0.0024024 0.015 0.000471793 0.00236891 0.018 0.000284561 0.00236891 0.015 0.000284561 0.0023521 0.018 9.51018e-05 0.00236891 0.018 -0.000284561 0.0024024 0.018 -0.000471793 0.0024024 0.015 -0.000471793 0.00280643 0.018 -0.0013245 0.00293011 0.018 -0.001469 0.00306609 0.018 -0.001602 0.00321328 0.018 -0.00172246 0.00464259 0.018 -0.00214527 0.00483164 0.018 -0.00212427 0.00520047 0.018 -0.00203269 0.00554742 0.018 -0.00187761 0.00570927 0.018 -0.00177769 0.00586165 0.018 -0.00166386 0.00600337 0.018 -0.001537 0.00635398 0.018 -0.0010887 0.00644294 0.018 -0.000920587 0.0065167 0.018 -0.000745267 0.00657467 0.018 -0.000564115 0.00664159 0.018 -0.000190017 0.00665 0.018 0 0.00525 -0.017 0 0.00524162 -0.017 -0.000111782 0.00521668 -0.017 -0.000221066 0.00517573 -0.018 -0.000325413 0.004875 -0.018 -0.000649519 0.00477401 -0.018 -0.000698155 0.00477401 -0.017 -0.000698155 0.00455605 -0.018 -0.000747903 0.00444395 -0.018 -0.000747903 0.00433311 
-0.018 -0.000731196 0.00433311 -0.017 -0.000731196 0.00395021 -0.018 -0.000510129 0.00388032 -0.017 -0.00042249 0.00382427 -0.018 -0.000325413 0.00375838 -0.018 -0.000111782 0.00375 -0.018 0 0.00382427 -0.018 0.000325413 0.00395021 -0.018 0.000510129 0.00395021 -0.017 0.000510129 0.00444395 -0.017 0.000747903 0.00466689 -0.018 0.000731196 0.00477401 -0.018 0.000698155 0.00466689 -0.017 0.000731196 0.00496762 -0.017 0.000586374 0.00511968 -0.018 0.00042249 0.00504979 -0.017 0.000510129 0.00517573 -0.018 0.000325413 0.00511968 -0.017 0.00042249 0.00525 -0.018 0 0.00524162 -0.018 -0.000111782 0.00511968 -0.018 -0.00042249 0.00466689 -0.018 -0.000731196 0.00422599 -0.017 -0.000698155 0.00403238 -0.018 -0.000586374 0.00388032 -0.018 -0.00042249 0.00382427 -0.017 -0.000325413 0.00375838 -0.017 -0.000111782 0.00375838 -0.018 0.000111782 0.00388032 -0.018 0.00042249 0.00388032 -0.017 0.00042249 0.00422599 -0.018 0.000698155 0.00444395 -0.018 0.000747903 0.00455605 -0.018 0.000747903 0.00455605 -0.017 0.000747903 0.004875 -0.018 0.000649519 0.00496762 -0.018 0.000586374 0.00517573 -0.017 0.000325413 0.00521668 -0.018 0.000221066 -0.002 -0.015 0 -0.00200851 -0.0165 -9.18746e-05 -0.00203376 -0.0165 -0.000180621 -0.00203376 -0.015 -0.000180621 -0.00207489 -0.0165 -0.000263216 -0.00200851 -0.018 -9.18746e-05 -0.002 -0.018 0 -0.00200851 -0.018 9.18746e-05 -0.00207489 -0.018 0.000263216 -0.00219868 -0.018 0.000399009 -0.00254613 -0.0165 0.000497867 -0.00263683 -0.018 0.000480913 -0.00292511 -0.018 0.000263216 -0.00299149 -0.018 9.18746e-05 -0.0028695 -0.018 -0.000336848 -0.00272287 -0.0165 -0.000447582 -0.00280132 -0.018 -0.000399009 -0.00245387 -0.0165 -0.000497867 -0.00254613 -0.018 -0.000497867 -0.0021305 -0.0165 -0.000336848 -0.0021305 -0.015 -0.000336848 -0.00219868 -0.018 -0.000399009 -0.00219868 -0.0165 -0.000399009 -0.00227713 -0.018 -0.000447582 -0.00227713 -0.0165 -0.000447582 -0.00219868 -0.015 -0.000399009 -0.00227713 -0.015 -0.000447582 -0.00236317 -0.018 -0.000480913 -0.00236317 -0.015 -0.000480913 -0.00236317 -0.0165 -0.000480913 -0.00245387 -0.015 -0.000497867 -0.00254613 -0.0165 -0.000497867 -0.00263683 -0.0165 -0.000480913 -0.00272287 -0.018 -0.000447582 -0.00263683 -0.015 -0.000480913 -0.00280132 -0.0165 -0.000399009 -0.0028695 -0.0165 -0.000336848 -0.0028695 -0.015 -0.000336848 -0.00292511 -0.018 -0.000263216 -0.00292511 -0.0165 -0.000263216 -0.00296624 -0.0165 -0.000180621 -0.00296624 -0.015 -0.000180621 -0.00299149 -0.0165 -9.18746e-05 -0.003 -0.0165 0 -0.003 -0.015 0 -0.00299149 -0.0165 9.18746e-05 -0.00296624 -0.0165 0.000180621 -0.00299149 -0.015 9.18746e-05 -0.00292511 -0.0165 0.000263216 -0.00292511 -0.015 0.000263216 -0.0028695 -0.015 0.000336848 -0.0028695 -0.0165 0.000336848 -0.00280132 -0.0165 0.000399009 -0.00280132 -0.015 0.000399009 -0.00272287 -0.0165 0.000447582 -0.00272287 -0.015 0.000447582 -0.00263683 -0.0165 0.000480913 -0.00254613 -0.015 0.000497867 -0.00245387 -0.0165 0.000497867 -0.00236317 -0.018 0.000480913 -0.00236317 -0.0165 0.000480913 -0.00236317 -0.015 0.000480913 -0.00227713 -0.018 0.000447582 -0.00227713 -0.0165 0.000447582 -0.00227713 -0.015 0.000447582 -0.00219868 -0.015 0.000399009 -0.00219868 -0.0165 0.000399009 -0.0021305 -0.015 0.000336848 -0.0021305 -0.0165 0.000336848 -0.00207489 -0.0165 0.000263216 -0.00203376 -0.0165 0.000180621 -0.00200851 -0.015 9.18746e-05 -0.00200851 -0.0165 9.18746e-05 -0.002 -0.0165 0 -0.00676447 0.0135355 0.005 + + + + + + + + + + -0.998832 0 -0.0483122 -0.989506 0 -0.144492 -0.989506 0 -0.144492 -0.970941 0 -0.239317 
-0.970941 0 -0.239317 -0.943313 0 -0.331905 -0.906874 0 -0.421402 -0.861972 0 -0.506957 -0.809015 0 -0.587788 -0.748512 0 -0.663122 -0.681016 0 -0.732269 -0.607162 0 -0.794578 -0.527642 0 -0.849467 -0.44319 0 -0.896428 -0.354605 0 -0.935016 -0.262708 0 -0.964875 -0.168357 0 -0.985726 -0.0724332 0 -0.997373 0.0241614 0 -0.999708 0.120539 0 -0.992709 0.215783 0 -0.976441 0.309018 0 -0.951056 0.399365 0 -0.916792 0.485982 0 -0.873969 0.568065 0 -0.822983 0.644843 0 -0.764315 0.715598 0 -0.698513 0.779676 0 -0.626183 0.83647 0 -0.548013 0.885455 0 -0.464725 0.926176 0 -0.377092 0.958246 0 -0.285946 0.981369 0 -0.19213 0.995332 0 -0.0965147 1 0 0 0.995332 0 0.0965147 0.981369 0 0.19213 0.958246 0 0.285946 0.926176 0 0.377092 0.885455 0 0.464725 0.83647 0 0.548013 0.779676 0 0.626183 0.715597 0 0.698513 0.644845 0 0.764314 0.568065 0 0.822984 0.485983 0 0.873968 0.399363 0 0.916793 0.309019 0 0.951056 0.215783 0 0.976441 0.120536 0 0.992709 0.0241641 0 0.999708 -0.0724359 0 0.997373 -0.168357 0 0.985726 -0.262707 0 0.964876 -0.354606 0 0.935016 -0.443189 0 0.896428 -0.527642 0 0.849467 -0.607162 0 0.794578 -0.681016 0 0.732269 -0.748512 0 0.663122 -0.809015 0 0.587788 -0.861972 0 0.506957 -0.906874 0 0.421402 -0.943313 0 0.331905 -0.970941 0 0.239317 -0.989506 0 0.144492 -0.998832 0 0.0483122 -0.998832 0 -0.0483122 -0.943313 0 -0.331905 -0.906874 0 -0.421402 -0.861972 0 -0.506957 -0.809015 0 -0.587788 -0.748512 0 -0.663122 -0.681016 0 -0.732269 -0.607162 0 -0.794578 -0.527642 0 -0.849467 -0.44319 0 -0.896428 -0.354605 0 -0.935016 -0.262708 0 -0.964875 -0.168357 0 -0.985726 -0.0724332 0 -0.997373 0.0241614 0 -0.999708 0.120539 0 -0.992709 0.215783 0 -0.976441 0.309018 0 -0.951056 0.399365 0 -0.916792 0.485982 0 -0.873969 0.568065 0 -0.822983 0.644843 0 -0.764315 0.715598 0 -0.698513 0.779676 0 -0.626183 0.83647 0 -0.548013 0.885455 0 -0.464725 0.926176 0 -0.377092 0.958246 0 -0.285946 0.981369 0 -0.19213 0.995332 0 -0.0965147 1 0 0 0.995332 0 0.0965147 0.981369 0 0.19213 0.958246 0 0.285946 0.926176 0 0.377092 0.885455 0 0.464725 0.83647 0 0.548013 0.779676 0 0.626183 0.715597 0 0.698513 0.644845 0 0.764314 0.568065 0 0.822984 0.485983 0 0.873968 0.399363 0 0.916793 0.309019 0 0.951056 0.215783 0 0.976441 0.120536 0 0.992709 0.0241641 0 0.999708 -0.0724359 0 0.997373 -0.168357 0 0.985726 -0.262707 0 0.964876 -0.354606 0 0.935016 -0.443189 0 0.896428 -0.527642 0 0.849467 -0.607162 0 0.794578 -0.681016 0 0.732269 -0.748512 0 0.663122 -0.809015 0 0.587788 -0.861972 0 0.506957 -0.906874 0 0.421402 -0.943313 0 0.331905 -0.970941 0 0.239317 -0.989506 0 0.144492 -0.998832 0 0.0483122 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.999284 0 0.0378437 0.99356 0 0.113308 0.99356 0 0.113308 0.982145 0 0.188123 0.982145 0 0.188123 0.965105 0 0.261865 0.942537 0 0.334101 0.914569 0 0.404429 0.881362 0 0.472442 0.84311 0 0.537741 0.800022 0 0.59997 0.752361 0 0.658751 0.700384 0 0.713766 0.644397 0 0.764691 0.584718 0 
0.811237 0.521692 0 0.853134 0.455675 0 0.890146 0.38705 0 0.922059 0.316208 0 0.94869 0.243554 0 0.969887 0.169505 0 0.985529 0.0944862 0 0.995526 0.0189219 0 0.999821 -0.056744 0 0.998389 -0.132092 0 0.991238 -0.206675 0 0.97841 -0.280083 0 0.959976 -0.35188 0 0.936045 -0.421664 0 0.906752 -0.489036 0 0.872264 -0.5536 0 0.832783 -0.615001 0 0.788527 -0.672872 0 0.739759 -0.726893 0 0.68675 -0.77675 0 0.62981 -0.822155 0 0.569264 -0.862854 0 0.505453 -0.898609 0 0.43875 -0.929219 0 0.369531 -0.954503 0 0.2982 -0.974323 0 0.225153 -0.988561 0 0.150825 -0.997136 0 0.0756306 -1 0 0 -0.997136 0 -0.0756306 -0.988561 0 -0.150825 -0.974323 0 -0.225153 -0.954503 0 -0.2982 -0.929219 0 -0.36953 -0.898609 0 -0.438751 -0.862854 0 -0.505453 -0.822155 0 -0.569263 -0.776749 0 -0.62981 -0.726893 0 -0.68675 -0.672872 0 -0.739758 -0.614999 0 -0.788528 -0.553601 0 -0.832782 -0.489035 0 -0.872264 -0.421664 0 -0.906752 -0.351881 0 -0.936045 -0.280082 0 -0.959976 -0.206676 0 -0.978409 -0.132089 0 -0.991238 -0.056746 0 -0.998389 0.0189239 0 -0.999821 0.0944852 0 -0.995526 0.169505 0 -0.985529 0.243553 0 -0.969888 0.316208 0 -0.94869 0.387051 0 -0.922058 0.455675 0 -0.890146 0.521693 0 -0.853133 0.584718 0 -0.811237 0.644398 0 -0.76469 0.700384 0 -0.713766 0.75236 0 -0.658752 0.800022 0 -0.599971 0.843111 0 -0.53774 0.881362 0 -0.472441 0.914569 0 -0.40443 0.942537 0 -0.334101 0.965105 0 -0.261865 0.982145 0 -0.188123 0.99356 0 -0.113308 0.999284 0 -0.0378437 0.999284 0 0.0378437 0.965105 0 0.261865 0.942537 0 0.334101 0.914569 0 0.404429 0.881362 0 0.472442 0.84311 0 0.537741 0.800022 0 0.59997 0.752361 0 0.658751 0.700384 0 0.713766 0.644397 0 0.764691 0.584718 0 0.811237 0.521692 0 0.853134 0.455675 0 0.890146 0.38705 0 0.922059 0.316208 0 0.94869 0.243554 0 0.969887 0.169505 0 0.985529 0.0944862 0 0.995526 0.0189219 0 0.999821 -0.056744 0 0.998389 -0.132092 0 0.991238 -0.206675 0 0.97841 -0.280083 0 0.959976 -0.35188 0 0.936045 -0.421664 0 0.906752 -0.489036 0 0.872264 -0.5536 0 0.832783 -0.615001 0 0.788527 -0.672872 0 0.739759 -0.726893 0 0.68675 -0.77675 0 0.62981 -0.822155 0 0.569264 -0.862854 0 0.505453 -0.898609 0 0.43875 -0.929219 0 0.369531 -0.954503 0 0.2982 -0.974323 0 0.225153 -0.988561 0 0.150825 -0.997136 0 0.0756306 -1 0 0 -0.997136 0 -0.0756306 -0.988561 0 -0.150825 -0.974323 0 -0.225153 -0.954503 0 -0.2982 -0.929219 0 -0.36953 -0.898609 0 -0.438751 -0.862854 0 -0.505453 -0.822155 0 -0.569263 -0.776749 0 -0.62981 -0.726893 0 -0.68675 -0.672872 0 -0.739758 -0.614999 0 -0.788528 -0.553601 0 -0.832782 -0.489035 0 -0.872264 -0.421664 0 -0.906752 -0.351881 0 -0.936045 -0.280082 0 -0.959976 -0.206676 0 -0.978409 -0.132089 0 -0.991238 -0.056746 0 -0.998389 0.0189239 0 -0.999821 0.0944852 0 -0.995526 0.169505 0 -0.985529 0.243553 0 -0.969888 0.316208 0 -0.94869 0.387051 0 -0.922058 0.455675 0 -0.890146 0.521693 0 -0.853133 0.584718 0 -0.811237 0.644398 0 -0.76469 0.700384 0 -0.713766 0.75236 0 -0.658752 0.800022 0 -0.599971 0.843111 0 -0.53774 0.881362 0 -0.472441 0.914569 0 -0.40443 0.942537 0 -0.334101 0.965105 0 -0.261865 0.982145 0 -0.188123 0.99356 0 -0.113308 0.999284 0 -0.0378437 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 
[mesh geometry data omitted: several thousand whitespace-separated values — unit normal vectors followed by triangle index lists — from the model asset file added in this diff]
435 919 1735 920 1737 920 437 920 437 921 1737 921 1758 921 436 922 437 922 1758 922 436 923 1739 923 437 923 437 924 1739 924 438 924 438 925 1739 925 439 925 439 926 1739 926 441 926 441 927 1739 927 945 927 440 928 945 928 976 928 440 929 441 929 945 929 423 930 407 930 448 930 370 931 442 931 420 931 420 932 442 932 560 932 443 933 420 933 560 933 443 934 557 934 420 934 420 935 557 935 555 935 552 936 420 936 555 936 552 937 444 937 420 937 444 938 548 938 448 938 448 939 548 939 546 939 445 940 448 940 546 940 445 941 446 941 448 941 448 942 446 942 447 942 543 943 448 943 447 943 543 944 367 944 448 944 449 945 423 945 422 945 422 946 932 946 450 946 450 947 451 947 988 947 988 948 989 948 986 948 986 949 424 949 425 949 425 950 990 950 1035 950 1035 951 426 951 1034 951 1034 952 452 952 983 952 983 953 427 953 1032 953 1032 954 993 954 453 954 453 955 428 955 429 955 454 956 1722 956 938 956 454 957 1744 957 1722 957 454 958 456 958 1744 958 454 959 455 959 456 959 454 960 1720 960 455 960 454 961 457 961 1720 961 1720 962 457 962 1719 962 1719 963 457 963 1718 963 1718 964 457 964 1742 964 1742 965 457 965 943 965 1716 966 943 966 1715 966 1716 967 1742 967 943 967 1715 968 943 968 458 968 458 969 943 969 460 969 459 970 460 970 1741 970 459 971 458 971 460 971 945 972 1739 972 460 972 460 973 1739 973 1740 973 461 974 460 974 1740 974 461 975 1714 975 460 975 460 976 1714 976 1741 976 996 977 462 977 945 977 945 978 462 978 464 978 463 979 945 979 464 979 463 980 465 980 945 980 945 981 465 981 948 981 466 982 945 982 948 982 466 983 999 983 945 983 945 984 999 984 467 984 1000 985 945 985 467 985 1000 986 952 986 945 986 945 987 952 987 953 987 1003 988 945 988 953 988 1003 989 955 989 945 989 945 990 955 990 1005 990 468 991 945 991 1005 991 468 992 469 992 945 992 945 993 469 993 1007 993 1008 994 945 994 1007 994 1008 995 958 995 945 995 945 996 958 996 959 996 470 997 945 997 959 997 470 998 1011 998 945 998 945 999 1011 999 1012 999 1013 1000 945 1000 1012 1000 1013 1001 471 1001 945 1001 945 1002 471 1002 1015 1002 472 1003 945 1003 1015 1003 472 1004 473 1004 945 1004 945 1005 473 1005 1016 1005 1017 1006 945 1006 1016 1006 1017 1007 474 1007 945 1007 945 1008 474 1008 475 1008 968 1009 945 1009 475 1009 968 1010 1018 1010 945 1010 945 1011 1018 1011 970 1011 476 1012 945 1012 970 1012 476 1013 972 1013 945 1013 945 1014 972 1014 477 1014 1021 1015 945 1015 477 1015 1021 1016 478 1016 945 1016 945 1017 478 1017 1022 1017 479 1018 945 1018 1022 1018 479 1019 1023 1019 945 1019 945 1020 1023 1020 480 1020 1024 1021 945 1021 480 1021 1024 1022 1025 1022 945 1022 945 1023 1025 1023 1027 1023 481 1024 945 1024 1027 1024 481 1025 1028 1025 945 1025 945 1026 1028 1026 975 1026 976 1027 945 1027 975 1027 434 1028 433 1028 482 1028 433 1029 429 1029 1728 1029 1722 1030 1745 1030 938 1030 938 1031 1745 1031 1724 1031 483 1032 938 1032 1724 1032 483 1033 1725 1033 938 1033 484 1034 551 1034 370 1034 484 1035 514 1035 551 1035 484 1036 485 1036 514 1036 514 1037 485 1037 486 1037 494 1038 486 1038 515 1038 493 1039 515 1039 487 1039 488 1040 493 1040 487 1040 488 1041 489 1041 493 1041 488 1042 339 1042 489 1042 489 1043 339 1043 491 1043 637 1044 491 1044 490 1044 637 1045 489 1045 491 1045 637 1046 492 1046 489 1046 489 1047 492 1047 493 1047 493 1048 492 1048 635 1048 494 1049 635 1049 495 1049 514 1050 495 1050 562 1050 551 1051 562 1051 634 1051 496 1052 634 1052 497 1052 561 1053 497 1053 498 1053 559 1054 498 1054 632 1054 558 1055 632 1055 499 1055 550 1056 499 1056 556 1056 
554 1057 556 1057 553 1057 549 1058 553 1058 702 1058 547 1059 702 1059 500 1059 501 1060 500 1060 503 1060 502 1061 503 1061 698 1061 545 1062 698 1062 697 1062 544 1063 697 1063 504 1063 540 1064 504 1064 696 1064 563 1065 696 1065 693 1065 538 1066 693 1066 691 1066 542 1067 691 1067 505 1067 535 1068 505 1068 506 1068 564 1069 506 1069 507 1069 534 1070 507 1070 687 1070 508 1071 687 1071 531 1071 530 1072 531 1072 684 1072 509 1073 684 1073 532 1073 525 1074 532 1074 643 1074 519 1075 643 1075 642 1075 520 1076 642 1076 565 1076 510 1077 565 1077 511 1077 523 1078 511 1078 522 1078 516 1079 522 1079 490 1079 491 1080 516 1080 490 1080 491 1081 512 1081 516 1081 491 1082 513 1082 512 1082 491 1083 339 1083 513 1083 514 1084 486 1084 494 1084 495 1085 514 1085 494 1085 494 1086 515 1086 493 1086 635 1087 494 1087 493 1087 512 1088 340 1088 516 1088 516 1089 340 1089 521 1089 523 1090 521 1090 524 1090 510 1091 524 1091 342 1091 517 1092 510 1092 342 1092 517 1093 520 1093 510 1093 517 1094 518 1094 520 1094 520 1095 518 1095 519 1095 642 1096 520 1096 519 1096 516 1097 521 1097 523 1097 522 1098 516 1098 523 1098 523 1099 524 1099 510 1099 511 1100 523 1100 510 1100 518 1101 349 1101 519 1101 519 1102 349 1102 369 1102 525 1103 369 1103 526 1103 509 1104 526 1104 527 1104 528 1105 509 1105 527 1105 528 1106 530 1106 509 1106 528 1107 529 1107 530 1107 530 1108 529 1108 508 1108 531 1109 530 1109 508 1109 519 1110 369 1110 525 1110 643 1111 519 1111 525 1111 525 1112 526 1112 509 1112 532 1113 525 1113 509 1113 529 1114 533 1114 508 1114 508 1115 533 1115 364 1115 534 1116 364 1116 363 1116 365 1117 534 1117 363 1117 365 1118 564 1118 534 1118 365 1119 366 1119 564 1119 564 1120 366 1120 535 1120 506 1121 564 1121 535 1121 508 1122 364 1122 534 1122 687 1123 508 1123 534 1123 366 1124 536 1124 535 1124 535 1125 536 1125 537 1125 542 1126 537 1126 541 1126 538 1127 541 1127 539 1127 368 1128 538 1128 539 1128 368 1129 563 1129 538 1129 368 1130 367 1130 563 1130 563 1131 367 1131 540 1131 696 1132 563 1132 540 1132 535 1133 537 1133 542 1133 505 1134 535 1134 542 1134 542 1135 541 1135 538 1135 691 1136 542 1136 538 1136 367 1137 543 1137 540 1137 540 1138 543 1138 544 1138 504 1139 540 1139 544 1139 543 1140 447 1140 544 1140 544 1141 447 1141 545 1141 697 1142 544 1142 545 1142 447 1143 446 1143 545 1143 545 1144 446 1144 502 1144 698 1145 545 1145 502 1145 446 1146 445 1146 502 1146 502 1147 445 1147 501 1147 503 1148 502 1148 501 1148 445 1149 546 1149 501 1149 501 1150 546 1150 547 1150 500 1151 501 1151 547 1151 546 1152 548 1152 547 1152 547 1153 548 1153 549 1153 702 1154 547 1154 549 1154 548 1155 444 1155 549 1155 549 1156 444 1156 552 1156 554 1157 552 1157 555 1157 550 1158 555 1158 557 1158 558 1159 557 1159 443 1159 559 1160 443 1160 560 1160 561 1161 560 1161 442 1161 496 1162 442 1162 370 1162 551 1163 496 1163 370 1163 551 1164 634 1164 496 1164 549 1165 552 1165 554 1165 553 1166 549 1166 554 1166 554 1167 555 1167 550 1167 556 1168 554 1168 550 1168 550 1169 557 1169 558 1169 499 1170 550 1170 558 1170 558 1171 443 1171 559 1171 632 1172 558 1172 559 1172 559 1173 560 1173 561 1173 498 1174 559 1174 561 1174 561 1175 442 1175 496 1175 497 1176 561 1176 496 1176 514 1177 562 1177 551 1177 563 1178 693 1178 538 1178 564 1179 507 1179 534 1179 530 1180 684 1180 509 1180 520 1181 565 1181 510 1181 332 1182 576 1182 566 1182 567 1183 566 1183 590 1183 334 1184 590 1184 580 1184 589 1185 580 1185 568 1185 335 1186 568 1186 569 1186 588 1187 569 1187 582 1187 337 1188 582 1188 
570 1188 587 1189 570 1189 571 1189 572 1190 571 1190 575 1190 573 1191 575 1191 574 1191 573 1192 572 1192 575 1192 576 1193 577 1193 566 1193 566 1194 577 1194 759 1194 590 1195 759 1195 578 1195 579 1196 590 1196 578 1196 579 1197 580 1197 590 1197 579 1198 758 1198 580 1198 580 1199 758 1199 585 1199 568 1200 585 1200 764 1200 581 1201 568 1201 764 1201 581 1202 569 1202 568 1202 581 1203 765 1203 569 1203 569 1204 765 1204 582 1204 582 1205 765 1205 583 1205 762 1206 582 1206 583 1206 762 1207 570 1207 582 1207 762 1208 584 1208 570 1208 570 1209 584 1209 586 1209 571 1210 586 1210 767 1210 768 1211 571 1211 767 1211 768 1212 575 1212 571 1212 768 1213 574 1213 575 1213 566 1214 759 1214 590 1214 580 1215 585 1215 568 1215 570 1216 586 1216 571 1216 572 1217 587 1217 571 1217 587 1218 337 1218 570 1218 337 1219 588 1219 582 1219 588 1220 335 1220 569 1220 335 1221 589 1221 568 1221 589 1222 334 1222 580 1222 334 1223 567 1223 590 1223 567 1224 332 1224 566 1224 591 1225 963 1225 677 1225 591 1226 964 1226 963 1226 591 1227 127 1227 964 1227 964 1228 127 1228 965 1228 965 1229 127 1229 592 1229 966 1230 592 1230 593 1230 966 1231 965 1231 592 1231 592 1232 594 1232 593 1232 593 1233 594 1233 967 1233 967 1234 594 1234 597 1234 969 1235 597 1235 595 1235 971 1236 595 1236 596 1236 1019 1237 596 1237 598 1237 1019 1238 971 1238 596 1238 967 1239 597 1239 969 1239 969 1240 595 1240 971 1240 596 1241 131 1241 598 1241 598 1242 131 1242 599 1242 599 1243 131 1243 133 1243 1020 1244 133 1244 973 1244 1020 1245 599 1245 133 1245 133 1246 600 1246 973 1246 973 1247 600 1247 601 1247 601 1248 600 1248 602 1248 603 1249 602 1249 136 1249 605 1250 136 1250 604 1250 605 1251 603 1251 136 1251 601 1252 602 1252 603 1252 136 1253 137 1253 604 1253 604 1254 137 1254 606 1254 606 1255 137 1255 607 1255 608 1256 607 1256 1026 1256 608 1257 606 1257 607 1257 607 1258 609 1258 1026 1258 1026 1259 609 1259 974 1259 974 1260 609 1260 610 1260 610 1261 609 1261 612 1261 611 1262 612 1262 614 1262 613 1263 614 1263 615 1263 613 1264 611 1264 614 1264 610 1265 612 1265 611 1265 614 1266 139 1266 615 1266 615 1267 139 1267 616 1267 616 1268 139 1268 977 1268 977 1269 139 1269 184 1269 617 1270 184 1270 141 1270 978 1271 141 1271 979 1271 978 1272 617 1272 141 1272 977 1273 184 1273 617 1273 141 1274 142 1274 979 1274 979 1275 142 1275 1030 1275 1030 1276 142 1276 618 1276 618 1277 142 1277 185 1277 1031 1278 185 1278 980 1278 1031 1279 618 1279 185 1279 185 1280 187 1280 980 1280 980 1281 187 1281 981 1281 981 1282 187 1282 619 1282 982 1283 619 1283 1033 1283 982 1284 981 1284 619 1284 619 1285 188 1285 1033 1285 1033 1286 188 1286 984 1286 984 1287 188 1287 985 1287 985 1288 188 1288 621 1288 987 1289 621 1289 148 1289 1036 1290 148 1290 620 1290 1036 1291 987 1291 148 1291 985 1292 621 1292 987 1292 148 1293 623 1293 620 1293 620 1294 623 1294 622 1294 622 1295 623 1295 190 1295 624 1296 190 1296 191 1296 192 1297 624 1297 191 1297 192 1298 193 1298 624 1298 624 1299 193 1299 625 1299 626 1300 624 1300 625 1300 626 1301 153 1301 624 1301 624 1302 153 1302 1823 1302 1820 1303 624 1303 1823 1303 1820 1304 627 1304 624 1304 624 1305 627 1305 704 1305 704 1306 627 1306 1816 1306 629 1307 1816 1307 628 1307 1814 1308 629 1308 628 1308 1814 1309 1812 1309 629 1309 629 1310 1812 1310 1809 1310 1808 1311 629 1311 1809 1311 1808 1312 630 1312 629 1312 629 1313 630 1313 556 1313 499 1314 629 1314 556 1314 499 1315 631 1315 629 1315 499 1316 632 1316 631 1316 631 1317 632 1317 633 1317 633 1318 632 1318 498 1318 497 
1319 633 1319 498 1319 497 1320 733 1320 633 1320 497 1321 634 1321 733 1321 733 1322 634 1322 731 1322 731 1323 634 1323 562 1323 495 1324 731 1324 562 1324 495 1325 734 1325 731 1325 495 1326 635 1326 734 1326 734 1327 635 1327 703 1327 703 1328 635 1328 492 1328 636 1329 492 1329 637 1329 490 1330 636 1330 637 1330 490 1331 683 1331 636 1331 490 1332 522 1332 683 1332 683 1333 522 1333 511 1333 735 1334 511 1334 565 1334 642 1335 735 1335 565 1335 642 1336 638 1336 735 1336 642 1337 639 1337 638 1337 642 1338 741 1338 639 1338 642 1339 743 1339 741 1339 642 1340 640 1340 743 1340 642 1341 746 1341 640 1341 642 1342 641 1342 746 1342 642 1343 644 1343 641 1343 642 1344 643 1344 644 1344 644 1345 643 1345 753 1345 753 1346 643 1346 532 1346 755 1347 532 1347 757 1347 755 1348 753 1348 532 1348 622 1349 190 1349 624 1349 1823 1350 153 1350 1824 1350 1824 1351 153 1351 656 1351 1826 1352 656 1352 154 1352 645 1353 1826 1353 154 1353 645 1354 647 1354 1826 1354 645 1355 646 1355 647 1355 647 1356 646 1356 648 1356 648 1357 646 1357 649 1357 1830 1358 649 1358 650 1358 1759 1359 650 1359 651 1359 338 1360 651 1360 933 1360 338 1361 1759 1361 651 1361 338 1362 652 1362 1759 1362 338 1363 1762 1363 652 1363 338 1364 653 1364 1762 1364 338 1365 1779 1365 653 1365 338 1366 1784 1366 1779 1366 338 1367 1785 1367 1784 1367 338 1368 1787 1368 1785 1368 338 1369 1789 1369 1787 1369 338 1370 573 1370 1789 1370 1789 1371 573 1371 654 1371 654 1372 573 1372 574 1372 1793 1373 574 1373 655 1373 1793 1374 654 1374 574 1374 1824 1375 656 1375 1826 1375 648 1376 649 1376 1830 1376 1830 1377 650 1377 1759 1377 657 1378 662 1378 651 1378 657 1379 195 1379 662 1379 662 1380 195 1380 659 1380 658 1381 662 1381 659 1381 658 1382 660 1382 662 1382 662 1383 660 1383 159 1383 196 1384 662 1384 159 1384 196 1385 197 1385 662 1385 662 1386 197 1386 661 1386 199 1387 662 1387 661 1387 199 1388 201 1388 662 1388 662 1389 201 1389 202 1389 203 1390 662 1390 202 1390 203 1391 663 1391 662 1391 662 1392 663 1392 163 1392 936 1393 163 1393 205 1393 937 1394 205 1394 991 1394 937 1395 936 1395 205 1395 662 1396 163 1396 936 1396 205 1397 206 1397 991 1397 991 1398 206 1398 992 1398 992 1399 206 1399 994 1399 994 1400 206 1400 165 1400 995 1401 165 1401 208 1401 940 1402 208 1402 939 1402 940 1403 995 1403 208 1403 994 1404 165 1404 995 1404 208 1405 664 1405 939 1405 939 1406 664 1406 941 1406 941 1407 664 1407 942 1407 942 1408 664 1408 665 1408 944 1409 665 1409 946 1409 944 1410 942 1410 665 1410 665 1411 209 1411 946 1411 946 1412 209 1412 666 1412 666 1413 209 1413 667 1413 997 1414 667 1414 668 1414 997 1415 666 1415 667 1415 667 1416 210 1416 668 1416 668 1417 210 1417 998 1417 998 1418 210 1418 947 1418 947 1419 210 1419 669 1419 682 1420 669 1420 670 1420 675 1421 670 1421 671 1421 211 1422 675 1422 671 1422 211 1423 212 1423 675 1423 675 1424 212 1424 172 1424 672 1425 675 1425 172 1425 672 1426 213 1426 675 1426 675 1427 213 1427 173 1427 673 1428 675 1428 173 1428 673 1429 216 1429 675 1429 675 1430 216 1430 175 1430 674 1431 675 1431 175 1431 674 1432 177 1432 675 1432 675 1433 177 1433 178 1433 217 1434 675 1434 178 1434 217 1435 677 1435 675 1435 675 1436 677 1436 949 1436 949 1437 677 1437 950 1437 950 1438 677 1438 951 1438 951 1439 677 1439 1001 1439 1001 1440 677 1440 954 1440 954 1441 677 1441 1002 1441 1002 1442 677 1442 956 1442 956 1443 677 1443 1004 1443 1004 1444 677 1444 1006 1444 1006 1445 677 1445 676 1445 676 1446 677 1446 957 1446 957 1447 677 1447 678 1447 678 1448 677 1448 1009 1448 1009 1449 
677 1449 960 1449 960 1450 677 1450 1010 1450 1010 1451 677 1451 679 1451 679 1452 677 1452 961 1452 961 1453 677 1453 680 1453 680 1454 677 1454 681 1454 681 1455 677 1455 1014 1455 1014 1456 677 1456 962 1456 962 1457 677 1457 963 1457 947 1458 669 1458 682 1458 682 1459 670 1459 675 1459 683 1460 511 1460 735 1460 532 1461 684 1461 757 1461 757 1462 684 1462 686 1462 686 1463 684 1463 531 1463 685 1464 531 1464 687 1464 760 1465 687 1465 688 1465 760 1466 685 1466 687 1466 686 1467 531 1467 685 1467 687 1468 507 1468 688 1468 688 1469 507 1469 761 1469 761 1470 507 1470 506 1470 689 1471 506 1471 690 1471 689 1472 761 1472 506 1472 506 1473 505 1473 690 1473 690 1474 505 1474 763 1474 763 1475 505 1475 766 1475 766 1476 505 1476 691 1476 694 1477 691 1477 693 1477 692 1478 693 1478 695 1478 692 1479 694 1479 693 1479 766 1480 691 1480 694 1480 693 1481 696 1481 695 1481 695 1482 696 1482 574 1482 574 1483 696 1483 504 1483 697 1484 574 1484 504 1484 697 1485 698 1485 574 1485 574 1486 698 1486 503 1486 500 1487 574 1487 503 1487 500 1488 702 1488 574 1488 574 1489 702 1489 1803 1489 699 1490 574 1490 1803 1490 699 1491 1800 1491 574 1491 574 1492 1800 1492 700 1492 1796 1493 574 1493 700 1493 1796 1494 701 1494 574 1494 574 1495 701 1495 655 1495 702 1496 553 1496 1803 1496 1803 1497 553 1497 1806 1497 1806 1498 553 1498 556 1498 630 1499 1806 1499 556 1499 703 1500 492 1500 636 1500 704 1501 1816 1501 629 1501 662 1502 935 1502 651 1502 651 1503 935 1503 934 1503 933 1504 651 1504 934 1504 704 1505 629 1505 706 1505 705 1506 706 1506 629 1506 705 1507 704 1507 706 1507 705 1508 707 1508 704 1508 705 1509 728 1509 707 1509 707 1510 728 1510 708 1510 709 1511 708 1511 729 1511 730 1512 709 1512 729 1512 730 1513 716 1513 709 1513 730 1514 710 1514 716 1514 716 1515 710 1515 732 1515 717 1516 732 1516 711 1516 714 1517 711 1517 712 1517 713 1518 714 1518 712 1518 713 1519 715 1519 714 1519 713 1520 737 1520 715 1520 715 1521 737 1521 397 1521 714 1522 715 1522 397 1522 728 1523 729 1523 708 1523 716 1524 732 1524 717 1524 717 1525 711 1525 714 1525 397 1526 737 1526 718 1526 395 1527 718 1527 719 1527 721 1528 719 1528 740 1528 738 1529 721 1529 740 1529 738 1530 394 1530 721 1530 738 1531 720 1531 394 1531 738 1532 739 1532 720 1532 720 1533 739 1533 742 1533 392 1534 720 1534 742 1534 392 1535 394 1535 720 1535 737 1536 736 1536 718 1536 718 1537 736 1537 719 1537 395 1538 719 1538 721 1538 394 1539 395 1539 721 1539 395 1540 397 1540 718 1540 709 1541 707 1541 708 1541 576 1542 332 1542 722 1542 722 1543 332 1543 723 1543 723 1544 332 1544 756 1544 756 1545 332 1545 754 1545 754 1546 332 1546 724 1546 724 1547 332 1547 752 1547 752 1548 332 1548 751 1548 751 1549 332 1549 725 1549 750 1550 725 1550 769 1550 749 1551 769 1551 392 1551 748 1552 392 1552 747 1552 748 1553 749 1553 392 1553 751 1554 725 1554 750 1554 750 1555 769 1555 749 1555 742 1556 744 1556 392 1556 392 1557 744 1557 727 1557 726 1558 392 1558 727 1558 726 1559 745 1559 392 1559 392 1560 745 1560 747 1560 629 1561 631 1561 705 1561 705 1562 631 1562 728 1562 728 1563 631 1563 633 1563 729 1564 633 1564 733 1564 730 1565 733 1565 731 1565 710 1566 731 1566 734 1566 732 1567 734 1567 711 1567 732 1568 710 1568 734 1568 728 1569 633 1569 729 1569 729 1570 733 1570 730 1570 730 1571 731 1571 710 1571 734 1572 703 1572 711 1572 711 1573 703 1573 712 1573 712 1574 703 1574 636 1574 713 1575 636 1575 683 1575 737 1576 683 1576 735 1576 736 1577 735 1577 719 1577 736 1578 737 1578 735 1578 712 1579 636 1579 713 1579 713 1580 
683 1580 737 1580 735 1581 638 1581 719 1581 719 1582 638 1582 740 1582 740 1583 638 1583 639 1583 738 1584 639 1584 741 1584 739 1585 741 1585 742 1585 739 1586 738 1586 741 1586 740 1587 639 1587 738 1587 741 1588 743 1588 742 1588 742 1589 743 1589 744 1589 744 1590 743 1590 727 1590 727 1591 743 1591 640 1591 726 1592 640 1592 745 1592 726 1593 727 1593 640 1593 640 1594 746 1594 745 1594 745 1595 746 1595 747 1595 747 1596 746 1596 748 1596 748 1597 746 1597 641 1597 749 1598 641 1598 750 1598 749 1599 748 1599 641 1599 641 1600 644 1600 750 1600 750 1601 644 1601 751 1601 751 1602 644 1602 752 1602 752 1603 644 1603 753 1603 724 1604 753 1604 754 1604 724 1605 752 1605 753 1605 753 1606 755 1606 754 1606 754 1607 755 1607 756 1607 756 1608 755 1608 723 1608 723 1609 755 1609 757 1609 722 1610 757 1610 576 1610 722 1611 723 1611 757 1611 757 1612 686 1612 576 1612 576 1613 686 1613 577 1613 577 1614 686 1614 759 1614 759 1615 686 1615 685 1615 578 1616 685 1616 760 1616 579 1617 760 1617 688 1617 758 1618 688 1618 585 1618 758 1619 579 1619 688 1619 759 1620 685 1620 578 1620 578 1621 760 1621 579 1621 688 1622 761 1622 585 1622 585 1623 761 1623 764 1623 764 1624 761 1624 689 1624 581 1625 689 1625 690 1625 765 1626 690 1626 763 1626 583 1627 763 1627 762 1627 583 1628 765 1628 763 1628 764 1629 689 1629 581 1629 581 1630 690 1630 765 1630 763 1631 766 1631 762 1631 762 1632 766 1632 584 1632 584 1633 766 1633 694 1633 586 1634 694 1634 692 1634 767 1635 692 1635 695 1635 768 1636 695 1636 574 1636 768 1637 767 1637 695 1637 584 1638 694 1638 586 1638 586 1639 692 1639 767 1639 769 1640 725 1640 770 1640 800 1641 770 1641 782 1641 771 1642 782 1642 799 1642 772 1643 799 1643 773 1643 797 1644 773 1644 798 1644 390 1645 798 1645 774 1645 796 1646 774 1646 775 1646 389 1647 775 1647 784 1647 795 1648 784 1648 794 1648 776 1649 794 1649 791 1649 777 1650 791 1650 778 1650 779 1651 778 1651 793 1651 387 1652 793 1652 788 1652 388 1653 788 1653 780 1653 781 1654 780 1654 801 1654 781 1655 388 1655 780 1655 330 1656 782 1656 328 1656 330 1657 799 1657 782 1657 330 1658 783 1658 799 1658 799 1659 783 1659 773 1659 773 1660 783 1660 326 1660 798 1661 326 1661 789 1661 774 1662 789 1662 790 1662 775 1663 790 1663 324 1663 784 1664 324 1664 785 1664 794 1665 785 1665 323 1665 791 1666 323 1666 786 1666 778 1667 786 1667 787 1667 793 1668 787 1668 792 1668 788 1669 792 1669 321 1669 780 1670 321 1670 801 1670 780 1671 788 1671 321 1671 773 1672 326 1672 798 1672 798 1673 789 1673 774 1673 774 1674 790 1674 775 1674 775 1675 324 1675 784 1675 784 1676 785 1676 794 1676 794 1677 323 1677 791 1677 791 1678 786 1678 778 1678 778 1679 787 1679 793 1679 793 1680 792 1680 788 1680 388 1681 387 1681 788 1681 387 1682 779 1682 793 1682 779 1683 777 1683 778 1683 777 1684 776 1684 791 1684 776 1685 795 1685 794 1685 795 1686 389 1686 784 1686 389 1687 796 1687 775 1687 796 1688 390 1688 774 1688 390 1689 797 1689 798 1689 797 1690 772 1690 773 1690 772 1691 771 1691 799 1691 771 1692 800 1692 782 1692 800 1693 769 1693 770 1693 328 1694 782 1694 770 1694 725 1695 328 1695 770 1695 781 1696 801 1696 386 1696 386 1697 801 1697 812 1697 802 1698 803 1698 804 1698 302 1699 804 1699 805 1699 315 1700 805 1700 815 1700 834 1701 815 1701 833 1701 317 1702 833 1702 832 1702 318 1703 832 1703 817 1703 319 1704 817 1704 818 1704 831 1705 818 1705 827 1705 320 1706 827 1706 806 1706 807 1707 806 1707 808 1707 830 1708 808 1708 809 1708 829 1709 809 1709 823 1709 828 1710 823 1710 810 1710 811 1711 810 1711 824 1711 
812 1712 824 1712 386 1712 812 1713 811 1713 824 1713 814 1714 805 1714 813 1714 814 1715 815 1715 805 1715 814 1716 816 1716 815 1716 815 1717 816 1717 833 1717 833 1718 816 1718 378 1718 832 1719 378 1719 826 1719 817 1720 826 1720 819 1720 818 1721 819 1721 820 1721 827 1722 820 1722 383 1722 806 1723 383 1723 821 1723 808 1724 821 1724 384 1724 809 1725 384 1725 822 1725 823 1726 822 1726 385 1726 810 1727 385 1727 825 1727 824 1728 825 1728 386 1728 824 1729 810 1729 825 1729 833 1730 378 1730 832 1730 832 1731 826 1731 817 1731 817 1732 819 1732 818 1732 818 1733 820 1733 827 1733 827 1734 383 1734 806 1734 806 1735 821 1735 808 1735 808 1736 384 1736 809 1736 809 1737 822 1737 823 1737 823 1738 385 1738 810 1738 811 1739 828 1739 810 1739 828 1740 829 1740 823 1740 829 1741 830 1741 809 1741 830 1742 807 1742 808 1742 807 1743 320 1743 806 1743 320 1744 831 1744 827 1744 831 1745 319 1745 818 1745 319 1746 318 1746 817 1746 318 1747 317 1747 832 1747 317 1748 834 1748 833 1748 834 1749 315 1749 815 1749 315 1750 302 1750 805 1750 302 1751 802 1751 804 1751 813 1752 805 1752 804 1752 803 1753 813 1753 804 1753 1833 1754 867 1754 866 1754 376 1755 866 1755 865 1755 374 1756 865 1756 842 1756 373 1757 842 1757 844 1757 864 1758 844 1758 835 1758 380 1759 835 1759 836 1759 863 1760 836 1760 862 1760 371 1761 862 1761 837 1761 861 1762 837 1762 848 1762 860 1763 848 1763 854 1763 403 1764 854 1764 855 1764 859 1765 855 1765 838 1765 857 1766 838 1766 858 1766 856 1767 858 1767 841 1767 839 1768 841 1768 840 1768 839 1769 856 1769 841 1769 304 1770 865 1770 303 1770 304 1771 842 1771 865 1771 304 1772 843 1772 842 1772 842 1773 843 1773 844 1773 844 1774 843 1774 853 1774 835 1775 853 1775 845 1775 836 1776 845 1776 305 1776 862 1777 305 1777 846 1777 837 1778 846 1778 847 1778 848 1779 847 1779 849 1779 854 1780 849 1780 850 1780 855 1781 850 1781 851 1781 838 1782 851 1782 852 1782 858 1783 852 1783 314 1783 841 1784 314 1784 840 1784 841 1785 858 1785 314 1785 844 1786 853 1786 835 1786 835 1787 845 1787 836 1787 836 1788 305 1788 862 1788 862 1789 846 1789 837 1789 837 1790 847 1790 848 1790 848 1791 849 1791 854 1791 854 1792 850 1792 855 1792 855 1793 851 1793 838 1793 838 1794 852 1794 858 1794 856 1795 857 1795 858 1795 857 1796 859 1796 838 1796 859 1797 403 1797 855 1797 403 1798 860 1798 854 1798 860 1799 861 1799 848 1799 861 1800 371 1800 837 1800 371 1801 863 1801 862 1801 863 1802 380 1802 836 1802 380 1803 864 1803 835 1803 864 1804 373 1804 844 1804 373 1805 374 1805 842 1805 374 1806 376 1806 865 1806 376 1807 1833 1807 866 1807 303 1808 865 1808 866 1808 867 1809 303 1809 866 1809 1037 1810 839 1810 931 1810 868 1811 931 1811 1039 1811 868 1812 1037 1812 931 1812 871 1813 1693 1813 840 1813 871 1814 1654 1814 1693 1814 871 1815 869 1815 1654 1815 871 1816 1657 1816 869 1816 871 1817 870 1817 1657 1817 871 1818 1658 1818 870 1818 871 1819 1659 1819 1658 1819 871 1820 1660 1820 1659 1820 871 1821 872 1821 1660 1821 1660 1822 872 1822 1661 1822 1661 1823 872 1823 873 1823 1111 1824 1661 1824 873 1824 1111 1825 1662 1825 1661 1825 1111 1826 874 1826 1662 1826 1662 1827 874 1827 875 1827 920 1828 875 1828 921 1828 876 1829 921 1829 877 1829 1132 1830 876 1830 877 1830 1132 1831 878 1831 876 1831 1132 1832 1108 1832 878 1832 878 1833 1108 1833 879 1833 879 1834 1108 1834 1106 1834 880 1835 879 1835 1106 1835 880 1836 1664 1836 879 1836 880 1837 1105 1837 1664 1837 1664 1838 1105 1838 882 1838 882 1839 1105 1839 881 1839 883 1840 882 1840 881 1840 883 1841 884 1841 882 1841 
883 1842 1128 1842 884 1842 884 1843 1128 1843 1666 1843 1666 1844 1128 1844 1127 1844 886 1845 1666 1845 1127 1845 886 1846 885 1846 1666 1846 886 1847 887 1847 885 1847 885 1848 887 1848 1667 1848 1667 1849 887 1849 1125 1849 1104 1850 1667 1850 1125 1850 1104 1851 1668 1851 1667 1851 1104 1852 1102 1852 1668 1852 1668 1853 1102 1853 1124 1853 1669 1854 1124 1854 1101 1854 922 1855 1101 1855 1099 1855 888 1856 922 1856 1099 1856 888 1857 1670 1857 922 1857 888 1858 1097 1858 1670 1858 1670 1859 1097 1859 889 1859 889 1860 1097 1860 1122 1860 890 1861 889 1861 1122 1861 890 1862 1671 1862 889 1862 890 1863 891 1863 1671 1863 1671 1864 891 1864 1096 1864 1672 1865 1096 1865 1094 1865 892 1866 1672 1866 1094 1866 892 1867 1673 1867 1672 1867 892 1868 893 1868 1673 1868 1673 1869 893 1869 894 1869 894 1870 893 1870 1092 1870 895 1871 894 1871 1092 1871 895 1872 923 1872 894 1872 895 1873 1090 1873 923 1873 923 1874 1090 1874 924 1874 925 1875 924 1875 896 1875 1088 1876 925 1876 896 1876 1088 1877 897 1877 925 1877 1088 1878 898 1878 897 1878 897 1879 898 1879 899 1879 1674 1880 899 1880 900 1880 1086 1881 1674 1881 900 1881 1086 1882 1623 1882 1674 1882 1086 1883 1085 1883 1623 1883 1623 1884 1085 1884 1084 1884 1624 1885 1084 1885 1119 1885 901 1886 1624 1886 1119 1886 901 1887 1626 1887 1624 1887 901 1888 1117 1888 1626 1888 1626 1889 1117 1889 926 1889 902 1890 926 1890 1081 1890 1115 1891 902 1891 1081 1891 1115 1892 903 1892 902 1892 1115 1893 1114 1893 903 1893 903 1894 1114 1894 1080 1894 1079 1895 903 1895 1080 1895 1079 1896 1078 1896 903 1896 903 1897 1078 1897 904 1897 1065 1898 903 1898 904 1898 1065 1899 1075 1899 903 1899 903 1900 1075 1900 1064 1900 1062 1901 903 1901 1064 1901 1062 1902 1060 1902 903 1902 903 1903 1060 1903 1059 1903 905 1904 903 1904 1059 1904 905 1905 1057 1905 903 1905 903 1906 1057 1906 1675 1906 1675 1907 1057 1907 1628 1907 1628 1908 1057 1908 1676 1908 1676 1909 1057 1909 1629 1909 1629 1910 1057 1910 1630 1910 1630 1911 1057 1911 1632 1911 1632 1912 1057 1912 1633 1912 1633 1913 1057 1913 1055 1913 1634 1914 1055 1914 927 1914 928 1915 927 1915 1052 1915 1051 1916 928 1916 1052 1916 1051 1917 929 1917 928 1917 1051 1918 906 1918 929 1918 929 1919 906 1919 907 1919 1636 1920 907 1920 908 1920 930 1921 908 1921 1048 1921 909 1922 930 1922 1048 1922 909 1923 910 1923 930 1923 909 1924 1047 1924 910 1924 910 1925 1047 1925 911 1925 911 1926 1047 1926 1046 1926 912 1927 911 1927 1046 1927 912 1928 1638 1928 911 1928 912 1929 913 1929 1638 1929 1638 1930 913 1930 915 1930 915 1931 913 1931 1044 1931 914 1932 915 1932 1044 1932 914 1933 916 1933 915 1933 914 1934 1071 1934 916 1934 916 1935 1071 1935 917 1935 917 1936 1071 1936 1043 1936 919 1937 917 1937 1043 1937 919 1938 1639 1938 917 1938 919 1939 1683 1939 1639 1939 919 1940 1640 1940 1683 1940 919 1941 1641 1941 1640 1941 919 1942 1643 1942 1641 1942 919 1943 1645 1943 1643 1943 919 1944 1647 1944 1645 1944 919 1945 1648 1945 1647 1945 919 1946 1649 1946 1648 1946 919 1947 918 1947 1649 1947 919 1948 1687 1948 918 1948 919 1949 1689 1949 1687 1949 919 1950 931 1950 1689 1950 919 1951 1069 1951 931 1951 931 1952 1069 1952 1068 1952 1040 1953 931 1953 1068 1953 1040 1954 1039 1954 931 1954 1662 1955 875 1955 920 1955 920 1956 921 1956 876 1956 1668 1957 1124 1957 1669 1957 1669 1958 1101 1958 922 1958 1671 1959 1096 1959 1672 1959 923 1960 924 1960 925 1960 897 1961 899 1961 1674 1961 1623 1962 1084 1962 1624 1962 1626 1963 926 1963 902 1963 1633 1964 1055 1964 1634 1964 1634 1965 927 1965 928 1965 929 
1966 907 1966 1636 1966 1636 1967 908 1967 930 1967 1693 1968 1651 1968 840 1968 840 1969 1651 1969 1650 1969 931 1970 840 1970 1650 1970 931 1971 839 1971 840 1971 423 1972 338 1972 932 1972 932 1973 338 1973 933 1973 451 1974 933 1974 934 1974 989 1975 934 1975 935 1975 424 1976 935 1976 662 1976 990 1977 662 1977 936 1977 426 1978 936 1978 937 1978 452 1979 937 1979 991 1979 427 1980 991 1980 992 1980 993 1981 992 1981 994 1981 428 1982 994 1982 995 1982 938 1983 995 1983 940 1983 939 1984 938 1984 940 1984 939 1985 454 1985 938 1985 939 1986 941 1986 454 1986 454 1987 941 1987 457 1987 457 1988 941 1988 942 1988 943 1989 942 1989 944 1989 460 1990 944 1990 946 1990 945 1991 946 1991 666 1991 996 1992 666 1992 997 1992 462 1993 997 1993 668 1993 464 1994 668 1994 998 1994 463 1995 998 1995 947 1995 465 1996 947 1996 682 1996 948 1997 682 1997 675 1997 466 1998 675 1998 949 1998 999 1999 949 1999 950 1999 467 2000 950 2000 951 2000 1000 2001 951 2001 1001 2001 952 2002 1001 2002 954 2002 953 2003 954 2003 1002 2003 1003 2004 1002 2004 956 2004 955 2005 956 2005 1004 2005 1005 2006 1004 2006 1006 2006 468 2007 1006 2007 676 2007 469 2008 676 2008 957 2008 1007 2009 957 2009 678 2009 1008 2010 678 2010 1009 2010 958 2011 1009 2011 960 2011 959 2012 960 2012 1010 2012 470 2013 1010 2013 679 2013 1011 2014 679 2014 961 2014 1012 2015 961 2015 680 2015 1013 2016 680 2016 681 2016 471 2017 681 2017 1014 2017 1015 2018 1014 2018 962 2018 472 2019 962 2019 963 2019 473 2020 963 2020 964 2020 1016 2021 964 2021 965 2021 1017 2022 965 2022 966 2022 474 2023 966 2023 593 2023 475 2024 593 2024 967 2024 968 2025 967 2025 969 2025 1018 2026 969 2026 971 2026 970 2027 971 2027 1019 2027 476 2028 1019 2028 598 2028 972 2029 598 2029 599 2029 477 2030 599 2030 1020 2030 1021 2031 1020 2031 973 2031 478 2032 973 2032 601 2032 1022 2033 601 2033 603 2033 479 2034 603 2034 605 2034 1023 2035 605 2035 604 2035 480 2036 604 2036 606 2036 1024 2037 606 2037 608 2037 1025 2038 608 2038 1026 2038 1027 2039 1026 2039 974 2039 481 2040 974 2040 610 2040 1028 2041 610 2041 611 2041 975 2042 611 2042 613 2042 976 2043 613 2043 615 2043 440 2044 615 2044 616 2044 441 2045 616 2045 977 2045 439 2046 977 2046 617 2046 438 2047 617 2047 978 2047 437 2048 978 2048 979 2048 1029 2049 979 2049 1030 2049 434 2050 1030 2050 618 2050 433 2051 618 2051 1031 2051 429 2052 1031 2052 980 2052 453 2053 980 2053 981 2053 1032 2054 981 2054 982 2054 983 2055 982 2055 1033 2055 1034 2056 1033 2056 984 2056 1035 2057 984 2057 985 2057 425 2058 985 2058 987 2058 986 2059 987 2059 1036 2059 988 2060 1036 2060 620 2060 450 2061 620 2061 622 2061 422 2062 622 2062 624 2062 449 2063 422 2063 624 2063 932 2064 933 2064 451 2064 451 2065 934 2065 989 2065 989 2066 935 2066 424 2066 424 2067 662 2067 990 2067 990 2068 936 2068 426 2068 426 2069 937 2069 452 2069 452 2070 991 2070 427 2070 427 2071 992 2071 993 2071 993 2072 994 2072 428 2072 428 2073 995 2073 938 2073 457 2074 942 2074 943 2074 943 2075 944 2075 460 2075 460 2076 946 2076 945 2076 945 2077 666 2077 996 2077 996 2078 997 2078 462 2078 462 2079 668 2079 464 2079 464 2080 998 2080 463 2080 463 2081 947 2081 465 2081 465 2082 682 2082 948 2082 948 2083 675 2083 466 2083 466 2084 949 2084 999 2084 999 2085 950 2085 467 2085 467 2086 951 2086 1000 2086 1000 2087 1001 2087 952 2087 952 2088 954 2088 953 2088 953 2089 1002 2089 1003 2089 1003 2090 956 2090 955 2090 955 2091 1004 2091 1005 2091 1005 2092 1006 2092 468 2092 468 2093 676 2093 469 2093 469 2094 957 2094 1007 2094 1007 
2095 678 2095 1008 2095 1008 2096 1009 2096 958 2096 958 2097 960 2097 959 2097 959 2098 1010 2098 470 2098 470 2099 679 2099 1011 2099 1011 2100 961 2100 1012 2100 1012 2101 680 2101 1013 2101 1013 2102 681 2102 471 2102 471 2103 1014 2103 1015 2103 1015 2104 962 2104 472 2104 472 2105 963 2105 473 2105 473 2106 964 2106 1016 2106 1016 2107 965 2107 1017 2107 1017 2108 966 2108 474 2108 474 2109 593 2109 475 2109 475 2110 967 2110 968 2110 968 2111 969 2111 1018 2111 1018 2112 971 2112 970 2112 970 2113 1019 2113 476 2113 476 2114 598 2114 972 2114 972 2115 599 2115 477 2115 477 2116 1020 2116 1021 2116 1021 2117 973 2117 478 2117 478 2118 601 2118 1022 2118 1022 2119 603 2119 479 2119 479 2120 605 2120 1023 2120 1023 2121 604 2121 480 2121 480 2122 606 2122 1024 2122 1024 2123 608 2123 1025 2123 1025 2124 1026 2124 1027 2124 1027 2125 974 2125 481 2125 481 2126 610 2126 1028 2126 1028 2127 611 2127 975 2127 975 2128 613 2128 976 2128 976 2129 615 2129 440 2129 440 2130 616 2130 441 2130 441 2131 977 2131 439 2131 439 2132 617 2132 438 2132 438 2133 978 2133 437 2133 437 2134 979 2134 1029 2134 1029 2135 1030 2135 434 2135 434 2136 618 2136 433 2136 433 2137 1031 2137 429 2137 429 2138 980 2138 453 2138 453 2139 981 2139 1032 2139 1032 2140 982 2140 983 2140 983 2141 1033 2141 1034 2141 1034 2142 984 2142 1035 2142 1035 2143 985 2143 425 2143 425 2144 987 2144 986 2144 986 2145 1036 2145 988 2145 988 2146 620 2146 450 2146 450 2147 622 2147 422 2147 1037 2148 868 2148 404 2148 404 2149 868 2149 1038 2149 1038 2150 868 2150 1039 2150 1139 2151 1039 2151 1040 2151 1041 2152 1040 2152 1068 2152 1042 2153 1068 2153 1069 2153 1070 2154 1069 2154 919 2154 1141 2155 919 2155 1043 2155 1149 2156 1043 2156 1071 2156 1072 2157 1071 2157 914 2157 1152 2158 914 2158 1044 2158 1153 2159 1044 2159 913 2159 1154 2160 913 2160 912 2160 1073 2161 912 2161 1046 2161 1045 2162 1046 2162 1047 2162 1159 2163 1047 2163 909 2163 1161 2164 909 2164 1048 2164 1162 2165 1048 2165 908 2165 1049 2166 908 2166 907 2166 1166 2167 907 2167 906 2167 1050 2168 906 2168 1051 2168 1182 2169 1051 2169 1052 2169 1053 2170 1052 2170 927 2170 1054 2171 927 2171 1055 2171 1056 2172 1055 2172 1057 2172 1074 2173 1057 2173 905 2173 1188 2174 905 2174 1059 2174 1058 2175 1059 2175 1060 2175 1061 2176 1060 2176 1062 2176 1187 2177 1062 2177 1064 2177 1063 2178 1064 2178 1075 2178 1076 2179 1075 2179 1065 2179 1077 2180 1065 2180 904 2180 1066 2181 904 2181 1078 2181 1067 2182 1078 2182 1186 2182 1067 2183 1066 2183 1078 2183 1038 2184 1039 2184 1139 2184 1139 2185 1040 2185 1041 2185 1041 2186 1068 2186 1042 2186 1042 2187 1069 2187 1070 2187 1070 2188 919 2188 1141 2188 1141 2189 1043 2189 1149 2189 1149 2190 1071 2190 1072 2190 1072 2191 914 2191 1152 2191 1152 2192 1044 2192 1153 2192 1153 2193 913 2193 1154 2193 1154 2194 912 2194 1073 2194 1073 2195 1046 2195 1045 2195 1045 2196 1047 2196 1159 2196 1159 2197 909 2197 1161 2197 1161 2198 1048 2198 1162 2198 1162 2199 908 2199 1049 2199 1049 2200 907 2200 1166 2200 1166 2201 906 2201 1050 2201 1050 2202 1051 2202 1182 2202 1182 2203 1052 2203 1053 2203 1053 2204 927 2204 1054 2204 1054 2205 1055 2205 1056 2205 1056 2206 1057 2206 1074 2206 1074 2207 905 2207 1188 2207 1188 2208 1059 2208 1058 2208 1058 2209 1060 2209 1061 2209 1061 2210 1062 2210 1187 2210 1187 2211 1064 2211 1063 2211 1063 2212 1075 2212 1076 2212 1076 2213 1065 2213 1077 2213 1077 2214 904 2214 1066 2214 1078 2215 1079 2215 1186 2215 1186 2216 1079 2216 1112 2216 1112 2217 1079 2217 1080 2217 1113 2218 1080 
2218 1114 2218 1185 2219 1114 2219 1115 2219 1116 2220 1115 2220 1081 2220 1082 2221 1081 2221 926 2221 1184 2222 926 2222 1117 2222 1118 2223 1117 2223 901 2223 1183 2224 901 2224 1119 2224 1083 2225 1119 2225 1084 2225 1181 2226 1084 2226 1085 2226 1180 2227 1085 2227 1086 2227 1179 2228 1086 2228 900 2228 1120 2229 900 2229 899 2229 1178 2230 899 2230 898 2230 1087 2231 898 2231 1088 2231 1177 2232 1088 2232 896 2232 1176 2233 896 2233 924 2233 1089 2234 924 2234 1090 2234 1091 2235 1090 2235 895 2235 1175 2236 895 2236 1092 2236 1121 2237 1092 2237 893 2237 892 2238 1121 2238 893 2238 892 2239 1093 2239 1121 2239 892 2240 1094 2240 1093 2240 1093 2241 1094 2241 1095 2241 1095 2242 1094 2242 1096 2242 1174 2243 1096 2243 891 2243 1172 2244 891 2244 890 2244 1173 2245 890 2245 1122 2245 1123 2246 1122 2246 1097 2246 1171 2247 1097 2247 888 2247 1170 2248 888 2248 1099 2248 1098 2249 1099 2249 1101 2249 1100 2250 1101 2250 1124 2250 1169 2251 1124 2251 1102 2251 1103 2252 1102 2252 1104 2252 1189 2253 1104 2253 1125 2253 1190 2254 1125 2254 887 2254 1191 2255 887 2255 886 2255 1126 2256 886 2256 1127 2256 1192 2257 1127 2257 1128 2257 1129 2258 1128 2258 883 2258 1130 2259 883 2259 881 2259 1193 2260 881 2260 1105 2260 1131 2261 1105 2261 880 2261 1196 2262 880 2262 1106 2262 1107 2263 1106 2263 1108 2263 1109 2264 1108 2264 1132 2264 1133 2265 1132 2265 877 2265 1134 2266 877 2266 921 2266 1143 2267 921 2267 875 2267 1110 2268 875 2268 874 2268 1135 2269 874 2269 1111 2269 1140 2270 1111 2270 873 2270 1138 2271 873 2271 872 2271 1137 2272 872 2272 871 2272 1147 2273 1137 2273 871 2273 1112 2274 1080 2274 1113 2274 1113 2275 1114 2275 1185 2275 1185 2276 1115 2276 1116 2276 1116 2277 1081 2277 1082 2277 1082 2278 926 2278 1184 2278 1184 2279 1117 2279 1118 2279 1118 2280 901 2280 1183 2280 1183 2281 1119 2281 1083 2281 1083 2282 1084 2282 1181 2282 1181 2283 1085 2283 1180 2283 1180 2284 1086 2284 1179 2284 1179 2285 900 2285 1120 2285 1120 2286 899 2286 1178 2286 1178 2287 898 2287 1087 2287 1087 2288 1088 2288 1177 2288 1177 2289 896 2289 1176 2289 1176 2290 924 2290 1089 2290 1089 2291 1090 2291 1091 2291 1091 2292 895 2292 1175 2292 1175 2293 1092 2293 1121 2293 1095 2294 1096 2294 1174 2294 1174 2295 891 2295 1172 2295 1172 2296 890 2296 1173 2296 1173 2297 1122 2297 1123 2297 1123 2298 1097 2298 1171 2298 1171 2299 888 2299 1170 2299 1170 2300 1099 2300 1098 2300 1098 2301 1101 2301 1100 2301 1100 2302 1124 2302 1169 2302 1169 2303 1102 2303 1103 2303 1103 2304 1104 2304 1189 2304 1189 2305 1125 2305 1190 2305 1190 2306 887 2306 1191 2306 1191 2307 886 2307 1126 2307 1126 2308 1127 2308 1192 2308 1192 2309 1128 2309 1129 2309 1129 2310 883 2310 1130 2310 1130 2311 881 2311 1193 2311 1193 2312 1105 2312 1131 2312 1131 2313 880 2313 1196 2313 1196 2314 1106 2314 1107 2314 1107 2315 1108 2315 1109 2315 1109 2316 1132 2316 1133 2316 1133 2317 877 2317 1134 2317 1134 2318 921 2318 1143 2318 1143 2319 875 2319 1110 2319 1110 2320 874 2320 1135 2320 1135 2321 1111 2321 1140 2321 1140 2322 873 2322 1138 2322 1138 2323 872 2323 1137 2323 313 2324 1136 2324 1147 2324 1147 2325 1136 2325 404 2325 1137 2326 404 2326 1038 2326 1138 2327 1038 2327 1139 2327 1140 2328 1139 2328 1041 2328 1135 2329 1041 2329 1042 2329 1110 2330 1042 2330 1070 2330 1143 2331 1070 2331 1141 2331 1142 2332 1141 2332 1690 2332 1142 2333 1143 2333 1141 2333 1142 2334 1691 2334 1143 2334 1143 2335 1691 2335 1692 2335 1652 2336 1143 2336 1692 2336 1652 2337 1653 2337 1143 2337 1143 2338 1653 2338 1655 2338 1656 2339 1143 
2339 1655 2339 1656 2340 1694 2340 1143 2340 1143 2341 1694 2341 1695 2341 1696 2342 1143 2342 1695 2342 1696 2343 1697 2343 1143 2343 1143 2344 1697 2344 1144 2344 1145 2345 1143 2345 1144 2345 1145 2346 1146 2346 1143 2346 1143 2347 1146 2347 1134 2347 1134 2348 1146 2348 1663 2348 1133 2349 1663 2349 1109 2349 1133 2350 1134 2350 1663 2350 1147 2351 404 2351 1137 2351 1137 2352 1038 2352 1138 2352 1138 2353 1139 2353 1140 2353 1140 2354 1041 2354 1135 2354 1135 2355 1042 2355 1110 2355 1110 2356 1070 2356 1143 2356 1690 2357 1141 2357 1688 2357 1688 2358 1141 2358 1149 2358 1148 2359 1149 2359 1686 2359 1148 2360 1688 2360 1149 2360 1072 2361 1646 2361 1149 2361 1072 2362 1150 2362 1646 2362 1072 2363 1644 2363 1150 2363 1072 2364 1642 2364 1644 2364 1072 2365 1152 2365 1642 2365 1642 2366 1152 2366 1151 2366 1151 2367 1152 2367 1684 2367 1684 2368 1152 2368 1153 2368 1682 2369 1153 2369 1681 2369 1682 2370 1684 2370 1153 2370 1153 2371 1154 2371 1681 2371 1681 2372 1154 2372 1155 2372 1155 2373 1154 2373 1680 2373 1680 2374 1154 2374 1073 2374 1156 2375 1073 2375 1157 2375 1156 2376 1680 2376 1073 2376 1073 2377 1045 2377 1157 2377 1157 2378 1045 2378 1679 2378 1679 2379 1045 2379 1637 2379 1637 2380 1045 2380 1159 2380 1158 2381 1159 2381 1161 2381 1160 2382 1161 2382 1678 2382 1160 2383 1158 2383 1161 2383 1637 2384 1159 2384 1158 2384 1161 2385 1162 2385 1678 2385 1678 2386 1162 2386 1635 2386 1635 2387 1162 2387 1163 2387 1163 2388 1162 2388 1049 2388 1631 2389 1049 2389 1164 2389 1631 2390 1163 2390 1049 2390 1049 2391 1166 2391 1164 2391 1164 2392 1166 2392 1677 2392 1677 2393 1166 2393 1165 2393 1165 2394 1166 2394 1167 2394 1167 2395 1166 2395 1050 2395 1627 2396 1050 2396 1200 2396 1627 2397 1167 2397 1050 2397 1182 2398 1168 2398 1050 2398 1182 2399 1710 2399 1168 2399 1182 2400 1103 2400 1710 2400 1182 2401 1169 2401 1103 2401 1182 2402 1100 2402 1169 2402 1182 2403 1098 2403 1100 2403 1182 2404 1170 2404 1098 2404 1182 2405 1171 2405 1170 2405 1182 2406 1123 2406 1171 2406 1182 2407 1173 2407 1123 2407 1182 2408 1172 2408 1173 2408 1182 2409 1174 2409 1172 2409 1182 2410 1095 2410 1174 2410 1182 2411 1093 2411 1095 2411 1182 2412 1121 2412 1093 2412 1182 2413 1175 2413 1121 2413 1182 2414 1091 2414 1175 2414 1182 2415 1089 2415 1091 2415 1182 2416 1176 2416 1089 2416 1182 2417 1177 2417 1176 2417 1182 2418 1087 2418 1177 2418 1182 2419 1178 2419 1087 2419 1182 2420 1120 2420 1178 2420 1182 2421 1179 2421 1120 2421 1182 2422 1180 2422 1179 2422 1182 2423 1181 2423 1180 2423 1182 2424 1083 2424 1181 2424 1182 2425 1183 2425 1083 2425 1182 2426 1118 2426 1183 2426 1182 2427 1184 2427 1118 2427 1182 2428 1082 2428 1184 2428 1182 2429 1116 2429 1082 2429 1182 2430 1185 2430 1116 2430 1182 2431 1113 2431 1185 2431 1182 2432 1112 2432 1113 2432 1182 2433 1186 2433 1112 2433 1182 2434 1067 2434 1186 2434 1182 2435 1066 2435 1067 2435 1182 2436 1077 2436 1066 2436 1182 2437 1076 2437 1077 2437 1182 2438 1063 2438 1076 2438 1182 2439 1187 2439 1063 2439 1182 2440 1061 2440 1187 2440 1182 2441 1058 2441 1061 2441 1182 2442 1188 2442 1058 2442 1182 2443 1074 2443 1188 2443 1182 2444 1056 2444 1074 2444 1182 2445 1054 2445 1056 2445 1182 2446 1053 2446 1054 2446 1189 2447 1700 2447 1103 2447 1189 2448 1190 2448 1700 2448 1700 2449 1190 2449 1665 2449 1665 2450 1190 2450 1191 2450 1699 2451 1191 2451 1126 2451 1192 2452 1699 2452 1126 2452 1192 2453 1698 2453 1699 2453 1192 2454 1129 2454 1698 2454 1698 2455 1129 2455 1130 2455 1195 2456 1130 2456 1193 2456 1131 2457 1195 2457 1193 2457 
1131 2458 1194 2458 1195 2458 1131 2459 1196 2459 1194 2459 1194 2460 1196 2460 1107 2460 1197 2461 1107 2461 1109 2461 1663 2462 1197 2462 1109 2462 1665 2463 1191 2463 1699 2463 1698 2464 1130 2464 1195 2464 1194 2465 1107 2465 1197 2465 1168 2466 1625 2466 1050 2466 1050 2467 1625 2467 1198 2467 1199 2468 1050 2468 1198 2468 1199 2469 1200 2469 1050 2469 1646 2470 1685 2470 1149 2470 1149 2471 1685 2471 1686 2471 1700 2472 1201 2472 1103 2472 1103 2473 1201 2473 1701 2473 1702 2474 1103 2474 1701 2474 1702 2475 1703 2475 1103 2475 1103 2476 1703 2476 1704 2476 1202 2477 1103 2477 1704 2477 1202 2478 1203 2478 1103 2478 1103 2479 1203 2479 1705 2479 1706 2480 1103 2480 1705 2480 1706 2481 1707 2481 1103 2481 1103 2482 1707 2482 1708 2482 1204 2483 1103 2483 1708 2483 1204 2484 1709 2484 1103 2484 1103 2485 1709 2485 1710 2485 1206 2486 1205 2486 1431 2486 1206 2487 1208 2487 1205 2487 1206 2488 1207 2488 1208 2488 1208 2489 1207 2489 1238 2489 1238 2490 1207 2490 1453 2490 1531 2491 1453 2491 1239 2491 1532 2492 1239 2492 1451 2492 1209 2493 1451 2493 1452 2493 1240 2494 1452 2494 1450 2494 1241 2495 1450 2495 1210 2495 1242 2496 1210 2496 1211 2496 1534 2497 1211 2497 1449 2497 1243 2498 1449 2498 1448 2498 1244 2499 1448 2499 1447 2499 1535 2500 1447 2500 1446 2500 1212 2501 1446 2501 1445 2501 1245 2502 1445 2502 1444 2502 1536 2503 1444 2503 1213 2503 1564 2504 1213 2504 1214 2504 1565 2505 1214 2505 1246 2505 1566 2506 1246 2506 1442 2506 1215 2507 1442 2507 1216 2507 1571 2508 1216 2508 1441 2508 1217 2509 1441 2509 1440 2509 1567 2510 1440 2510 1218 2510 1219 2511 1218 2511 1439 2511 1220 2512 1439 2512 1438 2512 1247 2513 1438 2513 1222 2513 1221 2514 1222 2514 1437 2514 1223 2515 1437 2515 1224 2515 1554 2516 1224 2516 1225 2516 1248 2517 1225 2517 1226 2517 1556 2518 1226 2518 1227 2518 1228 2519 1227 2519 1249 2519 1557 2520 1249 2520 1250 2520 1251 2521 1250 2521 1229 2521 1558 2522 1229 2522 1436 2522 1559 2523 1436 2523 1230 2523 1560 2524 1230 2524 1435 2524 1231 2525 1435 2525 1233 2525 1232 2526 1233 2526 1252 2526 1253 2527 1252 2527 1433 2527 1561 2528 1433 2528 1234 2528 1254 2529 1234 2529 1235 2529 1562 2530 1235 2530 1432 2530 1563 2531 1432 2531 1236 2531 1255 2532 1236 2532 1256 2532 1530 2533 1256 2533 1237 2533 1528 2534 1237 2534 1257 2534 1529 2535 1257 2535 1431 2535 1205 2536 1529 2536 1431 2536 1238 2537 1453 2537 1531 2537 1531 2538 1239 2538 1532 2538 1532 2539 1451 2539 1209 2539 1209 2540 1452 2540 1240 2540 1240 2541 1450 2541 1241 2541 1241 2542 1210 2542 1242 2542 1242 2543 1211 2543 1534 2543 1534 2544 1449 2544 1243 2544 1243 2545 1448 2545 1244 2545 1244 2546 1447 2546 1535 2546 1535 2547 1446 2547 1212 2547 1212 2548 1445 2548 1245 2548 1245 2549 1444 2549 1536 2549 1536 2550 1213 2550 1564 2550 1564 2551 1214 2551 1565 2551 1565 2552 1246 2552 1566 2552 1566 2553 1442 2553 1215 2553 1215 2554 1216 2554 1571 2554 1571 2555 1441 2555 1217 2555 1217 2556 1440 2556 1567 2556 1567 2557 1218 2557 1219 2557 1219 2558 1439 2558 1220 2558 1220 2559 1438 2559 1247 2559 1247 2560 1222 2560 1221 2560 1221 2561 1437 2561 1223 2561 1223 2562 1224 2562 1554 2562 1554 2563 1225 2563 1248 2563 1248 2564 1226 2564 1556 2564 1556 2565 1227 2565 1228 2565 1228 2566 1249 2566 1557 2566 1557 2567 1250 2567 1251 2567 1251 2568 1229 2568 1558 2568 1558 2569 1436 2569 1559 2569 1559 2570 1230 2570 1560 2570 1560 2571 1435 2571 1231 2571 1231 2572 1233 2572 1232 2572 1232 2573 1252 2573 1253 2573 1253 2574 1433 2574 1561 2574 1561 2575 1234 2575 1254 2575 1254 2576 1235 
2576 1562 2576 1562 2577 1432 2577 1563 2577 1563 2578 1236 2578 1255 2578 1255 2579 1256 2579 1530 2579 1530 2580 1237 2580 1528 2580 1528 2581 1257 2581 1529 2581 1258 2582 1568 2582 1280 2582 1258 2583 1572 2583 1568 2583 1258 2584 1260 2584 1572 2584 1572 2585 1260 2585 1259 2585 1259 2586 1260 2586 1281 2586 1261 2587 1281 2587 1263 2587 1262 2588 1263 2588 1282 2588 1570 2589 1282 2589 1264 2589 1569 2590 1264 2590 1477 2590 1283 2591 1477 2591 1476 2591 1284 2592 1476 2592 1285 2592 1286 2593 1285 2593 1287 2593 1537 2594 1287 2594 1475 2594 1288 2595 1475 2595 1473 2595 1289 2596 1473 2596 1474 2596 1265 2597 1474 2597 1472 2597 1290 2598 1472 2598 1291 2598 1538 2599 1291 2599 1470 2599 1539 2600 1470 2600 1266 2600 1540 2601 1266 2601 1292 2601 1541 2602 1292 2602 1469 2602 1293 2603 1469 2603 1267 2603 1294 2604 1267 2604 1467 2604 1295 2605 1467 2605 1468 2605 1542 2606 1468 2606 1296 2606 1268 2607 1296 2607 1297 2607 1298 2608 1297 2608 1270 2608 1269 2609 1270 2609 1466 2609 1299 2610 1466 2610 1465 2610 1543 2611 1465 2611 1464 2611 1271 2612 1464 2612 1300 2612 1272 2613 1300 2613 1273 2613 1301 2614 1273 2614 1462 2614 1302 2615 1462 2615 1303 2615 1304 2616 1303 2616 1274 2616 1544 2617 1274 2617 1461 2617 1305 2618 1461 2618 1275 2618 1546 2619 1275 2619 1276 2619 1547 2620 1276 2620 1460 2620 1548 2621 1460 2621 1459 2621 1306 2622 1459 2622 1307 2622 1308 2623 1307 2623 1458 2623 1549 2624 1458 2624 1309 2624 1310 2625 1309 2625 1311 2625 1550 2626 1311 2626 1277 2626 1312 2627 1277 2627 1456 2627 1278 2628 1456 2628 1279 2628 1551 2629 1279 2629 1455 2629 1552 2630 1455 2630 1454 2630 1553 2631 1454 2631 1280 2631 1568 2632 1553 2632 1280 2632 1259 2633 1281 2633 1261 2633 1261 2634 1263 2634 1262 2634 1262 2635 1282 2635 1570 2635 1570 2636 1264 2636 1569 2636 1569 2637 1477 2637 1283 2637 1283 2638 1476 2638 1284 2638 1284 2639 1285 2639 1286 2639 1286 2640 1287 2640 1537 2640 1537 2641 1475 2641 1288 2641 1288 2642 1473 2642 1289 2642 1289 2643 1474 2643 1265 2643 1265 2644 1472 2644 1290 2644 1290 2645 1291 2645 1538 2645 1538 2646 1470 2646 1539 2646 1539 2647 1266 2647 1540 2647 1540 2648 1292 2648 1541 2648 1541 2649 1469 2649 1293 2649 1293 2650 1267 2650 1294 2650 1294 2651 1467 2651 1295 2651 1295 2652 1468 2652 1542 2652 1542 2653 1296 2653 1268 2653 1268 2654 1297 2654 1298 2654 1298 2655 1270 2655 1269 2655 1269 2656 1466 2656 1299 2656 1299 2657 1465 2657 1543 2657 1543 2658 1464 2658 1271 2658 1271 2659 1300 2659 1272 2659 1272 2660 1273 2660 1301 2660 1301 2661 1462 2661 1302 2661 1302 2662 1303 2662 1304 2662 1304 2663 1274 2663 1544 2663 1544 2664 1461 2664 1305 2664 1305 2665 1275 2665 1546 2665 1546 2666 1276 2666 1547 2666 1547 2667 1460 2667 1548 2667 1548 2668 1459 2668 1306 2668 1306 2669 1307 2669 1308 2669 1308 2670 1458 2670 1549 2670 1549 2671 1309 2671 1310 2671 1310 2672 1311 2672 1550 2672 1550 2673 1277 2673 1312 2673 1312 2674 1456 2674 1278 2674 1278 2675 1279 2675 1551 2675 1551 2676 1455 2676 1552 2676 1552 2677 1454 2677 1553 2677 1314 2678 1313 2678 1479 2678 1314 2679 1587 2679 1313 2679 1314 2680 1315 2680 1587 2680 1587 2681 1315 2681 1586 2681 1586 2682 1315 2682 1498 2682 1585 2683 1498 2683 1341 2683 1584 2684 1341 2684 1317 2684 1316 2685 1317 2685 1497 2685 1318 2686 1497 2686 1319 2686 1583 2687 1319 2687 1320 2687 1582 2688 1320 2688 1495 2688 1581 2689 1495 2689 1321 2689 1342 2690 1321 2690 1322 2690 1343 2691 1322 2691 1344 2691 1323 2692 1344 2692 1345 2692 1578 2693 1345 2693 1324 2693 1346 2694 1324 2694 1494 2694 
1347 2695 1494 2695 1493 2695 1348 2696 1493 2696 1349 2696 1577 2697 1349 2697 1491 2697 1576 2698 1491 2698 1490 2698 1575 2699 1490 2699 1489 2699 1325 2700 1489 2700 1326 2700 1327 2701 1326 2701 1350 2701 1328 2702 1350 2702 1487 2702 1574 2703 1487 2703 1351 2703 1352 2704 1351 2704 1353 2704 1602 2705 1353 2705 1354 2705 1355 2706 1354 2706 1356 2706 1603 2707 1356 2707 1329 2707 1357 2708 1329 2708 1486 2708 1614 2709 1486 2709 1485 2709 1330 2710 1485 2710 1331 2710 1358 2711 1331 2711 1359 2711 1332 2712 1359 2712 1333 2712 1616 2713 1333 2713 1360 2713 1361 2714 1360 2714 1334 2714 1617 2715 1334 2715 1362 2715 1363 2716 1362 2716 1364 2716 1618 2717 1364 2717 1365 2717 1335 2718 1365 2718 1336 2718 1620 2719 1336 2719 1484 2719 1366 2720 1484 2720 1337 2720 1367 2721 1337 2721 1338 2721 1368 2722 1338 2722 1339 2722 1621 2723 1339 2723 1482 2723 1622 2724 1482 2724 1481 2724 1369 2725 1481 2725 1480 2725 1370 2726 1480 2726 1340 2726 1371 2727 1340 2727 1479 2727 1313 2728 1371 2728 1479 2728 1586 2729 1498 2729 1585 2729 1585 2730 1341 2730 1584 2730 1584 2731 1317 2731 1316 2731 1316 2732 1497 2732 1318 2732 1318 2733 1319 2733 1583 2733 1583 2734 1320 2734 1582 2734 1582 2735 1495 2735 1581 2735 1581 2736 1321 2736 1342 2736 1342 2737 1322 2737 1343 2737 1343 2738 1344 2738 1323 2738 1323 2739 1345 2739 1578 2739 1578 2740 1324 2740 1346 2740 1346 2741 1494 2741 1347 2741 1347 2742 1493 2742 1348 2742 1348 2743 1349 2743 1577 2743 1577 2744 1491 2744 1576 2744 1576 2745 1490 2745 1575 2745 1575 2746 1489 2746 1325 2746 1325 2747 1326 2747 1327 2747 1327 2748 1350 2748 1328 2748 1328 2749 1487 2749 1574 2749 1574 2750 1351 2750 1352 2750 1352 2751 1353 2751 1602 2751 1602 2752 1354 2752 1355 2752 1355 2753 1356 2753 1603 2753 1603 2754 1329 2754 1357 2754 1357 2755 1486 2755 1614 2755 1614 2756 1485 2756 1330 2756 1330 2757 1331 2757 1358 2757 1358 2758 1359 2758 1332 2758 1332 2759 1333 2759 1616 2759 1616 2760 1360 2760 1361 2760 1361 2761 1334 2761 1617 2761 1617 2762 1362 2762 1363 2762 1363 2763 1364 2763 1618 2763 1618 2764 1365 2764 1335 2764 1335 2765 1336 2765 1620 2765 1620 2766 1484 2766 1366 2766 1366 2767 1337 2767 1367 2767 1367 2768 1338 2768 1368 2768 1368 2769 1339 2769 1621 2769 1621 2770 1482 2770 1622 2770 1622 2771 1481 2771 1369 2771 1369 2772 1480 2772 1370 2772 1370 2773 1340 2773 1371 2773 1372 2774 1600 2774 1399 2774 1372 2775 1373 2775 1600 2775 1372 2776 1506 2776 1373 2776 1373 2777 1506 2777 1374 2777 1374 2778 1506 2778 1507 2778 1401 2779 1507 2779 1402 2779 1403 2780 1402 2780 1404 2780 1598 2781 1404 2781 1375 2781 1597 2782 1375 2782 1519 2782 1405 2783 1519 2783 1376 2783 1406 2784 1376 2784 1377 2784 1407 2785 1377 2785 1518 2785 1595 2786 1518 2786 1378 2786 1596 2787 1378 2787 1379 2787 1593 2788 1379 2788 1408 2788 1592 2789 1408 2789 1380 2789 1590 2790 1380 2790 1381 2790 1591 2791 1381 2791 1517 2791 1382 2792 1517 2792 1383 2792 1409 2793 1383 2793 1516 2793 1589 2794 1516 2794 1515 2794 1410 2795 1515 2795 1384 2795 1385 2796 1384 2796 1386 2796 1612 2797 1386 2797 1411 2797 1412 2798 1411 2798 1413 2798 1414 2799 1413 2799 1513 2799 1611 2800 1513 2800 1512 2800 1387 2801 1512 2801 1388 2801 1610 2802 1388 2802 1389 2802 1390 2803 1389 2803 1391 2803 1415 2804 1391 2804 1511 2804 1609 2805 1511 2805 1392 2805 1416 2806 1392 2806 1393 2806 1608 2807 1393 2807 1510 2807 1607 2808 1510 2808 1394 2808 1395 2809 1394 2809 1509 2809 1605 2810 1509 2810 1508 2810 1606 2811 1508 2811 1417 2811 1604 2812 1417 2812 1418 2812 1419 2813 1418 
2813 1499 2813 1420 2814 1499 2814 1500 2814 1421 2815 1500 2815 1422 2815 1615 2816 1422 2816 1396 2816 1613 2817 1396 2817 1398 2817 1397 2818 1398 2818 1501 2818 1423 2819 1501 2819 1502 2819 1424 2820 1502 2820 1425 2820 1601 2821 1425 2821 1504 2821 1426 2822 1504 2822 1503 2822 1400 2823 1503 2823 1399 2823 1600 2824 1400 2824 1399 2824 1374 2825 1507 2825 1401 2825 1401 2826 1402 2826 1403 2826 1403 2827 1404 2827 1598 2827 1598 2828 1375 2828 1597 2828 1597 2829 1519 2829 1405 2829 1405 2830 1376 2830 1406 2830 1406 2831 1377 2831 1407 2831 1407 2832 1518 2832 1595 2832 1595 2833 1378 2833 1596 2833 1596 2834 1379 2834 1593 2834 1593 2835 1408 2835 1592 2835 1592 2836 1380 2836 1590 2836 1590 2837 1381 2837 1591 2837 1591 2838 1517 2838 1382 2838 1382 2839 1383 2839 1409 2839 1409 2840 1516 2840 1589 2840 1589 2841 1515 2841 1410 2841 1410 2842 1384 2842 1385 2842 1385 2843 1386 2843 1612 2843 1612 2844 1411 2844 1412 2844 1412 2845 1413 2845 1414 2845 1414 2846 1513 2846 1611 2846 1611 2847 1512 2847 1387 2847 1387 2848 1388 2848 1610 2848 1610 2849 1389 2849 1390 2849 1390 2850 1391 2850 1415 2850 1415 2851 1511 2851 1609 2851 1609 2852 1392 2852 1416 2852 1416 2853 1393 2853 1608 2853 1608 2854 1510 2854 1607 2854 1607 2855 1394 2855 1395 2855 1395 2856 1509 2856 1605 2856 1605 2857 1508 2857 1606 2857 1606 2858 1417 2858 1604 2858 1604 2859 1418 2859 1419 2859 1419 2860 1499 2860 1420 2860 1420 2861 1500 2861 1421 2861 1421 2862 1422 2862 1615 2862 1615 2863 1396 2863 1613 2863 1613 2864 1398 2864 1397 2864 1397 2865 1501 2865 1423 2865 1423 2866 1502 2866 1424 2866 1424 2867 1425 2867 1601 2867 1601 2868 1504 2868 1426 2868 1426 2869 1503 2869 1400 2869 1428 2870 1463 2870 1429 2870 1429 2871 1463 2871 1514 2871 1427 2872 1430 2872 1457 2872 1457 2873 1430 2873 1520 2873 1457 2874 1463 2874 1427 2874 1427 2875 1463 2875 1428 2875 1430 2876 1429 2876 1520 2876 1520 2877 1429 2877 1514 2877 1429 2878 1430 2878 1594 2878 1594 2879 1430 2879 1599 2879 1427 2880 1573 2880 1430 2880 1430 2881 1573 2881 1599 2881 1206 2882 1431 2882 1478 2882 1207 2883 1478 2883 1453 2883 1207 2884 1206 2884 1478 2884 1431 2885 1257 2885 1478 2885 1478 2886 1257 2886 1237 2886 1256 2887 1478 2887 1237 2887 1256 2888 1434 2888 1478 2888 1256 2889 1236 2889 1434 2889 1434 2890 1236 2890 1432 2890 1235 2891 1434 2891 1432 2891 1235 2892 1234 2892 1434 2892 1434 2893 1234 2893 1433 2893 1252 2894 1434 2894 1433 2894 1252 2895 1233 2895 1434 2895 1434 2896 1233 2896 1435 2896 1230 2897 1434 2897 1435 2897 1230 2898 1436 2898 1434 2898 1434 2899 1436 2899 1229 2899 1250 2900 1434 2900 1229 2900 1250 2901 1523 2901 1434 2901 1250 2902 1249 2902 1523 2902 1523 2903 1249 2903 1227 2903 1226 2904 1523 2904 1227 2904 1226 2905 1225 2905 1523 2905 1523 2906 1225 2906 1224 2906 1443 2907 1224 2907 1437 2907 1222 2908 1443 2908 1437 2908 1222 2909 1438 2909 1443 2909 1443 2910 1438 2910 1439 2910 1218 2911 1443 2911 1439 2911 1218 2912 1440 2912 1443 2912 1443 2913 1440 2913 1441 2913 1216 2914 1443 2914 1441 2914 1216 2915 1442 2915 1443 2915 1443 2916 1442 2916 1246 2916 1214 2917 1443 2917 1246 2917 1214 2918 1213 2918 1443 2918 1443 2919 1213 2919 1444 2919 1445 2920 1443 2920 1444 2920 1445 2921 1478 2921 1443 2921 1445 2922 1446 2922 1478 2922 1478 2923 1446 2923 1447 2923 1448 2924 1478 2924 1447 2924 1448 2925 1449 2925 1478 2925 1478 2926 1449 2926 1211 2926 1210 2927 1478 2927 1211 2927 1210 2928 1450 2928 1478 2928 1478 2929 1450 2929 1452 2929 1451 2930 1478 2930 1452 2930 1451 2931 1239 2931 1478 2931 
1478 2932 1239 2932 1453 2932 1523 2933 1224 2933 1443 2933 1258 2934 1280 2934 322 2934 1260 2935 322 2935 1281 2935 1260 2936 1258 2936 322 2936 1280 2937 1454 2937 322 2937 322 2938 1454 2938 1455 2938 1279 2939 322 2939 1455 2939 1279 2940 1457 2940 322 2940 1279 2941 1456 2941 1457 2941 1457 2942 1456 2942 1277 2942 1311 2943 1457 2943 1277 2943 1311 2944 1309 2944 1457 2944 1457 2945 1309 2945 1458 2945 1307 2946 1457 2946 1458 2946 1307 2947 1463 2947 1457 2947 1307 2948 1459 2948 1463 2948 1463 2949 1459 2949 1460 2949 1276 2950 1463 2950 1460 2950 1276 2951 1275 2951 1463 2951 1463 2952 1275 2952 1461 2952 1274 2953 1463 2953 1461 2953 1274 2954 1303 2954 1463 2954 1463 2955 1303 2955 1462 2955 1273 2956 1463 2956 1462 2956 1273 2957 1300 2957 1463 2957 1463 2958 1300 2958 1464 2958 1465 2959 1463 2959 1464 2959 1465 2960 1471 2960 1463 2960 1465 2961 1466 2961 1471 2961 1471 2962 1466 2962 1270 2962 1297 2963 1471 2963 1270 2963 1297 2964 1296 2964 1471 2964 1471 2965 1296 2965 1468 2965 1467 2966 1471 2966 1468 2966 1467 2967 1267 2967 1471 2967 1471 2968 1267 2968 1469 2968 1292 2969 1471 2969 1469 2969 1292 2970 1266 2970 1471 2970 1471 2971 1266 2971 1470 2971 1291 2972 1471 2972 1470 2972 1291 2973 1472 2973 1471 2973 1471 2974 1472 2974 1474 2974 1473 2975 1471 2975 1474 2975 1473 2976 1475 2976 1471 2976 1471 2977 1475 2977 1287 2977 322 2978 1287 2978 1285 2978 1476 2979 322 2979 1285 2979 1476 2980 1477 2980 322 2980 322 2981 1477 2981 1264 2981 1282 2982 322 2982 1264 2982 1282 2983 1263 2983 322 2983 322 2984 1263 2984 1281 2984 1471 2985 1287 2985 322 2985 1443 2986 1471 2986 322 2986 1443 2987 1478 2987 1471 2987 1314 2988 1479 2988 1483 2988 1315 2989 1483 2989 1498 2989 1315 2990 1314 2990 1483 2990 1479 2991 1340 2991 1483 2991 1483 2992 1340 2992 1480 2992 1481 2993 1483 2993 1480 2993 1481 2994 1482 2994 1483 2994 1483 2995 1482 2995 1339 2995 1338 2996 1483 2996 1339 2996 1338 2997 1337 2997 1483 2997 1483 2998 1337 2998 1484 2998 1336 2999 1483 2999 1484 2999 1336 3000 1365 3000 1483 3000 1483 3001 1365 3001 1364 3001 1362 3002 1483 3002 1364 3002 1362 3003 1334 3003 1483 3003 1483 3004 1334 3004 1360 3004 1488 3005 1360 3005 1333 3005 1359 3006 1488 3006 1333 3006 1359 3007 1331 3007 1488 3007 1488 3008 1331 3008 1485 3008 1486 3009 1488 3009 1485 3009 1486 3010 1329 3010 1488 3010 1488 3011 1329 3011 1356 3011 1354 3012 1488 3012 1356 3012 1354 3013 1353 3013 1488 3013 1488 3014 1353 3014 1351 3014 1487 3015 1488 3015 1351 3015 1487 3016 1350 3016 1488 3016 1488 3017 1350 3017 1326 3017 1489 3018 1488 3018 1326 3018 1489 3019 1492 3019 1488 3019 1489 3020 1490 3020 1492 3020 1492 3021 1490 3021 1491 3021 1349 3022 1492 3022 1491 3022 1349 3023 1493 3023 1492 3023 1492 3024 1493 3024 1494 3024 1496 3025 1494 3025 1324 3025 1345 3026 1496 3026 1324 3026 1345 3027 1344 3027 1496 3027 1496 3028 1344 3028 1322 3028 1321 3029 1496 3029 1322 3029 1321 3030 1495 3030 1496 3030 1496 3031 1495 3031 1320 3031 1319 3032 1496 3032 1320 3032 1319 3033 1497 3033 1496 3033 1496 3034 1497 3034 1317 3034 1341 3035 1496 3035 1317 3035 1341 3036 1498 3036 1496 3036 1496 3037 1498 3037 1483 3037 1483 3038 1360 3038 1488 3038 1505 3039 1483 3039 1488 3039 1505 3040 1526 3040 1483 3040 1505 3041 1499 3041 1526 3041 1505 3042 1500 3042 1499 3042 1505 3043 1422 3043 1500 3043 1505 3044 1396 3044 1422 3044 1505 3045 1398 3045 1396 3045 1505 3046 1501 3046 1398 3046 1505 3047 1502 3047 1501 3047 1505 3048 1425 3048 1502 3048 1505 3049 1504 3049 1425 3049 1505 3050 1503 3050 1504 3050 
1505 3051 1399 3051 1503 3051 1505 3052 1372 3052 1399 3052 1505 3053 1506 3053 1372 3053 1505 3054 1507 3054 1506 3054 1505 3055 1520 3055 1507 3055 1505 3056 322 3056 1520 3056 1520 3057 322 3057 1457 3057 1492 3058 1494 3058 1496 3058 1499 3059 1418 3059 1526 3059 1526 3060 1418 3060 1417 3060 1508 3061 1526 3061 1417 3061 1508 3062 1509 3062 1526 3062 1526 3063 1509 3063 1394 3063 1510 3064 1526 3064 1394 3064 1510 3065 1393 3065 1526 3065 1526 3066 1393 3066 1392 3066 1511 3067 1526 3067 1392 3067 1511 3068 1391 3068 1526 3068 1526 3069 1391 3069 1389 3069 1388 3070 1526 3070 1389 3070 1388 3071 1512 3071 1526 3071 1526 3072 1512 3072 1513 3072 1413 3073 1526 3073 1513 3073 1413 3074 1411 3074 1526 3074 1526 3075 1411 3075 1386 3075 1514 3076 1386 3076 1384 3076 1515 3077 1514 3077 1384 3077 1515 3078 1516 3078 1514 3078 1514 3079 1516 3079 1383 3079 1517 3080 1514 3080 1383 3080 1517 3081 1381 3081 1514 3081 1514 3082 1381 3082 1380 3082 1408 3083 1514 3083 1380 3083 1408 3084 1379 3084 1514 3084 1514 3085 1379 3085 1378 3085 1518 3086 1514 3086 1378 3086 1518 3087 1377 3087 1514 3087 1514 3088 1377 3088 1520 3088 1520 3089 1377 3089 1376 3089 1519 3090 1520 3090 1376 3090 1519 3091 1375 3091 1520 3091 1520 3092 1375 3092 1404 3092 1402 3093 1520 3093 1404 3093 1402 3094 1507 3094 1520 3094 1526 3095 1386 3095 1514 3095 1471 3096 1514 3096 1463 3096 1471 3097 1526 3097 1514 3097 1525 3098 402 3098 1496 3098 1496 3099 402 3099 1492 3099 1521 3100 1434 3100 1522 3100 1522 3101 1434 3101 1523 3101 1434 3102 1521 3102 1478 3102 1478 3103 1521 3103 1533 3103 1533 3104 1521 3104 1524 3104 1522 3105 1555 3105 1521 3105 1521 3106 1555 3106 1524 3106 1429 3107 1594 3107 1428 3107 1428 3108 1594 3108 1545 3108 1427 3109 1428 3109 1573 3109 1573 3110 1428 3110 1545 3110 402 3111 1525 3111 1579 3111 1579 3112 1525 3112 1580 3112 1496 3113 1483 3113 1525 3113 1525 3114 1483 3114 1619 3114 1580 3115 1525 3115 1619 3115 1588 3116 1619 3116 1526 3116 1526 3117 1619 3117 1483 3117 1588 3118 1526 3118 1527 3118 1527 3119 1526 3119 1471 3119 1533 3120 1527 3120 1478 3120 1478 3121 1527 3121 1471 3121 1528 3122 1529 3122 1533 3122 1530 3123 1533 3123 1524 3123 1255 3124 1524 3124 1563 3124 1255 3125 1530 3125 1524 3125 1529 3126 1205 3126 1533 3126 1533 3127 1205 3127 1208 3127 1238 3128 1533 3128 1208 3128 1238 3129 1531 3129 1533 3129 1533 3130 1531 3130 1532 3130 1209 3131 1533 3131 1532 3131 1209 3132 1240 3132 1533 3132 1533 3133 1240 3133 1241 3133 1242 3134 1533 3134 1241 3134 1242 3135 1534 3135 1533 3135 1533 3136 1534 3136 1243 3136 1244 3137 1533 3137 1243 3137 1244 3138 1535 3138 1533 3138 1533 3139 1535 3139 1212 3139 1245 3140 1533 3140 1212 3140 1245 3141 1536 3141 1533 3141 1533 3142 1536 3142 1283 3142 1527 3143 1283 3143 1284 3143 1286 3144 1527 3144 1284 3144 1286 3145 1537 3145 1527 3145 1527 3146 1537 3146 1288 3146 1289 3147 1527 3147 1288 3147 1289 3148 1265 3148 1527 3148 1527 3149 1265 3149 1290 3149 1538 3150 1527 3150 1290 3150 1538 3151 1539 3151 1527 3151 1527 3152 1539 3152 1540 3152 1541 3153 1527 3153 1540 3153 1541 3154 1293 3154 1527 3154 1527 3155 1293 3155 1294 3155 1295 3156 1527 3156 1294 3156 1295 3157 1542 3157 1527 3157 1527 3158 1542 3158 1268 3158 1298 3159 1527 3159 1268 3159 1298 3160 1269 3160 1527 3160 1527 3161 1269 3161 1299 3161 1545 3162 1299 3162 1543 3162 1271 3163 1545 3163 1543 3163 1271 3164 1272 3164 1545 3164 1545 3165 1272 3165 1301 3165 1302 3166 1545 3166 1301 3166 1302 3167 1304 3167 1545 3167 1545 3168 1304 3168 1544 3168 1305 3169 1545 3169 
1544 3169 1305 3170 1546 3170 1545 3170 1545 3171 1546 3171 1547 3171 1548 3172 1545 3172 1547 3172 1548 3173 1306 3173 1545 3173 1545 3174 1306 3174 1573 3174 1573 3175 1306 3175 1308 3175 1549 3176 1573 3176 1308 3176 1549 3177 1310 3177 1573 3177 1573 3178 1310 3178 1550 3178 1312 3179 1573 3179 1550 3179 1312 3180 1278 3180 1573 3180 1573 3181 1278 3181 1551 3181 1552 3182 1573 3182 1551 3182 1552 3183 1553 3183 1573 3183 1573 3184 1553 3184 1219 3184 1555 3185 1219 3185 1220 3185 1247 3186 1555 3186 1220 3186 1247 3187 1221 3187 1555 3187 1555 3188 1221 3188 1223 3188 1554 3189 1555 3189 1223 3189 1554 3190 1248 3190 1555 3190 1555 3191 1248 3191 1556 3191 1228 3192 1555 3192 1556 3192 1228 3193 1557 3193 1555 3193 1555 3194 1557 3194 1251 3194 1558 3195 1555 3195 1251 3195 1558 3196 1524 3196 1555 3196 1558 3197 1559 3197 1524 3197 1524 3198 1559 3198 1560 3198 1231 3199 1524 3199 1560 3199 1231 3200 1232 3200 1524 3200 1524 3201 1232 3201 1253 3201 1561 3202 1524 3202 1253 3202 1561 3203 1254 3203 1524 3203 1524 3204 1254 3204 1562 3204 1563 3205 1524 3205 1562 3205 1283 3206 1536 3206 1569 3206 1569 3207 1536 3207 1564 3207 1570 3208 1564 3208 1565 3208 1262 3209 1565 3209 1566 3209 1261 3210 1566 3210 1215 3210 1259 3211 1215 3211 1571 3211 1572 3212 1571 3212 1217 3212 1568 3213 1217 3213 1567 3213 1553 3214 1567 3214 1219 3214 1553 3215 1568 3215 1567 3215 1569 3216 1564 3216 1570 3216 1570 3217 1565 3217 1262 3217 1262 3218 1566 3218 1261 3218 1261 3219 1215 3219 1259 3219 1259 3220 1571 3220 1572 3220 1572 3221 1217 3221 1568 3221 1573 3222 1219 3222 1555 3222 1579 3223 1573 3223 1555 3223 1579 3224 1599 3224 1573 3224 1579 3225 1574 3225 1599 3225 1579 3226 1328 3226 1574 3226 1579 3227 1327 3227 1328 3227 1579 3228 1325 3228 1327 3228 1579 3229 1575 3229 1325 3229 1579 3230 1576 3230 1575 3230 1579 3231 1577 3231 1576 3231 1579 3232 1348 3232 1577 3232 1579 3233 1347 3233 1348 3233 1579 3234 1346 3234 1347 3234 1579 3235 1578 3235 1346 3235 1579 3236 1323 3236 1578 3236 1579 3237 1580 3237 1323 3237 1323 3238 1580 3238 1343 3238 1343 3239 1580 3239 1342 3239 1342 3240 1580 3240 1581 3240 1581 3241 1580 3241 1582 3241 1582 3242 1580 3242 1583 3242 1583 3243 1580 3243 1318 3243 1318 3244 1580 3244 1316 3244 1316 3245 1580 3245 1584 3245 1584 3246 1580 3246 1585 3246 1585 3247 1580 3247 1586 3247 1586 3248 1580 3248 1587 3248 1587 3249 1580 3249 1619 3249 1313 3250 1619 3250 1371 3250 1313 3251 1587 3251 1619 3251 1530 3252 1528 3252 1533 3252 1533 3253 1283 3253 1527 3253 1527 3254 1299 3254 1545 3254 1594 3255 1527 3255 1545 3255 1594 3256 1588 3256 1527 3256 1594 3257 1385 3257 1588 3257 1594 3258 1410 3258 1385 3258 1594 3259 1589 3259 1410 3259 1594 3260 1409 3260 1589 3260 1594 3261 1382 3261 1409 3261 1594 3262 1591 3262 1382 3262 1594 3263 1590 3263 1591 3263 1594 3264 1592 3264 1590 3264 1594 3265 1593 3265 1592 3265 1594 3266 1596 3266 1593 3266 1594 3267 1595 3267 1596 3267 1594 3268 1407 3268 1595 3268 1594 3269 1406 3269 1407 3269 1594 3270 1599 3270 1406 3270 1406 3271 1599 3271 1405 3271 1405 3272 1599 3272 1597 3272 1597 3273 1599 3273 1598 3273 1598 3274 1599 3274 1403 3274 1403 3275 1599 3275 1401 3275 1401 3276 1599 3276 1374 3276 1374 3277 1599 3277 1373 3277 1373 3278 1599 3278 1600 3278 1600 3279 1599 3279 1400 3279 1400 3280 1599 3280 1574 3280 1426 3281 1574 3281 1352 3281 1601 3282 1352 3282 1602 3282 1424 3283 1602 3283 1355 3283 1423 3284 1355 3284 1603 3284 1397 3285 1603 3285 1357 3285 1613 3286 1357 3286 1614 3286 1615 3287 1614 3287 1330 3287 1421 
3288 1330 3288 1358 3288 1588 3289 1358 3289 1619 3289 1588 3290 1421 3290 1358 3290 1588 3291 1420 3291 1421 3291 1588 3292 1419 3292 1420 3292 1588 3293 1604 3293 1419 3293 1588 3294 1606 3294 1604 3294 1588 3295 1605 3295 1606 3295 1588 3296 1395 3296 1605 3296 1588 3297 1607 3297 1395 3297 1588 3298 1608 3298 1607 3298 1588 3299 1416 3299 1608 3299 1588 3300 1609 3300 1416 3300 1588 3301 1415 3301 1609 3301 1588 3302 1390 3302 1415 3302 1588 3303 1610 3303 1390 3303 1588 3304 1387 3304 1610 3304 1588 3305 1611 3305 1387 3305 1588 3306 1414 3306 1611 3306 1588 3307 1412 3307 1414 3307 1588 3308 1612 3308 1412 3308 1588 3309 1385 3309 1612 3309 1370 3310 1371 3310 1619 3310 1369 3311 1619 3311 1622 3311 1369 3312 1370 3312 1619 3312 1400 3313 1574 3313 1426 3313 1426 3314 1352 3314 1601 3314 1601 3315 1602 3315 1424 3315 1424 3316 1355 3316 1423 3316 1423 3317 1603 3317 1397 3317 1397 3318 1357 3318 1613 3318 1613 3319 1614 3319 1615 3319 1615 3320 1330 3320 1421 3320 1358 3321 1332 3321 1619 3321 1619 3322 1332 3322 1616 3322 1361 3323 1619 3323 1616 3323 1361 3324 1617 3324 1619 3324 1619 3325 1617 3325 1363 3325 1618 3326 1619 3326 1363 3326 1618 3327 1335 3327 1619 3327 1619 3328 1335 3328 1620 3328 1366 3329 1619 3329 1620 3329 1366 3330 1367 3330 1619 3330 1619 3331 1367 3331 1368 3331 1621 3332 1619 3332 1368 3332 1621 3333 1622 3333 1619 3333 1624 3334 1168 3334 1623 3334 1624 3335 1625 3335 1168 3335 1624 3336 1626 3336 1625 3336 1625 3337 1626 3337 1198 3337 1198 3338 1626 3338 902 3338 1199 3339 902 3339 903 3339 1200 3340 903 3340 1675 3340 1627 3341 1675 3341 1628 3341 1167 3342 1628 3342 1676 3342 1165 3343 1676 3343 1629 3343 1677 3344 1629 3344 1630 3344 1164 3345 1630 3345 1632 3345 1631 3346 1632 3346 1633 3346 1163 3347 1633 3347 1634 3347 1635 3348 1634 3348 928 3348 1678 3349 928 3349 929 3349 1160 3350 929 3350 1636 3350 1158 3351 1636 3351 930 3351 1637 3352 930 3352 910 3352 1679 3353 910 3353 911 3353 1157 3354 911 3354 1638 3354 1156 3355 1638 3355 915 3355 1680 3356 915 3356 916 3356 1155 3357 916 3357 917 3357 1681 3358 917 3358 1639 3358 1682 3359 1639 3359 1683 3359 1684 3360 1683 3360 1640 3360 1151 3361 1640 3361 1641 3361 1642 3362 1641 3362 1643 3362 1644 3363 1643 3363 1645 3363 1150 3364 1645 3364 1647 3364 1646 3365 1647 3365 1648 3365 1685 3366 1648 3366 1649 3366 1686 3367 1649 3367 918 3367 1148 3368 918 3368 1687 3368 1688 3369 1687 3369 1689 3369 1690 3370 1689 3370 931 3370 1142 3371 931 3371 1650 3371 1691 3372 1650 3372 1651 3372 1692 3373 1651 3373 1693 3373 1652 3374 1693 3374 1654 3374 1653 3375 1654 3375 869 3375 1655 3376 869 3376 1657 3376 1656 3377 1657 3377 870 3377 1694 3378 870 3378 1658 3378 1695 3379 1658 3379 1659 3379 1696 3380 1659 3380 1660 3380 1697 3381 1660 3381 1661 3381 1144 3382 1661 3382 1662 3382 1145 3383 1662 3383 920 3383 1146 3384 920 3384 876 3384 1663 3385 876 3385 878 3385 1197 3386 878 3386 879 3386 1194 3387 879 3387 1664 3387 1195 3388 1664 3388 882 3388 1698 3389 882 3389 884 3389 1699 3390 884 3390 1666 3390 1665 3391 1666 3391 885 3391 1700 3392 885 3392 1667 3392 1201 3393 1667 3393 1668 3393 1701 3394 1668 3394 1669 3394 1702 3395 1669 3395 922 3395 1703 3396 922 3396 1670 3396 1704 3397 1670 3397 889 3397 1202 3398 889 3398 1671 3398 1203 3399 1671 3399 1672 3399 1705 3400 1672 3400 1673 3400 1706 3401 1673 3401 894 3401 1707 3402 894 3402 923 3402 1708 3403 923 3403 925 3403 1204 3404 925 3404 897 3404 1709 3405 897 3405 1674 3405 1710 3406 1674 3406 1623 3406 1168 3407 1710 3407 1623 3407 1198 3408 902 
3408 1199 3408 1199 3409 903 3409 1200 3409 1200 3410 1675 3410 1627 3410 1627 3411 1628 3411 1167 3411 1167 3412 1676 3412 1165 3412 1165 3413 1629 3413 1677 3413 1677 3414 1630 3414 1164 3414 1164 3415 1632 3415 1631 3415 1631 3416 1633 3416 1163 3416 1163 3417 1634 3417 1635 3417 1635 3418 928 3418 1678 3418 1678 3419 929 3419 1160 3419 1160 3420 1636 3420 1158 3420 1158 3421 930 3421 1637 3421 1637 3422 910 3422 1679 3422 1679 3423 911 3423 1157 3423 1157 3424 1638 3424 1156 3424 1156 3425 915 3425 1680 3425 1680 3426 916 3426 1155 3426 1155 3427 917 3427 1681 3427 1681 3428 1639 3428 1682 3428 1682 3429 1683 3429 1684 3429 1684 3430 1640 3430 1151 3430 1151 3431 1641 3431 1642 3431 1642 3432 1643 3432 1644 3432 1644 3433 1645 3433 1150 3433 1150 3434 1647 3434 1646 3434 1646 3435 1648 3435 1685 3435 1685 3436 1649 3436 1686 3436 1686 3437 918 3437 1148 3437 1148 3438 1687 3438 1688 3438 1688 3439 1689 3439 1690 3439 1690 3440 931 3440 1142 3440 1142 3441 1650 3441 1691 3441 1691 3442 1651 3442 1692 3442 1692 3443 1693 3443 1652 3443 1652 3444 1654 3444 1653 3444 1653 3445 869 3445 1655 3445 1655 3446 1657 3446 1656 3446 1656 3447 870 3447 1694 3447 1694 3448 1658 3448 1695 3448 1695 3449 1659 3449 1696 3449 1696 3450 1660 3450 1697 3450 1697 3451 1661 3451 1144 3451 1144 3452 1662 3452 1145 3452 1145 3453 920 3453 1146 3453 1146 3454 876 3454 1663 3454 1663 3455 878 3455 1197 3455 1197 3456 879 3456 1194 3456 1194 3457 1664 3457 1195 3457 1195 3458 882 3458 1698 3458 1698 3459 884 3459 1699 3459 1699 3460 1666 3460 1665 3460 1665 3461 885 3461 1700 3461 1700 3462 1667 3462 1201 3462 1201 3463 1668 3463 1701 3463 1701 3464 1669 3464 1702 3464 1702 3465 922 3465 1703 3465 1703 3466 1670 3466 1704 3466 1704 3467 889 3467 1202 3467 1202 3468 1671 3468 1203 3468 1203 3469 1672 3469 1705 3469 1705 3470 1673 3470 1706 3470 1706 3471 894 3471 1707 3471 1707 3472 923 3472 1708 3472 1708 3473 925 3473 1204 3473 1204 3474 897 3474 1709 3474 1709 3475 1674 3475 1710 3475 1712 3476 1740 3476 1711 3476 1712 3477 461 3477 1740 3477 1712 3478 1713 3478 461 3478 461 3479 1713 3479 1714 3479 1714 3480 1713 3480 123 3480 1741 3481 123 3481 120 3481 459 3482 120 3482 119 3482 458 3483 119 3483 117 3483 1715 3484 117 3484 115 3484 1716 3485 115 3485 1717 3485 1742 3486 1717 3486 112 3486 1718 3487 112 3487 110 3487 1719 3488 110 3488 109 3488 1720 3489 109 3489 1721 3489 455 3490 1721 3490 1743 3490 456 3491 1743 3491 106 3491 1744 3492 106 3492 105 3492 1722 3493 105 3493 101 3493 1745 3494 101 3494 1723 3494 1724 3495 1723 3495 1746 3495 483 3496 1746 3496 97 3496 1725 3497 97 3497 1747 3497 1726 3498 1747 3498 94 3498 1748 3499 94 3499 93 3499 430 3500 93 3500 90 3500 1727 3501 90 3501 88 3501 1749 3502 88 3502 1750 3502 1728 3503 1750 3503 1729 3503 432 3504 1729 3504 86 3504 431 3505 86 3505 85 3505 1751 3506 85 3506 84 3506 482 3507 84 3507 82 3507 1752 3508 82 3508 1730 3508 1753 3509 1730 3509 1754 3509 1731 3510 1754 3510 1733 3510 1732 3511 1733 3511 80 3511 1755 3512 80 3512 79 3512 1756 3513 79 3513 1734 3513 435 3514 1734 3514 1736 3514 1735 3515 1736 3515 1738 3515 1737 3516 1738 3516 1757 3516 1758 3517 1757 3517 76 3517 436 3518 76 3518 75 3518 1739 3519 75 3519 1711 3519 1740 3520 1739 3520 1711 3520 1714 3521 123 3521 1741 3521 1741 3522 120 3522 459 3522 459 3523 119 3523 458 3523 458 3524 117 3524 1715 3524 1715 3525 115 3525 1716 3525 1716 3526 1717 3526 1742 3526 1742 3527 112 3527 1718 3527 1718 3528 110 3528 1719 3528 1719 3529 109 3529 1720 3529 1720 3530 1721 3530 455 3530 455 
3531 1743 3531 456 3531 456 3532 106 3532 1744 3532 1744 3533 105 3533 1722 3533 1722 3534 101 3534 1745 3534 1745 3535 1723 3535 1724 3535 1724 3536 1746 3536 483 3536 483 3537 97 3537 1725 3537 1725 3538 1747 3538 1726 3538 1726 3539 94 3539 1748 3539 1748 3540 93 3540 430 3540 430 3541 90 3541 1727 3541 1727 3542 88 3542 1749 3542 1749 3543 1750 3543 1728 3543 1728 3544 1729 3544 432 3544 432 3545 86 3545 431 3545 431 3546 85 3546 1751 3546 1751 3547 84 3547 482 3547 482 3548 82 3548 1752 3548 1752 3549 1730 3549 1753 3549 1753 3550 1754 3550 1731 3550 1731 3551 1733 3551 1732 3551 1732 3552 80 3552 1755 3552 1755 3553 79 3553 1756 3553 1756 3554 1734 3554 435 3554 435 3555 1736 3555 1735 3555 1735 3556 1738 3556 1737 3556 1737 3557 1757 3557 1758 3557 1758 3558 76 3558 436 3558 436 3559 75 3559 1739 3559 652 3560 1760 3560 1759 3560 652 3561 1761 3561 1760 3561 652 3562 1762 3562 1761 3562 1761 3563 1762 3563 1763 3563 407 3564 1763 3564 408 3564 407 3565 1761 3565 1763 3565 407 3566 406 3566 1761 3566 1761 3567 406 3567 1760 3567 1760 3568 406 3568 1764 3568 1832 3569 1764 3569 1765 3569 1831 3570 1765 3570 1766 3570 1829 3571 1766 3571 405 3571 1828 3572 405 3572 1767 3572 1827 3573 1767 3573 421 3573 1825 3574 421 3574 1768 3574 1822 3575 1768 3575 1821 3575 1819 3576 1821 3576 1818 3576 1817 3577 1818 3577 419 3577 1769 3578 419 3578 418 3578 1815 3579 418 3579 1770 3579 1813 3580 1770 3580 417 3580 1811 3581 417 3581 416 3581 1810 3582 416 3582 415 3582 1807 3583 415 3583 1771 3583 1805 3584 1771 3584 414 3584 1804 3585 414 3585 1772 3585 1802 3586 1772 3586 413 3586 1801 3587 413 3587 412 3587 1799 3588 412 3588 411 3588 1798 3589 411 3589 1797 3589 1795 3590 1797 3590 1773 3590 1794 3591 1773 3591 1775 3591 1774 3592 1775 3592 1792 3592 1791 3593 1792 3593 410 3593 1790 3594 410 3594 1777 3594 1776 3595 1777 3595 409 3595 1788 3596 409 3596 1786 3596 1783 3597 1786 3597 1782 3597 1781 3598 1782 3598 1780 3598 1778 3599 1780 3599 408 3599 1763 3600 1778 3600 408 3600 1763 3601 653 3601 1778 3601 1763 3602 1762 3602 653 3602 653 3603 1779 3603 1778 3603 1778 3604 1779 3604 1781 3604 1780 3605 1778 3605 1781 3605 1779 3606 1784 3606 1781 3606 1781 3607 1784 3607 1783 3607 1782 3608 1781 3608 1783 3608 1784 3609 1785 3609 1783 3609 1783 3610 1785 3610 1788 3610 1786 3611 1783 3611 1788 3611 1785 3612 1787 3612 1788 3612 1788 3613 1787 3613 1776 3613 409 3614 1788 3614 1776 3614 1787 3615 1789 3615 1776 3615 1776 3616 1789 3616 1790 3616 1777 3617 1776 3617 1790 3617 1789 3618 654 3618 1790 3618 1790 3619 654 3619 1791 3619 410 3620 1790 3620 1791 3620 654 3621 1793 3621 1791 3621 1791 3622 1793 3622 1774 3622 1792 3623 1791 3623 1774 3623 1793 3624 655 3624 1774 3624 1774 3625 655 3625 1794 3625 1775 3626 1774 3626 1794 3626 655 3627 701 3627 1794 3627 1794 3628 701 3628 1795 3628 1773 3629 1794 3629 1795 3629 701 3630 1796 3630 1795 3630 1795 3631 1796 3631 1798 3631 1797 3632 1795 3632 1798 3632 1796 3633 700 3633 1798 3633 1798 3634 700 3634 1799 3634 411 3635 1798 3635 1799 3635 700 3636 1800 3636 1799 3636 1799 3637 1800 3637 1801 3637 412 3638 1799 3638 1801 3638 1800 3639 699 3639 1801 3639 1801 3640 699 3640 1802 3640 413 3641 1801 3641 1802 3641 699 3642 1803 3642 1802 3642 1802 3643 1803 3643 1804 3643 1772 3644 1802 3644 1804 3644 1803 3645 1806 3645 1804 3645 1804 3646 1806 3646 1805 3646 414 3647 1804 3647 1805 3647 1806 3648 630 3648 1805 3648 1805 3649 630 3649 1807 3649 1771 3650 1805 3650 1807 3650 630 3651 1808 3651 1807 3651 1807 3652 1808 3652 1810 3652 415 3653 
1807 3653 1810 3653 1808 3654 1809 3654 1810 3654 1810 3655 1809 3655 1811 3655 416 3656 1810 3656 1811 3656 1809 3657 1812 3657 1811 3657 1811 3658 1812 3658 1813 3658 417 3659 1811 3659 1813 3659 1812 3660 1814 3660 1813 3660 1813 3661 1814 3661 1815 3661 1770 3662 1813 3662 1815 3662 1814 3663 628 3663 1815 3663 1815 3664 628 3664 1769 3664 418 3665 1815 3665 1769 3665 628 3666 1816 3666 1769 3666 1769 3667 1816 3667 1817 3667 419 3668 1769 3668 1817 3668 1816 3669 627 3669 1817 3669 1817 3670 627 3670 1819 3670 1818 3671 1817 3671 1819 3671 627 3672 1820 3672 1819 3672 1819 3673 1820 3673 1822 3673 1821 3674 1819 3674 1822 3674 1820 3675 1823 3675 1822 3675 1822 3676 1823 3676 1825 3676 1768 3677 1822 3677 1825 3677 1823 3678 1824 3678 1825 3678 1825 3679 1824 3679 1827 3679 421 3680 1825 3680 1827 3680 1824 3681 1826 3681 1827 3681 1827 3682 1826 3682 1828 3682 1767 3683 1827 3683 1828 3683 1826 3684 647 3684 1828 3684 1828 3685 647 3685 1829 3685 405 3686 1828 3686 1829 3686 647 3687 648 3687 1829 3687 1829 3688 648 3688 1831 3688 1766 3689 1829 3689 1831 3689 648 3690 1830 3690 1831 3690 1831 3691 1830 3691 1832 3691 1765 3692 1831 3692 1832 3692 1830 3693 1759 3693 1832 3693 1832 3694 1759 3694 1760 3694 1764 3695 1832 3695 1760 3695 803 3696 802 3696 1833 3696 1833 3697 802 3697 867 3697 325 3698 360 3698 322 3698 322 3699 360 3699 1443 3699

+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/models/rg_robot/meshes/CoreComponent.dae b/models/rg_robot/meshes/CoreComponent.dae index b577138fb0..de295a5476 100644 --- a/models/rg_robot/meshes/CoreComponent.dae +++ b/models/rg_robot/meshes/CoreComponent.dae @@ -1,63 +1,168 @@ - - - - - VCGLab - VCGLib | MeshLab - - Y_UP - do sep. 17 12:01:33 2015 - do sep. 17 12:01:33 2015 - - - - - - - - - -0.0445 0.0445 -0.019 -0.0385 0.0445 0.019 -0.0445 0.0385 -0.019 -0.0445 0.0385 0.019 -0.0385 0.0445 -0.019 -0.0385 0.0405 -0.019 0.0385 -0.0445 -0.019 0.0385 -0.0445 0.019 0.0385 -0.0405 0.019 0.0405 -0.0385 -0.019 0.0445 -0.0385 -0.019 0.0385 -0.0405 -0.019 -0.0445 -0.0445 0.019 -0.0445 -0.0385 -0.019 -0.0405 -0.0385 -0.019 -0.0385 -0.0405 -0.019 -0.0445 -0.0445 -0.019 -0.0385 -0.0445 0.019 -0.0385 -0.0445 -0.019 -0.0445 -0.0385 0.019 0.0385 0.0405 -0.019 0.0385 0.0445 -0.019 0.0385 0.0445 0.019 0.0405 0.0385 0.019 0.0385 0.0405 0.019 0.0405 -0.0085 -0.0178873 0.0405 -0.0065 -0.014 0.0375 -0.0065 -0.019 0.0405 0.0075 -0.014 0.0405 0.0095 -0.0178873 0.0398324 -0.0085 -0.019 0.0398324 0.0095 -0.019 0.0405 0.0095 -0.019 0.0375 0.0075 -0.019 0.0405 -0.0085 -0.019 -0.0085 -0.0405 -0.019 0.0075 -0.0375 -0.019 0.0075 -0.0405 -0.014 0.0095 -0.0405 -0.0178873 -0.0085 -0.0405 -0.0178873 -0.0065 -0.0405 -0.014 -0.0085 -0.0398324 -0.019 -0.0065 -0.0375 -0.019 0.0095 -0.0405 -0.019 0.0095 -0.0398324 -0.019 -0.0405 -0.0225 -0.014 -0.0405 -0.0325 -0.014 -0.0405 -0.0345 -0.0178873 -0.0398324 -0.0205 -0.019 -0.0405 -0.0345 -0.019 -0.0405 -0.0205 -0.0178873 -0.0405 -0.0205 -0.019 -0.0375 -0.0325 -0.019 -0.0398324 -0.0345 -0.019 -0.0375 -0.0225 -0.019 0.0345 0.0405 -0.019 0.0225 0.0405 -0.014 0.0225 0.0375 -0.019 0.0205 0.0398324 -0.019 0.0325 0.0405 -0.014 0.0345 0.0405 -0.0178873 0.0325 0.0375 -0.019 0.0205 0.0405 -0.0178873 0.0205 0.0405 -0.019 0.0345 0.0398324 -0.019 0.0085 0.0405 -0.019 -0.0095 0.0405 -0.019 0.0065 0.0405 -0.014 -0.0075 0.0405 -0.014 -0.0095 0.0405 -0.0178873 -0.0095 0.0398324 -0.019 0.0085 0.0398324 -0.019 0.0085 0.0405 -0.0178873 0.0065 0.0375 -0.019 -0.0075 0.0375 -0.019 -0.0405 0.0205 -0.019 -0.0405 0.0325 -0.014 -0.0375 0.0225 -0.019 -0.0405 0.0225 -0.014 -0.0405 0.0345 -0.0178873 -0.0375 0.0325 -0.019 -0.0405 0.0205 -0.0178873 -0.0398324 0.0345 -0.019 -0.0398324 0.0205 -0.019 -0.0405 0.0345 -0.019 0.0405 -0.0325 -0.014 0.0405 -0.0225 -0.014 0.0375 -0.0225 -0.019 0.0405 -0.0205 -0.0178873 0.0398324 -0.0345 -0.019 0.0405 -0.0345 -0.0178873 0.0375 -0.0325 -0.019 0.0398324 -0.0205 -0.019 0.0405 -0.0345 -0.019 0.0405 -0.0205 -0.019 -0.0345 -0.0405 -0.0178873 -0.0325 -0.0405 -0.014 -0.0225 -0.0375 -0.019 -0.0225 -0.0405 -0.014 -0.0205 -0.0405 -0.0178873 -0.0345 -0.0398324 -0.019 -0.0325 -0.0375 -0.019 -0.0205 -0.0398324 -0.019 -0.0205 -0.0405 -0.019 -0.0345 -0.0405 -0.019 -0.0405 0.0065 -0.014 -0.0405 -0.0095 -0.019 -0.0405 -0.0075 -0.014 -0.0405 -0.0095 -0.0178873 -0.0405 0.0085 -0.0178873 -0.0398324 0.0085 -0.019 -0.0405 0.0085 -0.019 -0.0375 0.0065 -0.019 -0.0375 -0.0075 -0.019 -0.0398324 -0.0095 -0.019 0.0405 0.0205 -0.0178873 0.0405 0.0205 -0.019 0.0405 0.0345 -0.0178873 0.0375 0.0225 -0.019 0.0405 0.0225 -0.014 0.0405 0.0325 -0.014 0.0375 0.0325 -0.019 0.0405 0.0345 -0.019 0.0398324 0.0205 -0.019 0.0398324 0.0345 -0.019 0.0345 -0.0405 -0.0178873 0.0325 -0.0405 -0.014 0.0345 -0.0405 -0.019 0.0325 -0.0375 -0.019 0.0345 -0.0398324 -0.019 0.0225 -0.0405 -0.014 0.0225 -0.0375 -0.019 0.0205 -0.0398324 -0.019 0.0205 -0.0405 -0.0178873 0.0205 -0.0405 -0.019 -0.0205 0.0405 -0.0178873 -0.0205 0.0405 -0.019 -0.0225 0.0405 -0.014 
-0.0325 0.0375 -0.019 -0.0325 0.0405 -0.014 -0.0345 0.0405 -0.0178873 -0.0205 0.0398324 -0.019 -0.0345 0.0398324 -0.019 -0.0225 0.0375 -0.019 -0.0345 0.0405 -0.019 0.01555 -0.04599 0.02 0.01555 -0.046 -0.022 0.01555 -0.04599 -0.022 0.0203 -0.046 -0.022 0.0203 -0.046 0.02 0.01555 -0.046 0.02 0.01704 -0.0445 0.02 0.0203 -0.0445 0.02 0.01704 -0.0445 -0.022 0.0203 -0.0445 -0.022 0.046 0.01555 -0.022 0.046 0.01555 0.02 0.046 0.0203 -0.022 0.0445 0.01704 -0.022 0.04599 0.01555 -0.022 0.046 0.0203 0.02 0.0445 0.0203 0.02 0.04599 0.01555 0.02 0.0445 0.01704 0.02 0.0445 0.0203 -0.022 -0.01555 0.04599 -0.022 -0.0203 0.046 -0.022 -0.01704 0.0445 0.02 -0.01555 0.046 -0.022 -0.01555 0.04599 0.02 -0.01555 0.046 0.02 -0.0203 0.046 0.02 -0.01704 0.0445 -0.022 -0.0203 0.0445 0.02 -0.0203 0.0445 -0.022 0.01555 0.046 -0.022 0.01704 0.0445 0.02 0.0203 0.046 0.02 0.01555 0.04599 0.02 0.01555 0.046 0.02 0.01555 0.04599 -0.022 0.01704 0.0445 -0.022 0.0203 0.0445 0.02 0.0203 0.0445 -0.022 0.0203 0.046 -0.022 -0.01555 -0.046 0.02 -0.01555 -0.04599 0.02 -0.01555 -0.04599 -0.022 -0.0203 -0.046 -0.022 -0.01555 -0.046 -0.022 -0.0203 -0.046 0.02 -0.01704 -0.0445 -0.022 -0.01704 -0.0445 0.02 -0.0203 -0.0445 -0.022 -0.0203 -0.0445 0.02 0.04599 -0.01555 -0.022 0.046 -0.01555 0.02 0.0445 -0.01704 0.02 0.04599 -0.01555 0.02 0.046 -0.0203 -0.022 0.046 -0.0203 0.02 0.0445 -0.0203 -0.022 0.046 -0.01555 -0.022 0.0445 -0.01704 -0.022 0.0445 -0.0203 0.02 -0.046 0.01505 0.02 -0.04599 0.01505 -0.022 -0.046 0.01505 -0.022 -0.0445 0.01654 -0.022 -0.0445 0.01654 0.02 -0.04599 0.01505 0.02 -0.046 0.0198 0.02 -0.046 0.0198 -0.022 -0.0445 0.01654 -0.022 -0.0445 0.0198 -0.022 -0.0445 0.01654 0.02 -0.0445 0.0198 0.02 -0.046 -0.01605 -0.022 -0.04599 -0.01605 -0.022 -0.046 -0.01605 0.02 -0.04599 -0.01605 0.02 -0.0445 -0.01754 0.02 -0.0445 -0.01754 -0.022 -0.0445 -0.01754 0.02 -0.0445 -0.01754 -0.022 -0.0445 -0.0208 0.02 -0.046 -0.0208 0.02 -0.0445 -0.0208 -0.022 -0.046 -0.0208 -0.022 -0.0445 -0.0385 -0.019 -0.0445 -0.0385 0.019 -0.0445 0.0385 -0.019 -0.0405 -0.0345 0.014 -0.0405 -0.0129259 0.0126173 -0.0405 -0.0133092 0.0111243 -0.0405 -0.0126874 0.0107297 -0.0405 -0.0134924 0.0115868 -0.0405 -0.0385 0.019 -0.0405 -0.0135 -0.007 -0.0405 -0.0135 0.0095 -0.0405 -0.0345 -0.014 -0.0405 -0.0129259 -0.0123784 -0.0405 0.0116015 0.012092 -0.0405 -0.0115312 0.011961 -0.0405 0.0123189 0.0126855 -0.0405 0.0115413 0.0118574 -0.0405 -0.0114998 0.0117122 -0.0405 -0.0134301 -0.0136518 -0.0405 0.0117182 0.0111682 -0.0405 0.0116015 0.0113805 -0.0405 -0.0116235 0.0112303 -0.0405 -0.0131376 -0.0140543 -0.0405 0.0123189 0.010787 -0.0405 0.0125607 0.0107718 -0.0405 0.0132044 0.0110747 -0.0405 0.0130178 0.0109203 -0.0405 -0.0114997 -0.0132835 -0.0405 -0.0121909 -0.0142349 -0.0405 -0.0095 -0.014 -0.0405 -0.0115311 -0.0135323 -0.0405 0.013436 0.0119766 -0.0405 -0.0124372 -0.0142819 -0.0405 0.0203475 0.014 -0.0405 0.0015 -0.019 -0.0405 0.011691 -0.0127473 -0.0405 0.0085 -0.014 -0.0405 0.0135 -0.007 -0.0405 0.0115079 -0.0132097 -0.0405 0.0125628 -0.012337 -0.0405 0.0130358 -0.0124907 -0.0405 0.0123126 -0.0143174 -0.0405 0.0203475 -0.014 -0.0405 0.0134686 -0.0130864 -0.0405 0.012809 -0.0142861 -0.0405 0.0130358 -0.0141794 -0.0405 0.0385 0.019 -0.0405 -0.0124372 0.0127105 -0.0405 -0.0202877 0.014 -0.0445 -0.0126874 0.0126947 -0.0445 -0.0134924 0.0118376 -0.0445 -0.0134924 0.0115868 -0.0445 -0.01343 0.011344 -0.0445 -0.0133092 0.0111243 -0.0445 -0.0131376 0.0109415 -0.0445 -0.0135 0.0095 -0.0445 -0.0129259 0.0108072 -0.0445 -0.0345 0.014 -0.0445 -0.0345 -0.014 -0.0445 
-0.0202877 -0.014 -0.0445 -0.011964 0.0125567 -0.0445 -0.0116235 0.0121941 -0.0445 -0.0117709 0.0123969 -0.0445 -0.0129259 -0.0123784 -0.0445 -0.0121909 0.0126635 -0.0445 -0.0115312 0.011961 -0.0445 -0.0114998 0.0117122 -0.0445 0.0115413 0.0118574 -0.0445 0.0116015 0.0113805 -0.0445 -0.0115312 0.0114635 -0.0445 -0.0134924 -0.0134089 -0.0445 -0.0135 -0.007 -0.0445 -0.011964 -0.0124389 -0.0445 -0.0133093 -0.0138715 -0.0445 -0.0131376 -0.0140543 -0.0445 -0.0126874 -0.0142661 -0.0445 0.0120885 0.0108619 -0.0445 0.0123189 0.010787 -0.0445 0.0125607 0.0107718 -0.0445 0.0127986 0.0108172 -0.0445 0.0130178 0.0109203 -0.0445 0.0132044 0.0110747 -0.0445 0.0133468 0.0112707 -0.0445 0.013436 0.0114959 -0.0445 -0.0117708 -0.0125988 -0.0445 -0.0114997 -0.0132835 -0.0445 -0.0117708 -0.0139683 -0.0445 -0.0095 -0.014 -0.0445 0.013436 0.0119766 -0.0445 0.0133468 0.0122018 -0.0445 0.0132044 0.0123978 -0.0445 0.0130178 0.0125522 -0.0445 0.0127986 0.0126553 -0.0445 0.0125607 0.0127007 -0.0445 0.0134664 0.0117363 -0.0445 -0.0124372 -0.0142819 -0.0445 -0.0121909 -0.0142349 -0.0445 -0.0045 -0.019 -0.0445 0.0345 0.014 -0.0445 0.0118626 -0.0125646 -0.0445 0.0125628 -0.012337 -0.0445 0.012809 -0.012384 -0.0445 0.0135 -0.007 -0.0445 0.0130358 -0.0124907 -0.0445 0.013229 -0.0126505 -0.0445 0.0118626 -0.0141056 -0.0445 0.0120742 -0.0142399 -0.0445 0.0123126 -0.0143174 -0.0445 0.0125628 -0.0143331 -0.0445 0.0134686 -0.0130864 -0.0445 0.0133763 -0.0128533 -0.0445 0.012809 -0.0142861 -0.0445 0.0130358 -0.0141794 -0.0445 0.013229 -0.0140196 -0.0445 0.0133763 -0.0138168 -0.0445 0.0134686 -0.0135838 -0.0445 0.0135 -0.0133351 -0.0445 0.0203475 -0.014 -0.0445 0.0345 -0.014 -0.0445 0.0385 -0.019 -0.0445 -0.0202877 0.014 -0.0445 0.0385 0.019 -0.0445 -0.0124372 0.0127105 -0.0445 0.0203475 0.014 -0.0445 -0.011964 0.0108677 -0.0445 -0.0117709 0.0110275 -0.0445 -0.0385 -0.019 -0.0405 0.0385 -0.019 -0.0445 0.0035 -0.019 -0.0445 0.0085 -0.014 -0.0445 0.0035 -0.014 -0.0405 0.0015 -0.014 -0.0445 0.0085 -0.014 -0.0445 0.0135 0.0095 -0.0405 0.0135 0.0095 -0.0445 -0.0095 -0.014 -0.0405 -0.0025 -0.014 -0.0445 -0.0045 -0.014 -0.0445 0.0203475 0.014 -0.0405 0.0345 0.014 -0.0445 0.0203475 -0.014 -0.0405 0.0345 -0.014 -0.0445 0.0345 0.014 -0.0445 0.0345 -0.014 -0.0405 0.0125628 -0.0143331 -0.0445 0.012809 -0.0142861 -0.0445 0.0130358 -0.0141794 -0.0405 0.0135 -0.0133351 -0.0445 0.0134686 -0.0130864 -0.0445 0.0133763 -0.0128533 -0.0445 0.013229 -0.0140196 -0.0405 0.013229 -0.0140196 -0.0405 0.013229 -0.0126505 -0.0405 0.0133763 -0.0128533 -0.0405 0.0133763 -0.0138168 -0.0445 0.0133763 -0.0138168 -0.0405 0.0134686 -0.0135838 -0.0445 0.0134686 -0.0135838 -0.0445 0.013229 -0.0126505 -0.0445 0.0135 -0.0133351 -0.0445 0.012809 -0.012384 -0.0445 0.0130358 -0.0124907 -0.0405 0.012809 -0.012384 -0.0445 0.0123126 -0.0123528 -0.0445 0.0125628 -0.012337 -0.0405 0.0123126 -0.0123528 -0.0405 0.0120742 -0.0124302 -0.0445 0.0120742 -0.0124302 -0.0445 0.011691 -0.0127473 -0.0405 0.0118626 -0.0125646 -0.0445 0.0115702 -0.0129669 -0.0405 0.0115702 -0.0129669 -0.0445 0.0115079 -0.0132097 -0.0405 0.0115079 -0.0134604 -0.0445 0.0115079 -0.0134604 -0.0445 0.011691 -0.0139229 -0.0445 0.0115702 -0.0137032 -0.0405 0.0115702 -0.0137032 -0.0405 0.011691 -0.0139229 -0.0405 0.0118626 -0.0141056 -0.0405 0.0120742 -0.0142399 -0.0445 0.0125628 -0.0143331 -0.0445 0.0127986 0.0108172 -0.0445 0.013436 0.0119766 -0.0405 0.0127986 0.0108172 -0.0445 0.0130178 0.0109203 -0.0445 0.0133468 0.0122018 -0.0405 0.0134664 0.0117363 -0.0445 0.0132044 0.0110747 -0.0445 0.0132044 0.0123978 
-0.0405 0.0133468 0.0122018 -0.0445 0.0133468 0.0112707 -0.0405 0.0133468 0.0112707 -0.0405 0.0132044 0.0123978 -0.0445 0.013436 0.0114959 -0.0405 0.013436 0.0114959 -0.0445 0.0130178 0.0125522 -0.0405 0.0130178 0.0125522 -0.0445 0.0134664 0.0117363 -0.0405 0.0127986 0.0126553 -0.0445 0.0125607 0.0127007 -0.0445 0.0127986 0.0126553 -0.0405 0.0125607 0.0127007 -0.0445 0.0120885 0.0126106 -0.0445 0.0123189 0.0126855 -0.0405 0.0120885 0.0126106 -0.0445 0.0117182 0.0123043 -0.0405 0.0117182 0.0123043 -0.0445 0.011884 0.0124808 -0.0405 0.011884 0.0124808 -0.0445 0.0116015 0.012092 -0.0445 0.0115413 0.0116151 -0.0405 0.0115413 0.0116151 -0.0445 0.0117182 0.0111682 -0.0405 0.011884 0.0109917 -0.0445 0.011884 0.0109917 -0.0405 0.0120885 0.0108619 -0.0445 0.0125607 0.0107718 -0.0445 -0.0114997 -0.0132835 -0.0445 -0.0116234 -0.0128016 -0.0445 -0.0115311 -0.0130348 -0.0405 -0.011964 -0.0141281 -0.0405 -0.0117708 -0.0139683 -0.0445 -0.011964 -0.0141281 -0.0405 -0.0115311 -0.0130348 -0.0405 -0.0116234 -0.0128016 -0.0405 -0.0116234 -0.0137655 -0.0405 -0.0117708 -0.0125988 -0.0445 -0.0116234 -0.0137655 -0.0445 -0.0115311 -0.0135323 -0.0405 -0.0121909 -0.0123322 -0.0445 -0.0121909 -0.0123322 -0.0405 -0.011964 -0.0124389 -0.0445 -0.0124372 -0.0122852 -0.0405 -0.0124372 -0.0122852 -0.0445 -0.0126874 -0.0123009 -0.0405 -0.0126874 -0.0123009 -0.0445 -0.0131376 -0.0125128 -0.0405 -0.0131376 -0.0125128 -0.0445 -0.0133093 -0.0126956 -0.0405 -0.0133093 -0.0126956 -0.0445 -0.0134301 -0.0129153 -0.0405 -0.0134301 -0.0129153 -0.0445 -0.0134924 -0.0131582 -0.0405 -0.0134924 -0.0131582 -0.0405 -0.0134924 -0.0134089 -0.0445 -0.0134301 -0.0136518 -0.0405 -0.0133093 -0.0138715 -0.0405 -0.0129259 -0.0141887 -0.0445 -0.0129259 -0.0141887 -0.0405 -0.0126874 -0.0142661 -0.0445 -0.0121909 0.0107609 -0.0405 -0.0121909 0.0107609 -0.0445 -0.0114998 0.0117122 -0.0405 -0.011964 0.0108677 -0.0405 -0.0117709 0.0110275 -0.0445 -0.0116235 0.0121941 -0.0405 -0.0116235 0.0121941 -0.0445 -0.0116235 0.0112303 -0.0445 -0.0117709 0.0123969 -0.0405 -0.0115312 0.0114635 -0.0405 -0.0117709 0.0123969 -0.0405 -0.011964 0.0125567 -0.0445 -0.0121909 0.0126635 -0.0405 -0.0121909 0.0126635 -0.0445 -0.011964 0.0125567 -0.0445 -0.0124372 0.0127105 -0.0445 -0.0126874 0.0126947 -0.0405 -0.0126874 0.0126947 -0.0445 -0.0131376 0.0124829 -0.0405 -0.0131376 0.0124829 -0.0445 -0.0129259 0.0126173 -0.0445 -0.0133092 0.0123001 -0.0445 -0.01343 0.0120804 -0.0405 -0.0133092 0.0123001 -0.0405 -0.01343 0.0120804 -0.0405 -0.0134924 0.0118376 -0.0445 -0.0134924 0.0118376 -0.0445 -0.0134924 0.0115868 -0.0405 -0.01343 0.011344 -0.0445 -0.01343 0.011344 -0.0405 -0.0131376 0.0109415 -0.0445 -0.0133092 0.0111243 -0.0445 -0.0131376 0.0109415 -0.0405 -0.0129259 0.0108072 -0.0445 -0.0126874 0.0107297 -0.0445 -0.0129259 0.0108072 -0.0405 -0.0124372 0.0107139 -0.0445 -0.0124372 0.0107139 -0.0445 -0.0202877 -0.014 -0.0405 -0.0202877 -0.014 0.0131376 0.0445 0.0124829 0.0134924 0.0445 0.0118376 0.0202877 0.0445 0.014 0.0135 0.0445 0.0095 0.0131376 0.0445 0.0109415 0.0133092 0.0445 0.0111243 0.0129259 0.0445 0.0108072 0.0126874 0.0445 0.0107297 0.0345 0.0445 0.014 0.0345 0.0445 -0.014 0.0133093 0.0445 -0.0126956 0.0131376 0.0445 -0.0125128 -0.0117182 0.0445 0.0123043 0.0117709 0.0445 0.0123969 -0.011884 0.0445 0.0124808 0.0129259 0.0445 -0.0123784 -0.0120885 0.0445 0.0126106 -0.0116015 0.0445 0.012092 -0.0123189 0.0445 0.0126855 0.0121909 0.0445 0.0126635 0.0134924 0.0445 -0.0131582 0.0115312 0.0445 0.011961 0.0134924 0.0445 -0.0134089 0.0115312 0.0445 0.0114635 0.0135 0.0445 
-0.007 0.0116235 0.0445 0.0112303 0.0133093 0.0445 -0.0138715 0.0131376 0.0445 -0.0140543 -0.0120885 0.0445 0.0108619 -0.0123189 0.0445 0.010787 -0.0132044 0.0445 0.0110747 -0.0135 0.0445 0.0095 0.0095 0.0445 -0.014 0.0115311 0.0445 -0.0130348 0.0116234 0.0445 -0.0128016 -0.013436 0.0445 0.0119766 -0.0133468 0.0445 0.0122018 -0.0125607 0.0445 0.0127007 0.0035 0.0445 -0.014 0.0035 0.0445 -0.019 0.0202877 0.0445 -0.014 0.0124372 0.0445 -0.0142819 -0.0085 0.0445 -0.014 -0.0118626 0.0445 -0.0125646 -0.0135 0.0445 -0.007 -0.011691 0.0445 -0.0127473 -0.0120742 0.0445 -0.0124302 -0.0115079 0.0445 -0.0132097 -0.0115702 0.0445 -0.0129669 -0.012809 0.0445 -0.012384 -0.011691 0.0445 -0.0139229 -0.0133763 0.0445 -0.0128533 -0.0123126 0.0445 -0.0143174 -0.0045 0.0445 -0.019 -0.0133763 0.0445 -0.0138168 -0.0134686 0.0445 -0.0135838 -0.0203475 0.0445 -0.014 0.011964 0.0445 0.0108677 0.0117709 0.0445 0.0110275 -0.011884 0.0445 0.0109917 -0.0203475 0.0445 0.014 0.0133092 0.0405 0.0123001 0.0129259 0.0405 0.0126173 0.0202877 0.0405 0.014 0.01343 0.0405 0.0120804 0.01343 0.0405 0.011344 0.0131376 0.0405 0.0109415 0.0129259 0.0405 0.0108072 0.0134924 0.0405 0.0115868 0.0121909 0.0405 0.0107609 0.0135 0.0405 -0.007 0.0202877 0.0405 -0.014 -0.0117182 0.0405 0.0123043 -0.011884 0.0405 0.0124808 -0.0120885 0.0405 0.0126106 -0.0123189 0.0405 0.0126855 0.0134924 0.0405 -0.0131582 -0.0116015 0.0405 0.012092 0.0114998 0.0405 0.0117122 -0.0115413 0.0405 0.0118574 0.0115312 0.0405 0.0114635 0.0121909 0.0405 -0.0123322 0.0116235 0.0405 0.0112303 0.0129259 0.0405 -0.0141887 -0.011884 0.0405 0.0109917 -0.0120885 0.0405 0.0108619 -0.0123189 0.0405 0.010787 -0.0130178 0.0405 0.0109203 0.0095 0.0405 -0.014 0.0115311 0.0405 -0.0130348 0.011964 0.0405 -0.0141281 0.0114997 0.0405 -0.0132835 -0.0203475 0.0405 0.014 -0.0127986 0.0405 0.0126553 -0.0125607 0.0405 0.0127007 -0.013436 0.0405 0.0114959 -0.0345 0.0405 0.014 -0.0025 0.0405 -0.014 -0.0120742 0.0405 -0.0124302 -0.0085 0.0405 -0.014 -0.0115702 0.0405 -0.0129669 -0.0123126 0.0405 -0.0123528 -0.0125628 0.0405 -0.012337 -0.0115079 0.0405 -0.0134604 -0.013229 0.0405 -0.0126505 -0.0118626 0.0405 -0.0141056 -0.012809 0.0405 -0.0142861 -0.0135 0.0405 -0.0133351 -0.0345 0.0405 -0.014 -0.0385 0.0405 0.019 0.0124372 0.0405 0.0127105 0.011964 0.0405 0.0108677 0.0135 0.0405 0.0095 0.0117709 0.0405 0.0110275 -0.0025 0.0405 -0.019 -0.0045 0.0445 -0.014 -0.0135 0.0405 -0.007 -0.0135 0.0405 0.0095 0.0015 0.0405 -0.014 0.0015 0.0405 -0.019 -0.0345 0.0445 0.014 -0.0203475 0.0405 -0.014 -0.0345 0.0445 -0.014 -0.0125628 0.0445 -0.0143331 -0.012809 0.0445 -0.0142861 -0.0134686 0.0405 -0.0130864 -0.0135 0.0445 -0.0133351 -0.0134686 0.0445 -0.0130864 -0.0130358 0.0445 -0.0141794 -0.0130358 0.0405 -0.0141794 -0.0133763 0.0405 -0.0128533 -0.013229 0.0445 -0.0140196 -0.0133763 0.0405 -0.0138168 -0.013229 0.0405 -0.0140196 -0.0134686 0.0405 -0.0135838 -0.0130358 0.0405 -0.0124907 -0.013229 0.0445 -0.0126505 -0.012809 0.0405 -0.012384 -0.0130358 0.0445 -0.0124907 -0.0125628 0.0445 -0.012337 -0.0123126 0.0445 -0.0123528 -0.0118626 0.0405 -0.0125646 -0.011691 0.0405 -0.0127473 -0.0115079 0.0405 -0.0132097 -0.0115079 0.0445 -0.0134604 -0.0115702 0.0405 -0.0137032 -0.0115702 0.0445 -0.0137032 -0.011691 0.0405 -0.0139229 -0.0118626 0.0445 -0.0141056 -0.0120742 0.0445 -0.0142399 -0.0123126 0.0405 -0.0143174 -0.0120742 0.0405 -0.0142399 -0.0125628 0.0405 -0.0143331 -0.0125607 0.0445 0.0107718 -0.0127986 0.0445 0.0108172 -0.013436 0.0405 0.0119766 -0.0134664 0.0405 0.0117363 -0.0134664 0.0445 0.0117363 
-0.0127986 0.0405 0.0108172 -0.0130178 0.0445 0.0109203 -0.0132044 0.0405 0.0110747 -0.0133468 0.0405 0.0122018 -0.0133468 0.0405 0.0112707 -0.0133468 0.0445 0.0112707 -0.013436 0.0445 0.0114959 -0.0132044 0.0405 0.0123978 -0.0132044 0.0445 0.0123978 -0.0130178 0.0445 0.0125522 -0.0130178 0.0405 0.0125522 -0.0127986 0.0445 0.0126553 -0.0115413 0.0445 0.0118574 -0.0115413 0.0445 0.0116151 -0.0115413 0.0405 0.0116151 -0.0116015 0.0405 0.0113805 -0.0116015 0.0445 0.0113805 -0.0117182 0.0405 0.0111682 -0.0117182 0.0445 0.0111682 -0.0125607 0.0405 0.0107718 0.0124372 0.0405 -0.0142819 0.0121909 0.0405 -0.0142349 0.0114997 0.0445 -0.0132835 0.0121909 0.0445 -0.0142349 0.011964 0.0445 -0.0141281 0.0117708 0.0445 -0.0139683 0.0116234 0.0405 -0.0128016 0.0116234 0.0445 -0.0137655 0.0117708 0.0405 -0.0139683 0.0117708 0.0445 -0.0125988 0.0116234 0.0405 -0.0137655 0.0115311 0.0405 -0.0135323 0.0115311 0.0445 -0.0135323 0.011964 0.0405 -0.0124389 0.0117708 0.0405 -0.0125988 0.011964 0.0445 -0.0124389 0.0121909 0.0445 -0.0123322 0.0124372 0.0445 -0.0122852 0.0126874 0.0405 -0.0123009 0.0124372 0.0405 -0.0122852 0.0129259 0.0405 -0.0123784 0.0126874 0.0445 -0.0123009 0.0131376 0.0405 -0.0125128 0.0134301 0.0405 -0.0129153 0.0133093 0.0405 -0.0126956 0.0134301 0.0445 -0.0129153 0.0134924 0.0405 -0.0134089 0.0134301 0.0445 -0.0136518 0.0134301 0.0405 -0.0136518 0.0133093 0.0405 -0.0138715 0.0131376 0.0405 -0.0140543 0.0129259 0.0445 -0.0141887 0.0126874 0.0405 -0.0142661 0.0126874 0.0445 -0.0142661 0.0115312 0.0405 0.011961 0.0121909 0.0445 0.0107609 0.0116235 0.0445 0.0121941 0.0116235 0.0405 0.0121941 0.0117709 0.0405 0.0123969 0.011964 0.0405 0.0125567 0.0114998 0.0445 0.0117122 0.011964 0.0445 0.0125567 0.0121909 0.0405 0.0126635 0.0124372 0.0445 0.0127105 0.0131376 0.0405 0.0124829 0.0129259 0.0445 0.0126173 0.0126874 0.0405 0.0126947 0.0126874 0.0445 0.0126947 0.0133092 0.0445 0.0123001 0.01343 0.0445 0.0120804 0.0134924 0.0405 0.0118376 0.0134924 0.0445 0.0115868 0.01343 0.0445 0.011344 0.0133092 0.0405 0.0111243 0.0124372 0.0445 0.0107139 0.0124372 0.0405 0.0107139 0.0126874 0.0405 0.0107297 0.0345 0.0405 0.014 0.0345 0.0405 -0.014 0.0405 0.0133092 0.0123001 0.0405 0.0131376 0.0124829 0.0405 0.0126874 0.0126947 0.0405 0.0134924 0.0118376 0.0405 0.0135 0.0095 0.0405 0.0133092 0.0111243 0.0405 0.0129259 0.0108072 0.0405 0.0124372 0.0107139 0.0405 0.0126874 0.0107297 0.0405 0.0345 0.014 0.0405 0.0135 -0.007 0.0405 0.0131376 -0.0125128 0.0405 -0.0120885 0.0126106 0.0405 0.0121909 0.0126635 0.0405 -0.0117182 0.0123043 0.0405 -0.0116015 0.012092 0.0405 0.0116235 0.0121941 0.0405 0.0115312 0.011961 0.0405 0.0124372 0.0127105 0.0405 0.0124372 -0.0122852 0.0405 -0.0115413 0.0116151 0.0405 -0.0115413 0.0118574 0.0405 0.0115312 0.0114635 0.0405 0.011964 -0.0124389 0.0405 0.0121909 -0.0123322 0.0405 0.0129259 -0.0141887 0.0405 -0.0127986 0.0108172 0.0405 -0.0130178 0.0109203 0.0405 0.0095 -0.014 0.0405 0.0116234 -0.0137655 0.0405 -0.013436 0.0119766 0.0405 -0.0133468 0.0122018 0.0405 -0.0127986 0.0126553 0.0405 -0.0130178 0.0125522 0.0405 -0.0125607 0.0127007 0.0405 -0.0203475 0.014 0.0405 -0.0135 0.0095 0.0405 -0.0085 -0.014 0.0405 -0.0015 -0.014 0.0405 -0.0118626 -0.0125646 0.0405 -0.0135 -0.007 0.0405 -0.011691 -0.0127473 0.0405 -0.0123126 -0.0123528 0.0405 -0.0120742 -0.0124302 0.0405 -0.0125628 -0.012337 0.0405 -0.012809 -0.012384 0.0405 -0.0115702 -0.0137032 0.0405 -0.0130358 -0.0124907 0.0405 -0.0118626 -0.0141056 0.0405 -0.0135 -0.0133351 0.0405 -0.0203475 -0.014 0.0405 -0.0130358 -0.0141794 0.0405 
-0.012809 -0.0142861 0.0405 -0.013229 -0.0140196 0.0405 -0.0134686 -0.0135838 0.0405 -0.0125628 -0.0143331 0.0405 -0.0015 -0.019 0.0405 -0.0345 -0.014 0.0405 -0.0385 0.019 0.0405 -0.0123189 0.0126855 0.0405 0.011964 0.0108677 0.0405 0.0202877 -0.014 0.0445 0.0345 0.014 0.0445 0.0131376 0.0124829 0.0445 0.01343 0.0120804 0.0445 0.0202877 0.014 0.0445 0.0134924 0.0115868 0.0445 0.0135 0.0095 0.0445 0.0121909 0.0107609 0.0445 0.011964 0.0108677 0.0445 0.0345 -0.014 0.0445 0.0385 -0.019 0.0445 -0.011884 0.0124808 0.0445 0.0133093 -0.0126956 0.0445 0.0129259 -0.0123784 0.0445 0.0121909 0.0126635 0.0445 -0.0123189 0.0126855 0.0445 0.0134301 -0.0129153 0.0445 0.0115312 0.011961 0.0445 -0.0115413 0.0118574 0.0445 0.0202877 -0.014 0.0445 0.0114998 0.0117122 0.0445 -0.0115413 0.0116151 0.0445 0.0134924 -0.0134089 0.0445 0.0131376 -0.0140543 0.0445 0.0129259 -0.0141887 0.0445 -0.0120885 0.0108619 0.0445 -0.0127986 0.0108172 0.0445 -0.0132044 0.0110747 0.0445 -0.0133468 0.0112707 0.0445 0.0117708 -0.0139683 0.0445 -0.0134664 0.0117363 0.0445 -0.0125607 0.0127007 0.0445 0.0095 -0.014 0.0445 0.0124372 -0.0142819 0.0445 -0.0135 -0.007 0.0445 -0.0115079 -0.0132097 0.0445 -0.0125628 -0.012337 0.0445 -0.0115702 -0.0137032 0.0445 -0.012809 -0.012384 0.0445 -0.011691 -0.0139229 0.0445 -0.0130358 -0.0124907 0.0445 -0.013229 -0.0126505 0.0445 -0.0118626 -0.0141056 0.0445 -0.0133763 -0.0128533 0.0445 -0.0120742 -0.0142399 0.0445 -0.0135 -0.0133351 0.0445 -0.0133763 -0.0138168 0.0445 -0.012809 -0.0142861 0.0445 -0.0385 0.019 0.0445 -0.0345 -0.014 0.0445 0.0385 0.019 0.0445 0.0124372 0.0127105 0.0445 -0.011884 0.0109917 0.0445 0.0117709 0.0110275 0.0405 0.0385 -0.019 0.0445 -0.0035 -0.014 0.0445 -0.0085 -0.014 0.0445 -0.0135 0.0095 0.0445 0.0135 -0.007 0.0405 0.0025 -0.014 0.0445 0.0045 -0.014 0.0445 0.0045 -0.019 0.0445 -0.0203475 0.014 0.0405 -0.0345 0.014 0.0445 -0.0345 0.014 0.0445 -0.0203475 -0.014 0.0445 -0.0125628 -0.0143331 0.0445 -0.0134686 -0.0130864 0.0405 -0.0134686 -0.0130864 0.0445 -0.0130358 -0.0141794 0.0445 -0.013229 -0.0140196 0.0405 -0.0133763 -0.0138168 0.0405 -0.0133763 -0.0128533 0.0445 -0.0134686 -0.0135838 0.0405 -0.013229 -0.0126505 0.0445 -0.0123126 -0.0123528 0.0445 -0.0120742 -0.0124302 0.0445 -0.0118626 -0.0125646 0.0445 -0.011691 -0.0127473 0.0405 -0.0115702 -0.0129669 0.0445 -0.0115702 -0.0129669 0.0405 -0.0115079 -0.0132097 0.0405 -0.0115079 -0.0134604 0.0445 -0.0115079 -0.0134604 0.0405 -0.011691 -0.0139229 0.0405 -0.0120742 -0.0142399 0.0405 -0.0123126 -0.0143174 0.0445 -0.0123126 -0.0143174 0.0445 -0.0130178 0.0109203 0.0445 -0.0133468 0.0122018 0.0445 -0.013436 0.0119766 0.0405 -0.0134664 0.0117363 0.0405 -0.0132044 0.0110747 0.0405 -0.0133468 0.0112707 0.0405 -0.013436 0.0114959 0.0445 -0.013436 0.0114959 0.0445 -0.0130178 0.0125522 0.0445 -0.0132044 0.0123978 0.0405 -0.0132044 0.0123978 0.0445 -0.0127986 0.0126553 0.0445 -0.0120885 0.0126106 0.0445 -0.0117182 0.0123043 0.0405 -0.011884 0.0124808 0.0445 -0.0116015 0.012092 0.0445 -0.0116015 0.0113805 0.0405 -0.0116015 0.0113805 0.0445 -0.0117182 0.0111682 0.0405 -0.0117182 0.0111682 0.0405 -0.011884 0.0109917 0.0405 -0.0120885 0.0108619 0.0405 -0.0123189 0.010787 0.0445 -0.0123189 0.010787 0.0405 -0.0125607 0.0107718 0.0445 -0.0125607 0.0107718 0.0405 0.0124372 -0.0142819 0.0405 0.0121909 -0.0142349 0.0445 0.0115311 -0.0130348 0.0445 0.0114997 -0.0132835 0.0445 0.011964 -0.0141281 0.0445 0.0121909 -0.0142349 0.0445 0.0116234 -0.0128016 0.0405 0.0115311 -0.0130348 0.0405 0.0114997 -0.0132835 0.0405 0.011964 -0.0141281 0.0405 
[Mesh asset data omitted: the remainder of this added file is raw mesh geometry — arrays of vertex position coordinates, unit normal vectors, and face/triangle index lists, most likely from a COLLADA (.dae) robot-module mesh added by this change. The surrounding XML markup was lost in extraction (only dash runs remain where tags were), and the numeric payload carries no editorial content.]
648 1196 650 1197 645 1197 647 1197 645 1198 646 1198 647 1198 649 1199 648 1199 651 1199 653 1200 649 1200 651 1200 624 1201 650 1201 571 1201 650 1202 647 1202 571 1202 653 1203 651 1203 574 1203 652 1204 653 1204 574 1204 624 1205 571 1205 656 1205 652 1206 574 1206 575 1206 654 1207 652 1207 575 1207 627 1208 654 1208 575 1208 655 1209 624 1209 658 1209 627 1210 575 1210 646 1210 624 1211 656 1211 658 1211 657 1212 655 1212 569 1212 655 1213 658 1213 569 1213 622 1214 657 1214 659 1214 657 1215 569 1215 659 1215 621 1216 622 1216 660 1216 622 1217 659 1217 660 1217 618 1218 621 1218 566 1218 661 1219 618 1219 566 1219 621 1220 660 1220 566 1220 661 1221 566 1221 563 1221 662 1222 661 1222 565 1222 620 1223 662 1223 565 1223 661 1224 563 1224 565 1224 620 1225 565 1225 568 1225 620 1226 568 1226 567 1226 663 1227 620 1227 567 1227 623 1228 663 1228 567 1228 623 1229 567 1229 664 1229 623 1230 664 1230 666 1230 665 1231 623 1231 666 1231 667 1232 665 1232 666 1232 667 1233 666 1233 570 1233 667 1234 570 1234 668 1234 625 1235 667 1235 668 1235 625 1236 668 1236 669 1236 671 1237 625 1237 669 1237 671 1238 669 1238 572 1238 670 1239 671 1239 572 1239 670 1240 572 1240 643 1240 672 1241 670 1241 643 1241 697 1242 673 1242 674 1242 678 1243 697 1243 674 1243 675 1244 676 1244 677 1244 678 1245 674 1245 679 1245 607 1246 678 1246 679 1246 681 1247 675 1247 555 1247 675 1248 677 1248 555 1248 607 1249 679 1249 550 1249 680 1250 607 1250 550 1250 685 1251 681 1251 556 1251 681 1252 555 1252 556 1252 680 1253 550 1253 683 1253 682 1254 680 1254 683 1254 685 1255 556 1255 686 1255 682 1256 683 1256 684 1256 615 1257 682 1257 684 1257 676 1258 615 1258 684 1258 688 1259 685 1259 687 1259 676 1260 684 1260 677 1260 685 1261 686 1261 687 1261 613 1262 688 1262 689 1262 688 1263 687 1263 689 1263 614 1264 613 1264 557 1264 613 1265 689 1265 557 1265 595 1266 614 1266 538 1266 614 1267 557 1267 538 1267 594 1268 595 1268 536 1268 593 1269 594 1269 536 1269 595 1270 538 1270 536 1270 593 1271 536 1271 534 1271 592 1272 593 1272 532 1272 597 1273 592 1273 532 1273 593 1274 534 1274 532 1274 597 1275 532 1275 537 1275 597 1276 537 1276 690 1276 599 1277 597 1277 690 1277 692 1278 599 1278 690 1278 692 1279 690 1279 691 1279 692 1280 691 1280 694 1280 693 1281 692 1281 694 1281 695 1282 693 1282 694 1282 695 1283 694 1283 696 1283 695 1284 696 1284 579 1284 604 1285 695 1285 579 1285 604 1286 579 1286 548 1286 605 1287 604 1287 548 1287 605 1288 548 1288 549 1288 606 1289 605 1289 549 1289 606 1290 549 1290 673 1290 697 1291 606 1291 673 1291 698 1292 561 1292 701 1292 699 1293 698 1293 701 1293 609 1294 611 1294 700 1294 699 1295 701 1295 702 1295 610 1296 699 1296 702 1296 704 1297 609 1297 553 1297 609 1298 700 1298 553 1298 610 1299 702 1299 703 1299 706 1300 610 1300 703 1300 712 1301 704 1301 554 1301 704 1302 553 1302 554 1302 706 1303 703 1303 705 1303 708 1304 706 1304 705 1304 712 1305 554 1305 707 1305 708 1306 705 1306 710 1306 709 1307 708 1307 710 1307 611 1308 709 1308 710 1308 711 1309 712 1309 713 1309 611 1310 710 1310 700 1310 712 1311 707 1311 713 1311 601 1312 711 1312 714 1312 711 1313 713 1313 714 1313 717 1314 601 1314 715 1314 601 1315 714 1315 715 1315 716 1316 717 1316 719 1316 717 1317 715 1317 719 1317 718 1318 716 1318 535 1318 720 1319 718 1319 535 1319 716 1320 719 1320 535 1320 720 1321 535 1321 531 1321 722 1322 720 1322 530 1322 721 1323 722 1323 530 1323 720 1324 531 1324 530 1324 721 1325 530 1325 723 1325 721 1326 723 1326 540 1326 596 1327 721 1327 540 1327 724 1328 
596 1328 540 1328 724 1329 540 1329 542 1329 724 1330 542 1330 725 1330 726 1331 724 1331 725 1331 727 1332 726 1332 725 1332 727 1333 725 1333 546 1333 727 1334 546 1334 547 1334 728 1335 727 1335 547 1335 728 1336 547 1336 729 1336 603 1337 728 1337 729 1337 603 1338 729 1338 731 1338 730 1339 603 1339 731 1339 730 1340 731 1340 561 1340 698 1341 730 1341 561 1341 753 1342 752 1342 733 1342 589 1343 753 1343 733 1343 732 1344 598 1344 738 1344 589 1345 733 1345 577 1345 631 1346 589 1346 577 1346 735 1347 732 1347 541 1347 732 1348 738 1348 541 1348 631 1349 577 1349 578 1349 633 1350 631 1350 578 1350 736 1351 735 1351 734 1351 735 1352 541 1352 734 1352 633 1353 578 1353 545 1353 602 1354 633 1354 545 1354 736 1355 734 1355 533 1355 602 1356 545 1356 543 1356 600 1357 602 1357 543 1357 598 1358 600 1358 543 1358 737 1359 736 1359 739 1359 598 1360 543 1360 738 1360 736 1361 533 1361 739 1361 740 1362 737 1362 539 1362 737 1363 739 1363 539 1363 630 1364 740 1364 741 1364 740 1365 539 1365 741 1365 744 1366 630 1366 745 1366 630 1367 741 1367 745 1367 582 1368 744 1368 743 1368 742 1369 582 1369 743 1369 744 1370 745 1370 743 1370 742 1371 743 1371 520 1371 581 1372 742 1372 746 1372 584 1373 581 1373 746 1373 742 1374 520 1374 746 1374 584 1375 746 1375 747 1375 584 1376 747 1376 521 1376 748 1377 584 1377 521 1377 588 1378 748 1378 521 1378 588 1379 521 1379 749 1379 588 1380 749 1380 750 1380 585 1381 588 1381 750 1381 751 1382 585 1382 750 1382 751 1383 750 1383 525 1383 751 1384 525 1384 524 1384 586 1385 751 1385 524 1385 586 1386 524 1386 526 1386 587 1387 586 1387 526 1387 587 1388 526 1388 527 1388 754 1389 587 1389 527 1389 754 1390 527 1390 752 1390 753 1391 754 1391 752 1391 583 1392 522 1392 528 1392 755 1393 583 1393 528 1393 591 1394 560 1394 522 1394 583 1395 591 1395 522 1395 755 1396 528 1396 529 1396 756 1397 755 1397 529 1397 756 1398 529 1398 591 1398 591 1399 529 1399 560 1399 868 1400 23 1400 866 1400 866 1401 23 1401 815 1401 828 1402 872 1402 868 1402 868 1403 872 1403 23 1403 866 1404 815 1404 10 1404 10 1405 815 1405 9 1405 994 1406 23 1406 766 1406 757 1407 758 1407 994 1407 980 1408 994 1408 758 1408 983 1409 757 1409 994 1409 759 1410 994 1410 980 1410 760 1411 983 1411 994 1411 775 1412 994 1412 759 1412 985 1413 760 1413 994 1413 761 1414 986 1414 985 1414 761 1415 762 1415 986 1415 761 1416 989 1416 762 1416 761 1417 763 1417 989 1417 761 1418 765 1418 763 1418 761 1419 764 1419 765 1419 761 1420 985 1420 994 1420 968 1421 764 1421 761 1421 817 1422 968 1422 761 1422 872 1423 995 1423 766 1423 872 1424 766 1424 23 1424 767 1425 761 1425 994 1425 818 1426 995 1426 872 1426 956 1427 767 1427 818 1427 956 1428 768 1428 767 1428 920 1429 977 1429 978 1429 771 1430 978 1430 773 1430 771 1431 920 1431 978 1431 954 1432 767 1432 768 1432 769 1433 770 1433 977 1433 769 1434 977 1434 920 1434 957 1435 956 1435 818 1435 772 1436 771 1436 773 1436 772 1437 773 1437 774 1437 953 1438 767 1438 954 1438 816 1439 770 1439 769 1439 816 1440 775 1440 770 1440 958 1441 957 1441 818 1441 778 1442 772 1442 774 1442 778 1443 774 1443 969 1443 776 1444 767 1444 953 1444 960 1445 958 1445 818 1445 777 1446 969 1446 779 1446 777 1447 778 1447 969 1447 781 1448 767 1448 776 1448 923 1449 777 1449 779 1449 923 1450 779 1450 973 1450 962 1451 960 1451 818 1451 780 1452 767 1452 781 1452 925 1453 973 1453 971 1453 925 1454 923 1454 973 1454 963 1455 962 1455 818 1455 948 1456 767 1456 780 1456 965 1457 963 1457 818 1457 782 1458 965 1458 818 1458 967 1459 782 1459 818 1459 793 1460 
927 1460 926 1460 793 1461 928 1461 927 1461 793 1462 930 1462 928 1462 793 1463 783 1463 930 1463 793 1464 784 1464 783 1464 793 1465 910 1465 784 1465 793 1466 911 1466 910 1466 793 1467 912 1467 911 1467 785 1468 767 1468 948 1468 785 1469 939 1469 940 1469 785 1470 942 1470 939 1470 785 1471 948 1471 942 1471 785 1472 941 1472 933 1472 785 1473 943 1473 941 1473 785 1474 786 1474 943 1474 785 1475 946 1475 786 1475 785 1476 940 1476 946 1476 792 1477 787 1477 909 1477 792 1478 788 1478 787 1478 792 1479 916 1479 788 1479 792 1480 790 1480 916 1480 792 1481 789 1481 790 1481 792 1482 791 1482 789 1482 792 1483 816 1483 791 1483 792 1484 912 1484 793 1484 792 1485 909 1485 912 1485 1295 1486 877 1486 785 1486 1295 1487 967 1487 818 1487 1295 1488 785 1488 933 1488 1295 1489 932 1489 967 1489 1295 1490 933 1490 932 1490 1295 1491 818 1491 872 1491 797 1492 792 1492 793 1492 881 1493 815 1493 792 1493 794 1494 795 1494 813 1494 796 1495 797 1495 794 1495 798 1496 796 1496 794 1496 800 1497 797 1497 796 1497 897 1498 798 1498 794 1498 799 1499 797 1499 800 1499 899 1500 897 1500 794 1500 801 1501 797 1501 799 1501 900 1502 899 1502 794 1502 802 1503 797 1503 801 1503 803 1504 900 1504 794 1504 804 1505 797 1505 802 1505 902 1506 803 1506 794 1506 892 1507 797 1507 804 1507 805 1508 902 1508 794 1508 890 1509 797 1509 892 1509 903 1510 805 1510 794 1510 904 1511 794 1511 813 1511 904 1512 903 1512 794 1512 812 1513 904 1513 813 1513 807 1514 886 1514 806 1514 807 1515 890 1515 886 1515 807 1516 809 1516 812 1516 807 1517 808 1517 809 1517 807 1518 810 1518 808 1518 807 1519 889 1519 810 1519 807 1520 811 1520 889 1520 807 1521 806 1521 811 1521 807 1522 797 1522 890 1522 807 1523 812 1523 813 1523 814 1524 815 1524 881 1524 9 1525 814 1525 807 1525 9 1526 807 1526 813 1526 9 1527 815 1527 814 1527 815 1528 23 1528 994 1528 815 1529 994 1529 775 1529 815 1530 775 1530 792 1530 792 1531 775 1531 816 1531 761 1532 793 1532 817 1532 817 1533 793 1533 971 1533 971 1534 793 1534 926 1534 971 1535 926 1535 925 1535 807 1536 792 1536 797 1536 994 1537 818 1537 767 1537 819 1538 868 1538 822 1538 822 1539 820 1539 982 1539 820 1540 822 1540 979 1540 822 1541 982 1541 821 1541 979 1542 822 1542 981 1542 822 1543 821 1543 984 1543 981 1544 822 1544 869 1544 822 1545 984 1545 823 1545 823 1546 987 1546 824 1546 987 1547 988 1547 824 1547 988 1548 991 1548 824 1548 991 1549 990 1549 824 1549 990 1550 993 1550 824 1550 993 1551 992 1551 824 1551 822 1552 823 1552 824 1552 824 1553 992 1553 825 1553 824 1554 825 1554 826 1554 819 1555 827 1555 828 1555 868 1556 819 1556 828 1556 822 1557 824 1557 876 1557 828 1558 827 1558 837 1558 837 1559 876 1559 830 1559 876 1560 955 1560 830 1560 972 1561 976 1561 829 1561 970 1562 972 1562 919 1562 972 1563 829 1563 919 1563 955 1564 876 1564 831 1564 976 1565 832 1565 918 1565 829 1566 976 1566 918 1566 837 1567 830 1567 834 1567 970 1568 919 1568 921 1568 835 1569 970 1569 921 1569 831 1570 876 1570 952 1570 918 1571 832 1571 833 1571 832 1572 869 1572 833 1572 837 1573 834 1573 959 1573 835 1574 921 1574 836 1574 838 1575 835 1575 836 1575 952 1576 876 1576 950 1576 837 1577 959 1577 840 1577 974 1578 838 1578 839 1578 838 1579 836 1579 839 1579 950 1580 876 1580 951 1580 974 1581 839 1581 922 1581 975 1582 974 1582 922 1582 837 1583 840 1583 961 1583 951 1584 876 1584 949 1584 871 1585 975 1585 924 1585 975 1586 922 1586 924 1586 837 1587 961 1587 964 1587 949 1588 876 1588 947 1588 837 1589 964 1589 841 1589 837 1590 841 1590 842 1590 837 1591 842 1591 966 1591 
870 1592 843 1592 875 1592 843 1593 929 1593 875 1593 929 1594 931 1594 875 1594 931 1595 844 1595 875 1595 844 1596 906 1596 875 1596 906 1597 845 1597 875 1597 845 1598 846 1598 875 1598 846 1599 913 1599 875 1599 947 1600 876 1600 850 1600 935 1601 934 1601 850 1601 934 1602 938 1602 850 1602 938 1603 947 1603 850 1603 937 1604 936 1604 850 1604 936 1605 847 1605 850 1605 847 1606 944 1606 850 1606 944 1607 945 1607 850 1607 945 1608 935 1608 850 1608 848 1609 908 1609 880 1609 908 1610 907 1610 880 1610 907 1611 915 1611 880 1611 915 1612 914 1612 880 1612 914 1613 917 1613 880 1613 917 1614 849 1614 880 1614 849 1615 833 1615 880 1615 875 1616 913 1616 880 1616 913 1617 848 1617 880 1617 850 1618 878 1618 879 1618 837 1619 966 1619 879 1619 937 1620 850 1620 879 1620 966 1621 851 1621 879 1621 851 1622 937 1622 879 1622 828 1623 837 1623 879 1623 875 1624 880 1624 852 1624 880 1625 866 1625 882 1625 1297 1626 873 1626 874 1626 874 1627 852 1627 895 1627 874 1628 895 1628 896 1628 895 1629 852 1629 894 1629 874 1630 896 1630 898 1630 894 1631 852 1631 893 1631 874 1632 898 1632 853 1632 893 1633 852 1633 854 1633 874 1634 853 1634 901 1634 854 1635 852 1635 856 1635 874 1636 901 1636 855 1636 856 1637 852 1637 858 1637 874 1638 855 1638 857 1638 858 1639 852 1639 859 1639 874 1640 857 1640 860 1640 859 1641 852 1641 861 1641 1297 1642 874 1642 862 1642 874 1643 860 1643 862 1643 1297 1644 862 1644 905 1644 1297 1645 905 1645 884 1645 1297 1646 884 1646 865 1646 863 1647 885 1647 883 1647 885 1648 861 1648 883 1648 861 1649 852 1649 883 1649 865 1650 887 1650 883 1650 887 1651 888 1651 883 1651 888 1652 864 1652 883 1652 864 1653 891 1653 883 1653 891 1654 863 1654 883 1654 1297 1655 865 1655 883 1655 882 1656 866 1656 867 1656 883 1657 867 1657 10 1657 867 1658 866 1658 10 1658 1297 1659 883 1659 10 1659 822 1660 868 1660 866 1660 869 1661 822 1661 866 1661 880 1662 869 1662 866 1662 833 1663 869 1663 880 1663 826 1664 875 1664 824 1664 871 1665 875 1665 826 1665 870 1666 875 1666 871 1666 924 1667 870 1667 871 1667 852 1668 880 1668 883 1668 876 1669 837 1669 822 1669 828 1670 879 1670 872 1670 879 1671 1295 1671 872 1671 10 1672 9 1672 1297 1672 1297 1673 9 1673 813 1673 813 1674 795 1674 873 1674 1297 1675 813 1675 873 1675 874 1676 873 1676 794 1676 873 1677 795 1677 794 1677 874 1678 794 1678 797 1678 852 1679 874 1679 797 1679 852 1680 797 1680 793 1680 875 1681 852 1681 793 1681 875 1682 793 1682 761 1682 824 1683 875 1683 761 1683 824 1684 761 1684 876 1684 876 1685 761 1685 767 1685 876 1686 767 1686 785 1686 850 1687 876 1687 785 1687 850 1688 785 1688 878 1688 878 1689 785 1689 877 1689 878 1690 877 1690 1295 1690 878 1691 1295 1691 879 1691 881 1692 792 1692 880 1692 881 1693 880 1693 882 1693 792 1694 807 1694 883 1694 792 1695 883 1695 880 1695 814 1696 881 1696 882 1696 814 1697 882 1697 867 1697 883 1698 814 1698 867 1698 807 1699 814 1699 883 1699 884 1700 812 1700 809 1700 865 1701 884 1701 809 1701 885 1702 863 1702 806 1702 865 1703 809 1703 808 1703 887 1704 865 1704 808 1704 861 1705 885 1705 886 1705 885 1706 806 1706 886 1706 887 1707 808 1707 810 1707 888 1708 887 1708 810 1708 859 1709 861 1709 890 1709 861 1710 886 1710 890 1710 888 1711 810 1711 889 1711 864 1712 888 1712 889 1712 859 1713 890 1713 892 1713 864 1714 889 1714 811 1714 891 1715 864 1715 811 1715 863 1716 891 1716 811 1716 858 1717 859 1717 804 1717 863 1718 811 1718 806 1718 859 1719 892 1719 804 1719 856 1720 858 1720 802 1720 858 1721 804 1721 802 1721 854 1722 856 1722 801 1722 856 1723 802 
1723 801 1723 893 1724 854 1724 799 1724 854 1725 801 1725 799 1725 894 1726 893 1726 800 1726 895 1727 894 1727 800 1727 893 1728 799 1728 800 1728 895 1729 800 1729 796 1729 896 1730 895 1730 798 1730 898 1731 896 1731 798 1731 895 1732 796 1732 798 1732 898 1733 798 1733 897 1733 898 1734 897 1734 899 1734 853 1735 898 1735 899 1735 901 1736 853 1736 899 1736 901 1737 899 1737 900 1737 901 1738 900 1738 803 1738 855 1739 901 1739 803 1739 857 1740 855 1740 803 1740 857 1741 803 1741 902 1741 857 1742 902 1742 805 1742 860 1743 857 1743 805 1743 860 1744 805 1744 903 1744 862 1745 860 1745 903 1745 862 1746 903 1746 904 1746 905 1747 862 1747 904 1747 905 1748 904 1748 812 1748 884 1749 905 1749 812 1749 931 1750 930 1750 783 1750 844 1751 931 1751 783 1751 908 1752 848 1752 909 1752 844 1753 783 1753 784 1753 906 1754 844 1754 784 1754 907 1755 908 1755 787 1755 908 1756 909 1756 787 1756 906 1757 784 1757 910 1757 845 1758 906 1758 910 1758 915 1759 907 1759 788 1759 907 1760 787 1760 788 1760 845 1761 910 1761 911 1761 846 1762 845 1762 911 1762 915 1763 788 1763 916 1763 846 1764 911 1764 912 1764 913 1765 846 1765 912 1765 848 1766 913 1766 912 1766 914 1767 915 1767 790 1767 848 1768 912 1768 909 1768 915 1769 916 1769 790 1769 917 1770 914 1770 789 1770 914 1771 790 1771 789 1771 849 1772 917 1772 791 1772 917 1773 789 1773 791 1773 833 1774 849 1774 816 1774 849 1775 791 1775 816 1775 918 1776 833 1776 769 1776 829 1777 918 1777 769 1777 833 1778 816 1778 769 1778 829 1779 769 1779 920 1779 919 1780 829 1780 771 1780 921 1781 919 1781 771 1781 829 1782 920 1782 771 1782 921 1783 771 1783 772 1783 921 1784 772 1784 778 1784 836 1785 921 1785 778 1785 839 1786 836 1786 778 1786 839 1787 778 1787 777 1787 839 1788 777 1788 923 1788 922 1789 839 1789 923 1789 924 1790 922 1790 923 1790 924 1791 923 1791 925 1791 924 1792 925 1792 926 1792 870 1793 924 1793 926 1793 870 1794 926 1794 927 1794 843 1795 870 1795 927 1795 843 1796 927 1796 928 1796 929 1797 843 1797 928 1797 929 1798 928 1798 930 1798 931 1799 929 1799 930 1799 851 1800 932 1800 933 1800 937 1801 851 1801 933 1801 934 1802 935 1802 940 1802 937 1803 933 1803 941 1803 936 1804 937 1804 941 1804 938 1805 934 1805 939 1805 934 1806 940 1806 939 1806 936 1807 941 1807 943 1807 847 1808 936 1808 943 1808 947 1809 938 1809 942 1809 938 1810 939 1810 942 1810 847 1811 943 1811 786 1811 944 1812 847 1812 786 1812 947 1813 942 1813 948 1813 944 1814 786 1814 946 1814 945 1815 944 1815 946 1815 935 1816 945 1816 946 1816 949 1817 947 1817 780 1817 935 1818 946 1818 940 1818 947 1819 948 1819 780 1819 951 1820 949 1820 781 1820 949 1821 780 1821 781 1821 950 1822 951 1822 776 1822 951 1823 781 1823 776 1823 952 1824 950 1824 953 1824 950 1825 776 1825 953 1825 831 1826 952 1826 954 1826 955 1827 831 1827 954 1827 952 1828 953 1828 954 1828 955 1829 954 1829 768 1829 830 1830 955 1830 956 1830 834 1831 830 1831 956 1831 955 1832 768 1832 956 1832 834 1833 956 1833 957 1833 834 1834 957 1834 958 1834 959 1835 834 1835 958 1835 840 1836 959 1836 958 1836 840 1837 958 1837 960 1837 840 1838 960 1838 962 1838 961 1839 840 1839 962 1839 964 1840 961 1840 962 1840 964 1841 962 1841 963 1841 964 1842 963 1842 965 1842 841 1843 964 1843 965 1843 841 1844 965 1844 782 1844 842 1845 841 1845 782 1845 842 1846 782 1846 967 1846 966 1847 842 1847 967 1847 966 1848 967 1848 932 1848 851 1849 966 1849 932 1849 992 1850 764 1850 968 1850 825 1851 992 1851 968 1851 835 1852 838 1852 969 1852 825 1853 968 1853 817 1853 826 1854 825 1854 817 1854 970 
1855 835 1855 774 1855 835 1856 969 1856 774 1856 826 1857 817 1857 971 1857 871 1858 826 1858 971 1858 972 1859 970 1859 773 1859 970 1860 774 1860 773 1860 871 1861 971 1861 973 1861 975 1862 871 1862 973 1862 972 1863 773 1863 978 1863 975 1864 973 1864 779 1864 974 1865 975 1865 779 1865 838 1866 974 1866 779 1866 976 1867 972 1867 977 1867 838 1868 779 1868 969 1868 972 1869 978 1869 977 1869 832 1870 976 1870 770 1870 976 1871 977 1871 770 1871 869 1872 832 1872 775 1872 832 1873 770 1873 775 1873 981 1874 869 1874 759 1874 869 1875 775 1875 759 1875 979 1876 981 1876 980 1876 820 1877 979 1877 980 1877 981 1878 759 1878 980 1878 820 1879 980 1879 758 1879 982 1880 820 1880 757 1880 821 1881 982 1881 757 1881 820 1882 758 1882 757 1882 821 1883 757 1883 983 1883 821 1884 983 1884 760 1884 984 1885 821 1885 760 1885 823 1886 984 1886 760 1886 823 1887 760 1887 985 1887 823 1888 985 1888 986 1888 987 1889 823 1889 986 1889 988 1890 987 1890 986 1890 988 1891 986 1891 762 1891 988 1892 762 1892 989 1892 991 1893 988 1893 989 1893 991 1894 989 1894 763 1894 990 1895 991 1895 763 1895 990 1896 763 1896 765 1896 993 1897 990 1897 765 1897 993 1898 765 1898 764 1898 992 1899 993 1899 764 1899 822 1900 994 1900 766 1900 819 1901 822 1901 766 1901 837 1902 818 1902 994 1902 822 1903 837 1903 994 1903 819 1904 766 1904 995 1904 827 1905 819 1905 995 1905 827 1906 995 1906 837 1906 837 1907 995 1907 818 1907 996 1908 999 1908 8 1908 8 1909 999 1909 1052 1909 15 1910 997 1910 996 1910 996 1911 997 1911 999 1911 8 1912 1052 1912 11 1912 11 1913 1052 1913 998 1913 1054 1914 999 1914 1232 1914 1223 1915 1000 1915 1054 1915 1222 1916 1054 1916 1000 1916 1225 1917 1223 1917 1054 1917 1001 1918 1054 1918 1222 1918 1226 1919 1225 1919 1054 1919 1219 1920 1054 1920 1001 1920 1002 1921 1226 1921 1054 1921 1003 1922 1227 1922 1002 1922 1003 1923 1229 1923 1227 1923 1003 1924 1004 1924 1229 1924 1003 1925 1230 1925 1004 1925 1003 1926 1005 1926 1230 1926 1003 1927 1205 1927 1005 1927 1003 1928 1002 1928 1054 1928 1006 1929 1205 1929 1003 1929 1206 1930 1006 1930 1003 1930 997 1931 1234 1931 1232 1931 997 1932 1232 1932 999 1932 1008 1933 1003 1933 1054 1933 1017 1934 1234 1934 997 1934 1007 1935 1008 1935 1017 1935 1007 1936 1193 1936 1008 1936 1012 1937 1011 1937 1009 1937 1167 1938 1009 1938 1010 1938 1167 1939 1012 1939 1009 1939 1016 1940 1008 1940 1193 1940 1166 1941 1217 1941 1011 1941 1166 1942 1011 1942 1012 1942 1013 1943 1007 1943 1017 1943 1014 1944 1167 1944 1010 1944 1014 1945 1010 1945 1015 1945 1191 1946 1008 1946 1016 1946 1165 1947 1217 1947 1166 1947 1165 1948 1219 1948 1217 1948 1196 1949 1013 1949 1017 1949 1018 1950 1014 1950 1015 1950 1018 1951 1015 1951 1215 1951 1190 1952 1008 1952 1191 1952 1019 1953 1196 1953 1017 1953 1020 1954 1215 1954 1021 1954 1020 1955 1018 1955 1215 1955 1189 1956 1008 1956 1190 1956 1171 1957 1021 1957 1212 1957 1171 1958 1020 1958 1021 1958 1198 1959 1019 1959 1017 1959 1188 1960 1008 1960 1189 1960 1056 1961 1171 1961 1212 1961 1056 1962 1212 1962 1209 1962 1023 1963 1198 1963 1017 1963 1028 1964 1008 1964 1188 1964 1022 1965 1023 1965 1017 1965 1201 1966 1022 1966 1017 1966 1203 1967 1201 1967 1017 1967 1024 1968 1174 1968 1055 1968 1024 1969 1175 1969 1174 1969 1024 1970 1149 1970 1175 1970 1024 1971 1025 1971 1149 1971 1024 1972 1026 1972 1025 1972 1024 1973 1154 1973 1026 1973 1024 1974 1157 1974 1154 1974 1024 1975 1027 1975 1157 1975 1117 1976 1008 1976 1028 1976 1117 1977 1029 1977 1178 1977 1117 1978 1184 1978 1029 1978 1117 1979 1028 1979 1184 
1979 1117 1980 1181 1980 1034 1980 1117 1981 1182 1981 1181 1981 1117 1982 1185 1982 1182 1982 1117 1983 1186 1983 1185 1983 1117 1984 1178 1984 1186 1984 1032 1985 1153 1985 1152 1985 1032 1986 1031 1986 1153 1986 1032 1987 1030 1987 1031 1987 1032 1988 1160 1988 1030 1988 1032 1989 1161 1989 1160 1989 1032 1990 1027 1990 1024 1990 1032 1991 1164 1991 1161 1991 1032 1992 1165 1992 1164 1992 1032 1993 1152 1993 1027 1993 1033 1994 1118 1994 1117 1994 1033 1995 1203 1995 1017 1995 1033 1996 1117 1996 1034 1996 1033 1997 1204 1997 1203 1997 1033 1998 1034 1998 1204 1998 1033 1999 1017 1999 997 1999 1048 2000 1032 2000 1024 2000 1051 2001 1052 2001 1032 2001 1053 2002 1043 2002 1035 2002 1140 2003 1048 2003 1043 2003 1141 2004 1140 2004 1043 2004 1139 2005 1048 2005 1140 2005 1036 2006 1141 2006 1043 2006 1037 2007 1048 2007 1139 2007 1038 2008 1036 2008 1043 2008 1137 2009 1048 2009 1037 2009 1039 2010 1038 2010 1043 2010 1041 2011 1048 2011 1137 2011 1143 2012 1039 2012 1043 2012 1040 2013 1048 2013 1041 2013 1042 2014 1143 2014 1043 2014 1131 2015 1048 2015 1040 2015 1146 2016 1042 2016 1043 2016 1049 2017 1048 2017 1131 2017 1044 2018 1146 2018 1043 2018 1044 2019 1043 2019 1053 2019 1045 2020 1044 2020 1053 2020 1046 2021 1045 2021 1053 2021 1125 2022 1046 2022 1053 2022 1057 2023 1047 2023 1124 2023 1057 2024 1049 2024 1047 2024 1057 2025 1048 2025 1049 2025 1057 2026 1126 2026 1125 2026 1057 2027 1050 2027 1126 2027 1057 2028 1129 2028 1050 2028 1057 2029 1132 2029 1129 2029 1057 2030 1124 2030 1132 2030 1057 2031 1125 2031 1053 2031 1121 2032 1052 2032 1051 2032 998 2033 1121 2033 1057 2033 998 2034 1052 2034 1121 2034 998 2035 1057 2035 1053 2035 1052 2036 999 2036 1054 2036 1052 2037 1054 2037 1219 2037 1052 2038 1219 2038 1032 2038 1032 2039 1219 2039 1165 2039 1003 2040 1024 2040 1206 2040 1206 2041 1024 2041 1209 2041 1209 2042 1024 2042 1055 2042 1209 2043 1055 2043 1056 2043 1057 2044 1032 2044 1048 2044 1054 2045 1017 2045 1008 2045 1068 2046 996 2046 1060 2046 1060 2047 1059 2047 1058 2047 1059 2048 1060 2048 1221 2048 1060 2049 1058 2049 1224 2049 1221 2050 1060 2050 1061 2050 1060 2051 1224 2051 1062 2051 1061 2052 1060 2052 1220 2052 1060 2053 1062 2053 1063 2053 1063 2054 1228 2054 1069 2054 1228 2055 1064 2055 1069 2055 1064 2056 1065 2056 1069 2056 1065 2057 1066 2057 1069 2057 1066 2058 1231 2058 1069 2058 1231 2059 1067 2059 1069 2059 1060 2060 1063 2060 1069 2060 1069 2061 1067 2061 1207 2061 1069 2062 1207 2062 1208 2062 1068 2063 1235 2063 15 2063 996 2064 1068 2064 15 2064 1060 2065 1069 2065 1116 2065 15 2066 1235 2066 1233 2066 1233 2067 1116 2067 1070 2067 1116 2068 1071 2068 1070 2068 1072 2069 1216 2069 1074 2069 1210 2070 1072 2070 1075 2070 1072 2071 1074 2071 1075 2071 1071 2072 1116 2072 1192 2072 1216 2073 1218 2073 1073 2073 1074 2074 1216 2074 1073 2074 1233 2075 1070 2075 1194 2075 1210 2076 1075 2076 1168 2076 1077 2077 1210 2077 1168 2077 1192 2078 1116 2078 1076 2078 1073 2079 1218 2079 1114 2079 1218 2080 1220 2080 1114 2080 1233 2081 1194 2081 1195 2081 1077 2082 1168 2082 1169 2082 1214 2083 1077 2083 1169 2083 1076 2084 1116 2084 1078 2084 1233 2085 1195 2085 1197 2085 1213 2086 1214 2086 1170 2086 1214 2087 1169 2087 1170 2087 1078 2088 1116 2088 1080 2088 1211 2089 1213 2089 1079 2089 1213 2090 1170 2090 1079 2090 1233 2091 1197 2091 1199 2091 1080 2092 1116 2092 1187 2092 1211 2093 1079 2093 1172 2093 1115 2094 1211 2094 1172 2094 1233 2095 1199 2095 1200 2095 1187 2096 1116 2096 1088 2096 1233 2097 1200 2097 1081 2097 1233 2098 1081 2098 
1082 2098 1233 2099 1082 2099 1202 2099 1083 2100 1173 2100 1095 2100 1173 2101 1176 2101 1095 2101 1176 2102 1084 2102 1095 2102 1084 2103 1085 2103 1095 2103 1085 2104 1086 2104 1095 2104 1086 2105 1155 2105 1095 2105 1155 2106 1156 2106 1095 2106 1156 2107 1087 2107 1095 2107 1088 2108 1116 2108 1119 2108 1092 2109 1179 2109 1119 2109 1179 2110 1183 2110 1119 2110 1183 2111 1088 2111 1119 2111 1177 2112 1180 2112 1119 2112 1180 2113 1089 2113 1119 2113 1089 2114 1090 2114 1119 2114 1090 2115 1091 2115 1119 2115 1091 2116 1092 2116 1119 2116 1158 2117 1150 2117 1094 2117 1150 2118 1151 2118 1094 2118 1151 2119 1159 2119 1094 2119 1159 2120 1093 2120 1094 2120 1093 2121 1163 2121 1094 2121 1095 2122 1087 2122 1094 2122 1163 2123 1162 2123 1094 2123 1162 2124 1114 2124 1094 2124 1087 2125 1158 2125 1094 2125 1119 2126 1096 2126 1301 2126 1233 2127 1202 2127 1301 2127 1177 2128 1119 2128 1301 2128 1202 2129 1097 2129 1301 2129 1097 2130 1177 2130 1301 2130 15 2131 1233 2131 1301 2131 1095 2132 1094 2132 1107 2132 1094 2133 8 2133 1098 2133 1108 2134 1099 2134 1103 2134 1103 2135 1107 2135 1138 2135 1103 2136 1138 2136 1100 2136 1138 2137 1107 2137 1102 2137 1103 2138 1100 2138 1101 2138 1102 2139 1107 2139 1136 2139 1103 2140 1101 2140 1142 2140 1136 2141 1107 2141 1104 2141 1103 2142 1142 2142 1105 2142 1104 2143 1107 2143 1135 2143 1103 2144 1105 2144 1106 2144 1135 2145 1107 2145 1134 2145 1103 2146 1106 2146 1144 2146 1134 2147 1107 2147 1130 2147 1103 2148 1144 2148 1145 2148 1130 2149 1107 2149 1111 2149 1108 2150 1103 2150 1147 2150 1103 2151 1145 2151 1147 2151 1108 2152 1147 2152 1148 2152 1108 2153 1148 2153 1109 2153 1108 2154 1109 2154 1110 2154 1123 2155 1122 2155 1120 2155 1122 2156 1111 2156 1120 2156 1110 2157 1127 2157 1120 2157 1111 2158 1107 2158 1120 2158 1127 2159 1128 2159 1120 2159 1128 2160 1112 2160 1120 2160 1112 2161 1133 2161 1120 2161 1133 2162 1123 2162 1120 2162 1108 2163 1110 2163 1120 2163 1098 2164 8 2164 1113 2164 1120 2165 1113 2165 11 2165 1113 2166 8 2166 11 2166 1108 2167 1120 2167 11 2167 1060 2168 996 2168 8 2168 1220 2169 1060 2169 8 2169 1094 2170 1220 2170 8 2170 1114 2171 1220 2171 1094 2171 1208 2172 1095 2172 1069 2172 1115 2173 1095 2173 1208 2173 1083 2174 1095 2174 1115 2174 1172 2175 1083 2175 1115 2175 1107 2176 1094 2176 1120 2176 1116 2177 1233 2177 1060 2177 1301 2178 1033 2178 15 2178 15 2179 1033 2179 997 2179 1108 2180 11 2180 1053 2180 11 2181 998 2181 1053 2181 1035 2182 1099 2182 1108 2182 1035 2183 1108 2183 1053 2183 1103 2184 1099 2184 1035 2184 1103 2185 1035 2185 1043 2185 1103 2186 1043 2186 1048 2186 1107 2187 1103 2187 1048 2187 1107 2188 1048 2188 1024 2188 1095 2189 1107 2189 1024 2189 1095 2190 1024 2190 1003 2190 1069 2191 1095 2191 1003 2191 1069 2192 1003 2192 1116 2192 1116 2193 1003 2193 1008 2193 1116 2194 1008 2194 1117 2194 1119 2195 1116 2195 1117 2195 1096 2196 1119 2196 1118 2196 1119 2197 1117 2197 1118 2197 1301 2198 1096 2198 1118 2198 1033 2199 1301 2199 1118 2199 1098 2200 1032 2200 1094 2200 1051 2201 1032 2201 1098 2201 1032 2202 1057 2202 1120 2202 1032 2203 1120 2203 1094 2203 1121 2204 1051 2204 1098 2204 1121 2205 1098 2205 1113 2205 1120 2206 1121 2206 1113 2206 1057 2207 1121 2207 1120 2207 1109 2208 1046 2208 1125 2208 1110 2209 1109 2209 1125 2209 1122 2210 1123 2210 1124 2210 1110 2211 1125 2211 1126 2211 1127 2212 1110 2212 1126 2212 1111 2213 1122 2213 1047 2213 1122 2214 1124 2214 1047 2214 1127 2215 1126 2215 1050 2215 1128 2216 1127 2216 1050 2216 1130 2217 1111 2217 1049 2217 1111 2218 
1047 2218 1049 2218 1128 2219 1050 2219 1129 2219 1112 2220 1128 2220 1129 2220 1130 2221 1049 2221 1131 2221 1112 2222 1129 2222 1132 2222 1133 2223 1112 2223 1132 2223 1123 2224 1133 2224 1132 2224 1134 2225 1130 2225 1040 2225 1123 2226 1132 2226 1124 2226 1130 2227 1131 2227 1040 2227 1135 2228 1134 2228 1041 2228 1134 2229 1040 2229 1041 2229 1104 2230 1135 2230 1137 2230 1135 2231 1041 2231 1137 2231 1136 2232 1104 2232 1037 2232 1104 2233 1137 2233 1037 2233 1102 2234 1136 2234 1139 2234 1138 2235 1102 2235 1139 2235 1136 2236 1037 2236 1139 2236 1138 2237 1139 2237 1140 2237 1100 2238 1138 2238 1141 2238 1101 2239 1100 2239 1141 2239 1138 2240 1140 2240 1141 2240 1101 2241 1141 2241 1036 2241 1101 2242 1036 2242 1038 2242 1142 2243 1101 2243 1038 2243 1105 2244 1142 2244 1038 2244 1105 2245 1038 2245 1039 2245 1105 2246 1039 2246 1143 2246 1106 2247 1105 2247 1143 2247 1144 2248 1106 2248 1143 2248 1144 2249 1143 2249 1042 2249 1144 2250 1042 2250 1146 2250 1145 2251 1144 2251 1146 2251 1145 2252 1146 2252 1044 2252 1147 2253 1145 2253 1044 2253 1147 2254 1044 2254 1045 2254 1148 2255 1147 2255 1045 2255 1148 2256 1045 2256 1046 2256 1109 2257 1148 2257 1046 2257 1084 2258 1149 2258 1025 2258 1085 2259 1084 2259 1025 2259 1150 2260 1158 2260 1152 2260 1085 2261 1025 2261 1026 2261 1086 2262 1085 2262 1026 2262 1151 2263 1150 2263 1153 2263 1150 2264 1152 2264 1153 2264 1086 2265 1026 2265 1154 2265 1155 2266 1086 2266 1154 2266 1159 2267 1151 2267 1031 2267 1151 2268 1153 2268 1031 2268 1155 2269 1154 2269 1157 2269 1156 2270 1155 2270 1157 2270 1159 2271 1031 2271 1030 2271 1156 2272 1157 2272 1027 2272 1087 2273 1156 2273 1027 2273 1158 2274 1087 2274 1027 2274 1093 2275 1159 2275 1160 2275 1158 2276 1027 2276 1152 2276 1159 2277 1030 2277 1160 2277 1163 2278 1093 2278 1161 2278 1093 2279 1160 2279 1161 2279 1162 2280 1163 2280 1164 2280 1163 2281 1161 2281 1164 2281 1114 2282 1162 2282 1165 2282 1162 2283 1164 2283 1165 2283 1073 2284 1114 2284 1166 2284 1074 2285 1073 2285 1166 2285 1114 2286 1165 2286 1166 2286 1074 2287 1166 2287 1012 2287 1075 2288 1074 2288 1167 2288 1168 2289 1075 2289 1167 2289 1074 2290 1012 2290 1167 2290 1168 2291 1167 2291 1014 2291 1168 2292 1014 2292 1018 2292 1169 2293 1168 2293 1018 2293 1170 2294 1169 2294 1018 2294 1170 2295 1018 2295 1020 2295 1170 2296 1020 2296 1171 2296 1079 2297 1170 2297 1171 2297 1172 2298 1079 2298 1171 2298 1172 2299 1171 2299 1056 2299 1172 2300 1056 2300 1055 2300 1083 2301 1172 2301 1055 2301 1083 2302 1055 2302 1174 2302 1173 2303 1083 2303 1174 2303 1173 2304 1174 2304 1175 2304 1176 2305 1173 2305 1175 2305 1176 2306 1175 2306 1149 2306 1084 2307 1176 2307 1149 2307 1097 2308 1204 2308 1034 2308 1177 2309 1097 2309 1034 2309 1179 2310 1092 2310 1178 2310 1177 2311 1034 2311 1181 2311 1180 2312 1177 2312 1181 2312 1183 2313 1179 2313 1029 2313 1179 2314 1178 2314 1029 2314 1180 2315 1181 2315 1182 2315 1089 2316 1180 2316 1182 2316 1088 2317 1183 2317 1184 2317 1183 2318 1029 2318 1184 2318 1089 2319 1182 2319 1185 2319 1090 2320 1089 2320 1185 2320 1088 2321 1184 2321 1028 2321 1090 2322 1185 2322 1186 2322 1091 2323 1090 2323 1186 2323 1092 2324 1091 2324 1186 2324 1187 2325 1088 2325 1188 2325 1092 2326 1186 2326 1178 2326 1088 2327 1028 2327 1188 2327 1080 2328 1187 2328 1189 2328 1187 2329 1188 2329 1189 2329 1078 2330 1080 2330 1190 2330 1080 2331 1189 2331 1190 2331 1076 2332 1078 2332 1191 2332 1078 2333 1190 2333 1191 2333 1192 2334 1076 2334 1016 2334 1071 2335 1192 2335 1016 2335 1076 2336 1191 2336 1016 
2336 1071 2337 1016 2337 1193 2337 1070 2338 1071 2338 1007 2338 1194 2339 1070 2339 1007 2339 1071 2340 1193 2340 1007 2340 1194 2341 1007 2341 1013 2341 1194 2342 1013 2342 1196 2342 1195 2343 1194 2343 1196 2343 1197 2344 1195 2344 1196 2344 1197 2345 1196 2345 1019 2345 1197 2346 1019 2346 1198 2346 1199 2347 1197 2347 1198 2347 1200 2348 1199 2348 1198 2348 1200 2349 1198 2349 1023 2349 1200 2350 1023 2350 1022 2350 1081 2351 1200 2351 1022 2351 1081 2352 1022 2352 1201 2352 1082 2353 1081 2353 1201 2353 1082 2354 1201 2354 1203 2354 1202 2355 1082 2355 1203 2355 1202 2356 1203 2356 1204 2356 1097 2357 1202 2357 1204 2357 1067 2358 1205 2358 1006 2358 1207 2359 1067 2359 1006 2359 1077 2360 1214 2360 1215 2360 1207 2361 1006 2361 1206 2361 1208 2362 1207 2362 1206 2362 1210 2363 1077 2363 1015 2363 1077 2364 1215 2364 1015 2364 1208 2365 1206 2365 1209 2365 1115 2366 1208 2366 1209 2366 1072 2367 1210 2367 1010 2367 1210 2368 1015 2368 1010 2368 1115 2369 1209 2369 1212 2369 1211 2370 1115 2370 1212 2370 1072 2371 1010 2371 1009 2371 1211 2372 1212 2372 1021 2372 1213 2373 1211 2373 1021 2373 1214 2374 1213 2374 1021 2374 1216 2375 1072 2375 1011 2375 1214 2376 1021 2376 1215 2376 1072 2377 1009 2377 1011 2377 1218 2378 1216 2378 1217 2378 1216 2379 1011 2379 1217 2379 1220 2380 1218 2380 1219 2380 1218 2381 1217 2381 1219 2381 1061 2382 1220 2382 1001 2382 1220 2383 1219 2383 1001 2383 1221 2384 1061 2384 1222 2384 1059 2385 1221 2385 1222 2385 1061 2386 1001 2386 1222 2386 1059 2387 1222 2387 1000 2387 1058 2388 1059 2388 1223 2388 1224 2389 1058 2389 1223 2389 1059 2390 1000 2390 1223 2390 1224 2391 1223 2391 1225 2391 1224 2392 1225 2392 1226 2392 1062 2393 1224 2393 1226 2393 1063 2394 1062 2394 1226 2394 1063 2395 1226 2395 1002 2395 1063 2396 1002 2396 1227 2396 1228 2397 1063 2397 1227 2397 1064 2398 1228 2398 1227 2398 1064 2399 1227 2399 1229 2399 1064 2400 1229 2400 1004 2400 1065 2401 1064 2401 1004 2401 1065 2402 1004 2402 1230 2402 1066 2403 1065 2403 1230 2403 1066 2404 1230 2404 1005 2404 1231 2405 1066 2405 1005 2405 1231 2406 1005 2406 1205 2406 1067 2407 1231 2407 1205 2407 1060 2408 1054 2408 1068 2408 1068 2409 1054 2409 1232 2409 1233 2410 1017 2410 1054 2410 1060 2411 1233 2411 1054 2411 1068 2412 1232 2412 1234 2412 1235 2413 1068 2413 1234 2413 1235 2414 1234 2414 1233 2414 1233 2415 1234 2415 1017 2415 1238 2416 1242 2416 1236 2416 1251 2417 1238 2417 1236 2417 1247 2418 1280 2418 1242 2418 1238 2419 1247 2419 1242 2419 1237 2420 1248 2420 1284 2420 1237 2421 1284 2421 1238 2421 1250 2422 1238 2422 1251 2422 1250 2423 1237 2423 1238 2423 1255 2424 1251 2424 1253 2424 1255 2425 1250 2425 1251 2425 1240 2426 1263 2426 1239 2426 1240 2427 1239 2427 1274 2427 1240 2428 1274 2428 1321 2428 1241 2429 1240 2429 1321 2429 1242 2430 1241 2430 1321 2430 1244 2431 1236 2431 1242 2431 1244 2432 1242 2432 1321 2432 1243 2433 1236 2433 1244 2433 1254 2434 1244 2434 1278 2434 1254 2435 1243 2435 1244 2435 1269 2436 1278 2436 1277 2436 1269 2437 1254 2437 1278 2437 1236 2438 1281 2438 1245 2438 1236 2439 1245 2439 1251 2439 1242 2440 1280 2440 1246 2440 1242 2441 1246 2441 1241 2441 1281 2442 1280 2442 1247 2442 1281 2443 1247 2443 1245 2443 1344 2444 1247 2444 1238 2444 1284 2445 1344 2445 1238 2445 1284 2446 1240 2446 1241 2446 1287 2447 1284 2447 1248 2447 1287 2448 1240 2448 1284 2448 1290 2449 1287 2449 1248 2449 1237 2450 1285 2450 1293 2450 1237 2451 1293 2451 1248 2451 1289 2452 1237 2452 1250 2452 1249 2453 1289 2453 1250 2453 1251 2454 1245 2454 1252 2454 1251 2455 
1252 2455 1253 2455 1348 2456 1286 2456 1250 2456 1255 2457 1348 2457 1250 2457 1253 2458 1243 2458 1254 2458 1255 2459 1253 2459 1254 2459 1292 2460 1291 2460 1255 2460 1292 2461 1255 2461 1254 2461 1351 2462 1271 2462 879 2462 1320 2463 1256 2463 16 2463 1258 2464 1351 2464 879 2464 1258 2465 559 2465 1259 2465 1239 2466 16 2466 1256 2466 1258 2467 1257 2467 1351 2467 1258 2468 1259 2468 1257 2468 1304 2469 16 2469 1239 2469 1260 2470 1320 2470 16 2470 1263 2471 1304 2471 1239 2471 1261 2472 1320 2472 1260 2472 1306 2473 1304 2473 1263 2473 1301 2474 1261 2474 1260 2474 263 2475 1306 2475 1263 2475 1262 2476 1266 2476 263 2476 1262 2477 263 2477 1263 2477 1264 2478 1108 2478 1268 2478 1264 2479 1301 2479 1108 2479 1264 2480 1261 2480 1301 2480 1276 2481 1264 2481 1268 2481 1319 2482 1266 2482 1262 2482 0 2483 1319 2483 1265 2483 0 2484 1266 2484 1319 2484 1299 2485 1267 2485 1276 2485 1299 2486 1276 2486 1268 2486 1277 2487 1267 2487 1299 2487 1317 2488 0 2488 1265 2488 573 2489 1317 2489 1270 2489 573 2490 0 2490 1317 2490 573 2491 1270 2491 634 2491 1297 2492 1269 2492 1277 2492 1297 2493 1277 2493 1299 2493 1297 2494 813 2494 1269 2494 639 2495 634 2495 1270 2495 1272 2496 639 2496 1270 2496 1271 2497 813 2497 1295 2497 1271 2498 1269 2498 813 2498 559 2499 639 2499 1272 2499 879 2500 1271 2500 1295 2500 1259 2501 559 2501 1272 2501 1273 2502 1343 2502 1274 2502 1239 2503 1273 2503 1274 2503 1240 2504 1275 2504 1316 2504 1240 2505 1316 2505 1263 2505 1274 2506 1320 2506 1261 2506 1321 2507 1274 2507 1261 2507 1279 2508 1283 2508 1244 2508 1321 2509 1279 2509 1244 2509 1244 2510 1264 2510 1276 2510 1278 2511 1244 2511 1276 2511 1282 2512 1335 2512 1277 2512 1278 2513 1282 2513 1277 2513 1243 2514 1347 2514 1281 2514 1243 2515 1281 2515 1236 2515 1269 2516 1336 2516 1346 2516 1269 2517 1346 2517 1254 2517 1273 2518 1316 2518 1275 2518 1343 2519 1273 2519 1275 2519 1279 2520 1343 2520 1275 2520 1279 2521 1275 2521 1246 2521 1279 2522 1246 2522 1280 2522 1280 2523 1281 2523 1283 2523 1279 2524 1280 2524 1283 2524 1283 2525 1281 2525 1347 2525 1282 2526 1283 2526 1346 2526 1283 2527 1347 2527 1346 2527 1335 2528 1282 2528 1336 2528 1282 2529 1346 2529 1336 2529 1241 2530 1344 2530 1284 2530 1241 2531 1246 2531 1344 2531 1344 2532 1293 2532 1285 2532 1247 2533 1344 2533 1285 2533 1245 2534 1247 2534 1286 2534 1247 2535 1285 2535 1286 2535 1252 2536 1245 2536 1348 2536 1245 2537 1286 2537 1348 2537 1355 2538 1275 2538 1240 2538 1287 2539 1355 2539 1240 2539 1288 2540 1319 2540 1262 2540 1288 2541 1262 2541 1287 2541 1288 2542 1287 2542 1290 2542 1350 2543 1288 2543 1290 2543 1289 2544 1350 2544 1290 2544 1314 2545 1289 2545 1249 2545 1314 2546 1249 2546 1291 2546 1314 2547 1350 2547 1289 2547 1353 2548 1291 2548 1292 2548 1353 2549 1314 2549 1291 2549 1351 2550 1292 2550 1271 2550 1351 2551 1353 2551 1292 2551 1293 2552 1345 2552 1290 2552 1248 2553 1293 2553 1290 2553 1358 2554 1285 2554 1237 2554 1289 2555 1358 2555 1237 2555 1250 2556 1286 2556 1357 2556 1250 2557 1357 2557 1249 2557 1253 2558 1252 2558 1243 2558 1252 2559 1347 2559 1243 2559 1291 2560 1294 2560 1348 2560 1291 2561 1348 2561 1255 2561 1254 2562 1346 2562 1359 2562 1254 2563 1359 2563 1292 2563 1295 2564 1340 2564 879 2564 879 2565 1340 2565 1323 2565 1337 2566 1295 2566 813 2566 1337 2567 1340 2567 1295 2567 1297 2568 1337 2568 813 2568 1296 2569 1337 2569 1297 2569 1299 2570 1296 2570 1297 2570 1299 2571 1298 2571 1296 2571 1331 2572 1298 2572 1299 2572 1268 2573 1331 2573 1299 2573 1268 2574 1108 2574 1331 2574 1108 
2575 1330 2575 1331 2575 1300 2576 1108 2576 1301 2576 1300 2577 1330 2577 1108 2577 1303 2578 1301 2578 1260 2578 1303 2579 1300 2579 1301 2579 1302 2580 1303 2580 1260 2580 16 2581 1302 2581 1260 2581 1304 2582 1305 2582 16 2582 1305 2583 1302 2583 16 2583 1304 2584 1306 2584 1305 2584 1306 2585 1307 2585 1305 2585 1326 2586 1306 2586 263 2586 1326 2587 1307 2587 1306 2587 1266 2588 1326 2588 263 2588 1327 2589 1326 2589 1266 2589 0 2590 1333 2590 1327 2590 0 2591 1327 2591 1266 2591 573 2592 1334 2592 1333 2592 573 2593 1333 2593 0 2593 634 2594 1308 2594 573 2594 573 2595 1308 2595 1334 2595 639 2596 1308 2596 634 2596 1339 2597 1308 2597 639 2597 1322 2598 639 2598 559 2598 1322 2599 1339 2599 639 2599 1309 2600 1322 2600 559 2600 1258 2601 1309 2601 559 2601 879 2602 1309 2602 1258 2602 879 2603 1323 2603 1309 2603 1277 2604 1335 2604 1267 2604 1335 2605 1310 2605 1267 2605 1310 2606 1311 2606 1276 2606 1267 2607 1310 2607 1276 2607 1271 2608 1312 2608 1336 2608 1271 2609 1336 2609 1269 2609 1329 2610 1342 2610 1261 2610 1264 2611 1329 2611 1261 2611 1257 2612 1325 2612 1324 2612 1257 2613 1324 2613 1351 2613 1320 2614 1313 2614 1256 2614 1320 2615 1341 2615 1313 2615 1259 2616 1354 2616 1325 2616 1259 2617 1325 2617 1257 2617 1256 2618 1273 2618 1239 2618 1256 2619 1313 2619 1273 2619 1272 2620 1314 2620 1353 2620 1259 2621 1272 2621 1353 2621 1270 2622 1315 2622 1338 2622 1270 2623 1338 2623 1272 2623 1316 2624 1328 2624 1262 2624 1263 2625 1316 2625 1262 2625 1317 2626 1288 2626 1350 2626 1270 2627 1317 2627 1350 2627 1265 2628 1318 2628 1317 2628 1318 2629 1361 2629 1317 2629 1332 2630 1318 2630 1265 2630 1319 2631 1332 2631 1265 2631 1343 2632 1341 2632 1320 2632 1274 2633 1343 2633 1320 2633 1261 2634 1342 2634 1279 2634 1261 2635 1279 2635 1321 2635 1283 2636 1329 2636 1264 2636 1244 2637 1283 2637 1264 2637 1276 2638 1311 2638 1282 2638 1276 2639 1282 2639 1278 2639 1338 2640 1322 2640 1354 2640 1323 2641 1312 2641 1324 2641 1302 2642 1313 2642 1341 2642 1313 2643 1302 2643 1273 2643 1323 2644 1324 2644 1309 2644 1324 2645 1325 2645 1309 2645 1325 2646 1354 2646 1309 2646 1273 2647 1302 2647 1305 2647 1354 2648 1322 2648 1309 2648 1302 2649 1341 2649 1303 2649 1273 2650 1305 2650 1316 2650 1303 2651 1341 2651 1342 2651 1316 2652 1305 2652 1307 2652 1303 2653 1342 2653 1300 2653 1316 2654 1307 2654 1326 2654 1326 2655 1327 2655 1328 2655 1316 2656 1326 2656 1328 2656 1331 2657 1330 2657 1329 2657 1330 2658 1300 2658 1329 2658 1300 2659 1342 2659 1329 2659 1331 2660 1329 2660 1311 2660 1328 2661 1327 2661 1332 2661 1311 2662 1310 2662 1298 2662 1331 2663 1311 2663 1298 2663 1318 2664 1332 2664 1333 2664 1332 2665 1327 2665 1333 2665 1318 2666 1333 2666 1361 2666 1298 2667 1310 2667 1335 2667 1315 2668 1361 2668 1334 2668 1361 2669 1333 2669 1334 2669 1308 2670 1315 2670 1334 2670 1335 2671 1336 2671 1296 2671 1336 2672 1337 2672 1296 2672 1298 2673 1335 2673 1296 2673 1315 2674 1308 2674 1339 2674 1315 2675 1339 2675 1338 2675 1340 2676 1337 2676 1312 2676 1337 2677 1336 2677 1312 2677 1338 2678 1339 2678 1322 2678 1340 2679 1312 2679 1323 2679 1342 2680 1341 2680 1343 2680 1342 2681 1343 2681 1279 2681 1246 2682 1275 2682 1344 2682 1293 2683 1344 2683 1355 2683 1344 2684 1275 2684 1355 2684 1293 2685 1355 2685 1345 2685 1311 2686 1329 2686 1283 2686 1311 2687 1283 2687 1282 2687 1346 2688 1347 2688 1252 2688 1346 2689 1252 2689 1348 2689 1348 2690 1294 2690 1359 2690 1346 2691 1348 2691 1359 2691 1286 2692 1285 2692 1358 2692 1286 2693 1358 2693 1357 2693 1328 2694 1355 2694 
1287 2694 1262 2695 1328 2695 1287 2695 1288 2696 1356 2696 1332 2696 1288 2697 1332 2697 1319 2697 1345 2698 1358 2698 1289 2698 1290 2699 1345 2699 1289 2699 1314 2700 1360 2700 1349 2700 1314 2701 1349 2701 1350 2701 1357 2702 1294 2702 1291 2702 1249 2703 1357 2703 1291 2703 1351 2704 1324 2704 1352 2704 1351 2705 1352 2705 1353 2705 1359 2706 1312 2706 1271 2706 1292 2707 1359 2707 1271 2707 1353 2708 1352 2708 1354 2708 1353 2709 1354 2709 1259 2709 1338 2710 1360 2710 1314 2710 1272 2711 1338 2711 1314 2711 1350 2712 1349 2712 1315 2712 1350 2713 1315 2713 1270 2713 1361 2714 1356 2714 1288 2714 1317 2715 1361 2715 1288 2715 1328 2716 1332 2716 1356 2716 1355 2717 1328 2717 1356 2717 1345 2718 1355 2718 1356 2718 1345 2719 1356 2719 1349 2719 1345 2720 1349 2720 1358 2720 1357 2721 1358 2721 1360 2721 1294 2722 1357 2722 1360 2722 1358 2723 1349 2723 1360 2723 1359 2724 1294 2724 1352 2724 1294 2725 1360 2725 1352 2725 1312 2726 1359 2726 1324 2726 1359 2727 1352 2727 1324 2727 1352 2728 1360 2728 1338 2728 1352 2729 1338 2729 1354 2729 1349 2730 1356 2730 1361 2730 1349 2731 1361 2731 1315 2731 1363 2732 12 2732 1362 2732 12 2733 1363 2733 1372 2733 1362 2734 12 2734 1370 2734 1372 2735 1363 2735 1366 2735 1366 2736 1364 2736 1365 2736 1372 2737 1366 2737 1365 2737 1364 2738 1370 2738 1365 2738 1362 2739 1370 2739 1364 2739 1362 2740 1367 2740 1363 2740 1368 2741 1367 2741 1362 2741 1363 2742 1367 2742 1366 2742 1366 2743 1367 2743 1369 2743 1364 2744 1368 2744 1362 2744 1376 2745 1368 2745 1364 2745 1366 2746 1369 2746 1364 2746 1364 2747 1369 2747 1376 2747 1370 2748 12 2748 1377 2748 1371 2749 1370 2749 1377 2749 1374 2750 1365 2750 1370 2750 1374 2751 1370 2751 1371 2751 12 2752 1372 2752 1373 2752 1377 2753 12 2753 1373 2753 1373 2754 1372 2754 1365 2754 1373 2755 1365 2755 1374 2755 1375 2756 1382 2756 1367 2756 1375 2757 1367 2757 1368 2757 1367 2758 1382 2758 1383 2758 1369 2759 1367 2759 1383 2759 1384 2760 1375 2760 1368 2760 1384 2761 1368 2761 1376 2761 1369 2762 1383 2762 1384 2762 1376 2763 1369 2763 1384 2763 1377 2764 1380 2764 1379 2764 1371 2765 1377 2765 1379 2765 1378 2766 1371 2766 1379 2766 1374 2767 1371 2767 1378 2767 1381 2768 1380 2768 1377 2768 1381 2769 1377 2769 1373 2769 1381 2770 1373 2770 1378 2770 1378 2771 1373 2771 1374 2771 1386 2772 1382 2772 1375 2772 1386 2773 1375 2773 1387 2773 1383 2774 1382 2774 1386 2774 1385 2775 1383 2775 1386 2775 1387 2776 1375 2776 1384 2776 1387 2777 1384 2777 1388 2777 1384 2778 1383 2778 1385 2778 1388 2779 1384 2779 1385 2779 1379 2780 1380 2780 1386 2780 1381 2781 1386 2781 1380 2781 1385 2782 1386 2782 1381 2782 1387 2783 1379 2783 1386 2783 1378 2784 1388 2784 1385 2784 1378 2785 1385 2785 1381 2785 1378 2786 1379 2786 1387 2786 1378 2787 1387 2787 1388 2787

-
-
-
-
- - - - - - - - - - - - - - -
[COLLADA asset header: Blender User, Blender 2.80.75 (commit f6cb5f54494e, 2019-07-29), created/modified 2019-10-18T18:24:27, Z_UP]
[library_effects: four identical grey materials (emission 0 0 0 1, diffuse 0.8 0.8 0.8 1, scalar parameter 0.5)]
[library_geometries: float arrays of vertex positions, normals and all-zero texture coordinates, followed by triangle index lists (raw coordinate data omitted)]
[library_visual_scenes: scene node with transform matrix 0.001 0 0 0 / 0 0.001 0 0 / 0 0 0.001 -0.0105 / 0 0 0 1]
\ No newline at end of file
diff --git a/models/rg_robot/meshes/CoreComponentV1.dae b/models/rg_robot/meshes/CoreComponentV1.dae
new file mode 100644
index 0000000000..b577138fb0
--- /dev/null
+++ b/models/rg_robot/meshes/CoreComponentV1.dae
@@ -0,0 +1,63 @@
[COLLADA asset header: VCGLab, VCGLib | MeshLab, Y_UP, created Thu 17 Sep 2015]
[library_geometries: vertex position float arrays for the CoreComponentV1 mesh (coordinate data continues below)]
0.0114635 0.0135 0.0445 -0.007 0.0116235 0.0445 0.0112303 0.0133093 0.0445 -0.0138715 0.0131376 0.0445 -0.0140543 -0.0120885 0.0445 0.0108619 -0.0123189 0.0445 0.010787 -0.0132044 0.0445 0.0110747 -0.0135 0.0445 0.0095 0.0095 0.0445 -0.014 0.0115311 0.0445 -0.0130348 0.0116234 0.0445 -0.0128016 -0.013436 0.0445 0.0119766 -0.0133468 0.0445 0.0122018 -0.0125607 0.0445 0.0127007 0.0035 0.0445 -0.014 0.0035 0.0445 -0.019 0.0202877 0.0445 -0.014 0.0124372 0.0445 -0.0142819 -0.0085 0.0445 -0.014 -0.0118626 0.0445 -0.0125646 -0.0135 0.0445 -0.007 -0.011691 0.0445 -0.0127473 -0.0120742 0.0445 -0.0124302 -0.0115079 0.0445 -0.0132097 -0.0115702 0.0445 -0.0129669 -0.012809 0.0445 -0.012384 -0.011691 0.0445 -0.0139229 -0.0133763 0.0445 -0.0128533 -0.0123126 0.0445 -0.0143174 -0.0045 0.0445 -0.019 -0.0133763 0.0445 -0.0138168 -0.0134686 0.0445 -0.0135838 -0.0203475 0.0445 -0.014 0.011964 0.0445 0.0108677 0.0117709 0.0445 0.0110275 -0.011884 0.0445 0.0109917 -0.0203475 0.0445 0.014 0.0133092 0.0405 0.0123001 0.0129259 0.0405 0.0126173 0.0202877 0.0405 0.014 0.01343 0.0405 0.0120804 0.01343 0.0405 0.011344 0.0131376 0.0405 0.0109415 0.0129259 0.0405 0.0108072 0.0134924 0.0405 0.0115868 0.0121909 0.0405 0.0107609 0.0135 0.0405 -0.007 0.0202877 0.0405 -0.014 -0.0117182 0.0405 0.0123043 -0.011884 0.0405 0.0124808 -0.0120885 0.0405 0.0126106 -0.0123189 0.0405 0.0126855 0.0134924 0.0405 -0.0131582 -0.0116015 0.0405 0.012092 0.0114998 0.0405 0.0117122 -0.0115413 0.0405 0.0118574 0.0115312 0.0405 0.0114635 0.0121909 0.0405 -0.0123322 0.0116235 0.0405 0.0112303 0.0129259 0.0405 -0.0141887 -0.011884 0.0405 0.0109917 -0.0120885 0.0405 0.0108619 -0.0123189 0.0405 0.010787 -0.0130178 0.0405 0.0109203 0.0095 0.0405 -0.014 0.0115311 0.0405 -0.0130348 0.011964 0.0405 -0.0141281 0.0114997 0.0405 -0.0132835 -0.0203475 0.0405 0.014 -0.0127986 0.0405 0.0126553 -0.0125607 0.0405 0.0127007 -0.013436 0.0405 0.0114959 -0.0345 0.0405 0.014 -0.0025 0.0405 -0.014 -0.0120742 0.0405 -0.0124302 -0.0085 0.0405 -0.014 -0.0115702 0.0405 -0.0129669 -0.0123126 0.0405 -0.0123528 -0.0125628 0.0405 -0.012337 -0.0115079 0.0405 -0.0134604 -0.013229 0.0405 -0.0126505 -0.0118626 0.0405 -0.0141056 -0.012809 0.0405 -0.0142861 -0.0135 0.0405 -0.0133351 -0.0345 0.0405 -0.014 -0.0385 0.0405 0.019 0.0124372 0.0405 0.0127105 0.011964 0.0405 0.0108677 0.0135 0.0405 0.0095 0.0117709 0.0405 0.0110275 -0.0025 0.0405 -0.019 -0.0045 0.0445 -0.014 -0.0135 0.0405 -0.007 -0.0135 0.0405 0.0095 0.0015 0.0405 -0.014 0.0015 0.0405 -0.019 -0.0345 0.0445 0.014 -0.0203475 0.0405 -0.014 -0.0345 0.0445 -0.014 -0.0125628 0.0445 -0.0143331 -0.012809 0.0445 -0.0142861 -0.0134686 0.0405 -0.0130864 -0.0135 0.0445 -0.0133351 -0.0134686 0.0445 -0.0130864 -0.0130358 0.0445 -0.0141794 -0.0130358 0.0405 -0.0141794 -0.0133763 0.0405 -0.0128533 -0.013229 0.0445 -0.0140196 -0.0133763 0.0405 -0.0138168 -0.013229 0.0405 -0.0140196 -0.0134686 0.0405 -0.0135838 -0.0130358 0.0405 -0.0124907 -0.013229 0.0445 -0.0126505 -0.012809 0.0405 -0.012384 -0.0130358 0.0445 -0.0124907 -0.0125628 0.0445 -0.012337 -0.0123126 0.0445 -0.0123528 -0.0118626 0.0405 -0.0125646 -0.011691 0.0405 -0.0127473 -0.0115079 0.0405 -0.0132097 -0.0115079 0.0445 -0.0134604 -0.0115702 0.0405 -0.0137032 -0.0115702 0.0445 -0.0137032 -0.011691 0.0405 -0.0139229 -0.0118626 0.0445 -0.0141056 -0.0120742 0.0445 -0.0142399 -0.0123126 0.0405 -0.0143174 -0.0120742 0.0405 -0.0142399 -0.0125628 0.0405 -0.0143331 -0.0125607 0.0445 0.0107718 -0.0127986 0.0445 0.0108172 -0.013436 0.0405 0.0119766 -0.0134664 0.0405 0.0117363 
-0.0134664 0.0445 0.0117363 -0.0127986 0.0405 0.0108172 -0.0130178 0.0445 0.0109203 -0.0132044 0.0405 0.0110747 -0.0133468 0.0405 0.0122018 -0.0133468 0.0405 0.0112707 -0.0133468 0.0445 0.0112707 -0.013436 0.0445 0.0114959 -0.0132044 0.0405 0.0123978 -0.0132044 0.0445 0.0123978 -0.0130178 0.0445 0.0125522 -0.0130178 0.0405 0.0125522 -0.0127986 0.0445 0.0126553 -0.0115413 0.0445 0.0118574 -0.0115413 0.0445 0.0116151 -0.0115413 0.0405 0.0116151 -0.0116015 0.0405 0.0113805 -0.0116015 0.0445 0.0113805 -0.0117182 0.0405 0.0111682 -0.0117182 0.0445 0.0111682 -0.0125607 0.0405 0.0107718 0.0124372 0.0405 -0.0142819 0.0121909 0.0405 -0.0142349 0.0114997 0.0445 -0.0132835 0.0121909 0.0445 -0.0142349 0.011964 0.0445 -0.0141281 0.0117708 0.0445 -0.0139683 0.0116234 0.0405 -0.0128016 0.0116234 0.0445 -0.0137655 0.0117708 0.0405 -0.0139683 0.0117708 0.0445 -0.0125988 0.0116234 0.0405 -0.0137655 0.0115311 0.0405 -0.0135323 0.0115311 0.0445 -0.0135323 0.011964 0.0405 -0.0124389 0.0117708 0.0405 -0.0125988 0.011964 0.0445 -0.0124389 0.0121909 0.0445 -0.0123322 0.0124372 0.0445 -0.0122852 0.0126874 0.0405 -0.0123009 0.0124372 0.0405 -0.0122852 0.0129259 0.0405 -0.0123784 0.0126874 0.0445 -0.0123009 0.0131376 0.0405 -0.0125128 0.0134301 0.0405 -0.0129153 0.0133093 0.0405 -0.0126956 0.0134301 0.0445 -0.0129153 0.0134924 0.0405 -0.0134089 0.0134301 0.0445 -0.0136518 0.0134301 0.0405 -0.0136518 0.0133093 0.0405 -0.0138715 0.0131376 0.0405 -0.0140543 0.0129259 0.0445 -0.0141887 0.0126874 0.0405 -0.0142661 0.0126874 0.0445 -0.0142661 0.0115312 0.0405 0.011961 0.0121909 0.0445 0.0107609 0.0116235 0.0445 0.0121941 0.0116235 0.0405 0.0121941 0.0117709 0.0405 0.0123969 0.011964 0.0405 0.0125567 0.0114998 0.0445 0.0117122 0.011964 0.0445 0.0125567 0.0121909 0.0405 0.0126635 0.0124372 0.0445 0.0127105 0.0131376 0.0405 0.0124829 0.0129259 0.0445 0.0126173 0.0126874 0.0405 0.0126947 0.0126874 0.0445 0.0126947 0.0133092 0.0445 0.0123001 0.01343 0.0445 0.0120804 0.0134924 0.0405 0.0118376 0.0134924 0.0445 0.0115868 0.01343 0.0445 0.011344 0.0133092 0.0405 0.0111243 0.0124372 0.0445 0.0107139 0.0124372 0.0405 0.0107139 0.0126874 0.0405 0.0107297 0.0345 0.0405 0.014 0.0345 0.0405 -0.014 0.0405 0.0133092 0.0123001 0.0405 0.0131376 0.0124829 0.0405 0.0126874 0.0126947 0.0405 0.0134924 0.0118376 0.0405 0.0135 0.0095 0.0405 0.0133092 0.0111243 0.0405 0.0129259 0.0108072 0.0405 0.0124372 0.0107139 0.0405 0.0126874 0.0107297 0.0405 0.0345 0.014 0.0405 0.0135 -0.007 0.0405 0.0131376 -0.0125128 0.0405 -0.0120885 0.0126106 0.0405 0.0121909 0.0126635 0.0405 -0.0117182 0.0123043 0.0405 -0.0116015 0.012092 0.0405 0.0116235 0.0121941 0.0405 0.0115312 0.011961 0.0405 0.0124372 0.0127105 0.0405 0.0124372 -0.0122852 0.0405 -0.0115413 0.0116151 0.0405 -0.0115413 0.0118574 0.0405 0.0115312 0.0114635 0.0405 0.011964 -0.0124389 0.0405 0.0121909 -0.0123322 0.0405 0.0129259 -0.0141887 0.0405 -0.0127986 0.0108172 0.0405 -0.0130178 0.0109203 0.0405 0.0095 -0.014 0.0405 0.0116234 -0.0137655 0.0405 -0.013436 0.0119766 0.0405 -0.0133468 0.0122018 0.0405 -0.0127986 0.0126553 0.0405 -0.0130178 0.0125522 0.0405 -0.0125607 0.0127007 0.0405 -0.0203475 0.014 0.0405 -0.0135 0.0095 0.0405 -0.0085 -0.014 0.0405 -0.0015 -0.014 0.0405 -0.0118626 -0.0125646 0.0405 -0.0135 -0.007 0.0405 -0.011691 -0.0127473 0.0405 -0.0123126 -0.0123528 0.0405 -0.0120742 -0.0124302 0.0405 -0.0125628 -0.012337 0.0405 -0.012809 -0.012384 0.0405 -0.0115702 -0.0137032 0.0405 -0.0130358 -0.0124907 0.0405 -0.0118626 -0.0141056 0.0405 -0.0135 -0.0133351 0.0405 -0.0203475 -0.014 0.0405 
-0.0130358 -0.0141794 0.0405 -0.012809 -0.0142861 0.0405 -0.013229 -0.0140196 0.0405 -0.0134686 -0.0135838 0.0405 -0.0125628 -0.0143331 0.0405 -0.0015 -0.019 0.0405 -0.0345 -0.014 0.0405 -0.0385 0.019 0.0405 -0.0123189 0.0126855 0.0405 0.011964 0.0108677 0.0405 0.0202877 -0.014 0.0445 0.0345 0.014 0.0445 0.0131376 0.0124829 0.0445 0.01343 0.0120804 0.0445 0.0202877 0.014 0.0445 0.0134924 0.0115868 0.0445 0.0135 0.0095 0.0445 0.0121909 0.0107609 0.0445 0.011964 0.0108677 0.0445 0.0345 -0.014 0.0445 0.0385 -0.019 0.0445 -0.011884 0.0124808 0.0445 0.0133093 -0.0126956 0.0445 0.0129259 -0.0123784 0.0445 0.0121909 0.0126635 0.0445 -0.0123189 0.0126855 0.0445 0.0134301 -0.0129153 0.0445 0.0115312 0.011961 0.0445 -0.0115413 0.0118574 0.0445 0.0202877 -0.014 0.0445 0.0114998 0.0117122 0.0445 -0.0115413 0.0116151 0.0445 0.0134924 -0.0134089 0.0445 0.0131376 -0.0140543 0.0445 0.0129259 -0.0141887 0.0445 -0.0120885 0.0108619 0.0445 -0.0127986 0.0108172 0.0445 -0.0132044 0.0110747 0.0445 -0.0133468 0.0112707 0.0445 0.0117708 -0.0139683 0.0445 -0.0134664 0.0117363 0.0445 -0.0125607 0.0127007 0.0445 0.0095 -0.014 0.0445 0.0124372 -0.0142819 0.0445 -0.0135 -0.007 0.0445 -0.0115079 -0.0132097 0.0445 -0.0125628 -0.012337 0.0445 -0.0115702 -0.0137032 0.0445 -0.012809 -0.012384 0.0445 -0.011691 -0.0139229 0.0445 -0.0130358 -0.0124907 0.0445 -0.013229 -0.0126505 0.0445 -0.0118626 -0.0141056 0.0445 -0.0133763 -0.0128533 0.0445 -0.0120742 -0.0142399 0.0445 -0.0135 -0.0133351 0.0445 -0.0133763 -0.0138168 0.0445 -0.012809 -0.0142861 0.0445 -0.0385 0.019 0.0445 -0.0345 -0.014 0.0445 0.0385 0.019 0.0445 0.0124372 0.0127105 0.0445 -0.011884 0.0109917 0.0445 0.0117709 0.0110275 0.0405 0.0385 -0.019 0.0445 -0.0035 -0.014 0.0445 -0.0085 -0.014 0.0445 -0.0135 0.0095 0.0445 0.0135 -0.007 0.0405 0.0025 -0.014 0.0445 0.0045 -0.014 0.0445 0.0045 -0.019 0.0445 -0.0203475 0.014 0.0405 -0.0345 0.014 0.0445 -0.0345 0.014 0.0445 -0.0203475 -0.014 0.0445 -0.0125628 -0.0143331 0.0445 -0.0134686 -0.0130864 0.0405 -0.0134686 -0.0130864 0.0445 -0.0130358 -0.0141794 0.0445 -0.013229 -0.0140196 0.0405 -0.0133763 -0.0138168 0.0405 -0.0133763 -0.0128533 0.0445 -0.0134686 -0.0135838 0.0405 -0.013229 -0.0126505 0.0445 -0.0123126 -0.0123528 0.0445 -0.0120742 -0.0124302 0.0445 -0.0118626 -0.0125646 0.0445 -0.011691 -0.0127473 0.0405 -0.0115702 -0.0129669 0.0445 -0.0115702 -0.0129669 0.0405 -0.0115079 -0.0132097 0.0405 -0.0115079 -0.0134604 0.0445 -0.0115079 -0.0134604 0.0405 -0.011691 -0.0139229 0.0405 -0.0120742 -0.0142399 0.0405 -0.0123126 -0.0143174 0.0445 -0.0123126 -0.0143174 0.0445 -0.0130178 0.0109203 0.0445 -0.0133468 0.0122018 0.0445 -0.013436 0.0119766 0.0405 -0.0134664 0.0117363 0.0405 -0.0132044 0.0110747 0.0405 -0.0133468 0.0112707 0.0405 -0.013436 0.0114959 0.0445 -0.013436 0.0114959 0.0445 -0.0130178 0.0125522 0.0445 -0.0132044 0.0123978 0.0405 -0.0132044 0.0123978 0.0445 -0.0127986 0.0126553 0.0445 -0.0120885 0.0126106 0.0445 -0.0117182 0.0123043 0.0405 -0.011884 0.0124808 0.0445 -0.0116015 0.012092 0.0445 -0.0116015 0.0113805 0.0405 -0.0116015 0.0113805 0.0445 -0.0117182 0.0111682 0.0405 -0.0117182 0.0111682 0.0405 -0.011884 0.0109917 0.0405 -0.0120885 0.0108619 0.0405 -0.0123189 0.010787 0.0445 -0.0123189 0.010787 0.0405 -0.0125607 0.0107718 0.0445 -0.0125607 0.0107718 0.0405 0.0124372 -0.0142819 0.0405 0.0121909 -0.0142349 0.0445 0.0115311 -0.0130348 0.0445 0.0114997 -0.0132835 0.0445 0.011964 -0.0141281 0.0445 0.0121909 -0.0142349 0.0445 0.0116234 -0.0128016 0.0405 0.0115311 -0.0130348 0.0405 0.0114997 -0.0132835 0.0405 
0.011964 -0.0141281 0.0405 0.0116234 -0.0128016 0.0405 0.0117708 -0.0139683 0.0445 0.0116234 -0.0137655 0.0445 0.0115311 -0.0135323 0.0405 0.0115311 -0.0135323 0.0445 0.0117708 -0.0125988 0.0405 0.0117708 -0.0125988 0.0445 0.011964 -0.0124389 0.0445 0.0124372 -0.0122852 0.0445 0.0121909 -0.0123322 0.0445 0.0126874 -0.0123009 0.0405 0.0126874 -0.0123009 0.0405 0.0129259 -0.0123784 0.0445 0.0131376 -0.0125128 0.0405 0.0133093 -0.0126956 0.0405 0.0134301 -0.0129153 0.0405 0.0134924 -0.0131582 0.0445 0.0134924 -0.0131582 0.0405 0.0134924 -0.0134089 0.0445 0.0134301 -0.0136518 0.0405 0.0134301 -0.0136518 0.0405 0.0133093 -0.0138715 0.0445 0.0133093 -0.0138715 0.0405 0.0131376 -0.0140543 0.0445 0.0126874 -0.0142661 0.0405 0.0126874 -0.0142661 0.0405 0.0121909 0.0107609 0.0405 0.0114998 0.0117122 0.0445 0.0116235 0.0121941 0.0405 0.0117709 0.0110275 0.0445 0.0117709 0.0123969 0.0405 0.0116235 0.0112303 0.0445 0.0115312 0.0114635 0.0445 0.0116235 0.0112303 0.0445 0.011964 0.0125567 0.0405 0.011964 0.0125567 0.0405 0.0117709 0.0123969 0.0445 0.0129259 0.0126173 0.0405 0.0129259 0.0126173 0.0445 0.0126874 0.0126947 0.0445 0.0133092 0.0123001 0.0405 0.01343 0.0120804 0.0445 0.0134924 0.0118376 0.0405 0.0134924 0.0115868 0.0405 0.01343 0.011344 0.0445 0.01343 0.011344 0.0445 0.0133092 0.0111243 0.0405 0.0131376 0.0109415 0.0445 0.0129259 0.0108072 0.0445 0.0131376 0.0109415 0.0445 0.0124372 0.0107139 0.0445 0.0126874 0.0107297 0.0405 0.0202877 0.014 0.0405 0.0345 -0.014 -0.0385 -0.0405 0.019 -0.0385 -0.0445 -0.019 0.0385 -0.0445 -0.019 -0.0385 -0.0445 0.019 -0.0131376 -0.0445 0.0124829 -0.0126874 -0.0445 0.0126947 -0.0134924 -0.0445 0.0115868 -0.0135 -0.0445 0.0095 -0.0131376 -0.0445 0.0109415 -0.0126874 -0.0445 0.0107297 -0.0121909 -0.0445 0.0107609 -0.0133093 -0.0445 -0.0126956 -0.0135 -0.0445 -0.007 -0.0117709 -0.0445 0.0123969 -0.0116235 -0.0445 0.0121941 -0.011964 -0.0445 0.0125567 0.011884 -0.0445 0.0124808 -0.0134301 -0.0445 -0.0129153 0.0116015 -0.0445 0.012092 -0.0115312 -0.0445 0.011961 -0.0129259 -0.0445 -0.0123784 -0.0202877 -0.0445 -0.014 0.0115413 -0.0445 0.0118574 -0.0134924 -0.0445 -0.0134089 0.0115413 -0.0445 0.0116151 -0.0115312 -0.0445 0.0114635 -0.0131376 -0.0445 -0.0140543 -0.0133093 -0.0445 -0.0138715 0.0135 -0.0445 0.0095 0.0127986 -0.0445 0.0108172 0.0130178 -0.0445 0.0109203 0.013436 -0.0445 0.0114959 -0.0117708 -0.0445 -0.0125988 -0.0115311 -0.0445 -0.0130348 0.0132044 -0.0445 0.0123978 0.0133468 -0.0445 0.0122018 0.0203475 -0.0445 0.014 -0.0035 -0.0445 -0.019 -0.0121909 -0.0445 -0.0142349 0.0045 -0.0445 -0.014 0.0115702 -0.0445 -0.0129669 0.0123126 -0.0445 -0.0123528 0.0115079 -0.0445 -0.0132097 0.0115079 -0.0445 -0.0134604 0.0130358 -0.0445 -0.0124907 0.012809 -0.0445 -0.012384 0.011691 -0.0445 -0.0139229 0.0085 -0.0445 -0.014 0.0120742 -0.0445 -0.0142399 0.0123126 -0.0445 -0.0143174 0.0125628 -0.0445 -0.0143331 0.0134686 -0.0445 -0.0130864 0.0135 -0.0445 -0.007 0.0133763 -0.0445 -0.0128533 0.013229 -0.0445 -0.0140196 0.0345 -0.0445 0.014 0.0385 -0.0445 0.019 0.0045 -0.0445 -0.019 -0.0202877 -0.0445 0.014 0.011884 -0.0445 0.0109917 0.0117182 -0.0445 0.0111682 0.0203475 -0.0445 -0.014 -0.0133092 -0.0405 0.0123001 -0.0131376 -0.0405 0.0124829 -0.0202877 -0.0405 0.014 -0.0126874 -0.0405 0.0126947 -0.0134924 -0.0405 0.0118376 -0.0134924 -0.0405 0.0115868 -0.0133092 -0.0405 0.0111243 -0.0131376 -0.0405 0.0109415 -0.0129259 -0.0405 0.0108072 -0.0124372 -0.0405 0.0107139 -0.0345 -0.0405 0.014 -0.0135 -0.0405 0.0095 -0.0133093 -0.0405 -0.0126956 -0.0131376 -0.0405 -0.0125128 
-0.0117709 -0.0405 0.0123969 0.0120885 -0.0405 0.0126106 0.011884 -0.0405 0.0124808 0.0117182 -0.0405 0.0123043 -0.0126874 -0.0405 -0.0123009 -0.0115312 -0.0405 0.011961 -0.0124372 -0.0405 -0.0122852 0.0116015 -0.0405 0.0113805 -0.0121909 -0.0405 -0.0123322 -0.0131376 -0.0405 -0.0140543 -0.0129259 -0.0405 -0.0141887 0.011884 -0.0405 0.0109917 0.0125607 -0.0405 0.0107718 0.0127986 -0.0405 0.0108172 0.0130178 -0.0405 0.0109203 0.013436 -0.0405 0.0114959 -0.0117708 -0.0405 -0.0125988 -0.0117708 -0.0405 -0.0139683 -0.0116234 -0.0405 -0.0137655 -0.0115311 -0.0405 -0.0135323 -0.0114997 -0.0405 -0.0132835 0.0130178 -0.0405 0.0125522 0.0203475 -0.0405 0.014 0.0135 -0.0405 0.0095 -0.0015 -0.0405 -0.014 -0.0124372 -0.0405 -0.0142819 0.0345 -0.0405 0.014 0.0025 -0.0405 -0.014 0.011691 -0.0405 -0.0127473 0.0115702 -0.0405 -0.0129669 0.0120742 -0.0405 -0.0124302 0.0085 -0.0405 -0.014 0.0125628 -0.0405 -0.012337 0.0115079 -0.0405 -0.0134604 0.0115702 -0.0405 -0.0137032 0.0135 -0.0405 -0.007 0.0025 -0.0405 -0.019 0.0125628 -0.0405 -0.0143331 0.012809 -0.0405 -0.0142861 0.0133763 -0.0405 -0.0128533 0.0133763 -0.0405 -0.0138168 0.0345 -0.0405 -0.014 0.0123189 -0.0405 0.0126855 -0.0117709 -0.0405 0.0110275 -0.0135 -0.0405 -0.007 -0.0095 -0.0445 -0.014 -0.0035 -0.0445 -0.014 -0.0095 -0.0405 -0.014 0.0203475 -0.0405 -0.014 0.0345 -0.0445 -0.014 0.0134686 -0.0405 -0.0130864 0.0135 -0.0405 -0.0133351 0.0135 -0.0445 -0.0133351 0.012809 -0.0445 -0.0142861 0.0130358 -0.0445 -0.0141794 0.0130358 -0.0405 -0.0141794 0.013229 -0.0405 -0.0140196 0.0133763 -0.0445 -0.0138168 0.013229 -0.0405 -0.0126505 0.013229 -0.0445 -0.0126505 0.0134686 -0.0445 -0.0135838 0.0134686 -0.0405 -0.0135838 0.0130358 -0.0405 -0.0124907 0.012809 -0.0405 -0.012384 0.0123126 -0.0405 -0.0123528 0.0125628 -0.0445 -0.012337 0.0118626 -0.0405 -0.0125646 0.0120742 -0.0445 -0.0124302 0.0118626 -0.0445 -0.0125646 0.011691 -0.0445 -0.0127473 0.0115079 -0.0405 -0.0132097 0.0115702 -0.0445 -0.0137032 0.011691 -0.0405 -0.0139229 0.0118626 -0.0405 -0.0141056 0.0118626 -0.0445 -0.0141056 0.0120742 -0.0405 -0.0142399 0.0123126 -0.0405 -0.0143174 0.0125607 -0.0445 0.0107718 0.013436 -0.0405 0.0119766 0.0133468 -0.0405 0.0122018 0.0134664 -0.0445 0.0117363 0.013436 -0.0445 0.0119766 0.0132044 -0.0445 0.0110747 0.0132044 -0.0405 0.0110747 0.0133468 -0.0405 0.0112707 0.0133468 -0.0445 0.0112707 0.0134664 -0.0405 0.0117363 0.0132044 -0.0405 0.0123978 0.0130178 -0.0445 0.0125522 0.0127986 -0.0445 0.0126553 0.0125607 -0.0405 0.0127007 0.0127986 -0.0405 0.0126553 0.0125607 -0.0445 0.0127007 0.0123189 -0.0445 0.0126855 0.0120885 -0.0445 0.0126106 0.0117182 -0.0445 0.0123043 0.0116015 -0.0405 0.012092 0.0115413 -0.0405 0.0118574 0.0115413 -0.0405 0.0116151 0.0116015 -0.0445 0.0113805 0.0117182 -0.0405 0.0111682 0.0120885 -0.0405 0.0108619 0.0120885 -0.0445 0.0108619 0.0123189 -0.0445 0.010787 0.0123189 -0.0405 0.010787 -0.0121909 -0.0405 -0.0142349 -0.0114997 -0.0445 -0.0132835 -0.0115311 -0.0405 -0.0130348 -0.011964 -0.0405 -0.0141281 -0.011964 -0.0445 -0.0141281 -0.0117708 -0.0445 -0.0139683 -0.0116234 -0.0405 -0.0128016 -0.0116234 -0.0445 -0.0128016 -0.0116234 -0.0445 -0.0137655 -0.0115311 -0.0445 -0.0135323 -0.011964 -0.0405 -0.0124389 -0.011964 -0.0445 -0.0124389 -0.0121909 -0.0445 -0.0123322 -0.0124372 -0.0445 -0.0122852 -0.0126874 -0.0445 -0.0123009 -0.0129259 -0.0405 -0.0123784 -0.0131376 -0.0445 -0.0125128 -0.0134301 -0.0405 -0.0129153 -0.0134924 -0.0405 -0.0131582 -0.0134924 -0.0445 -0.0131582 -0.0134924 -0.0405 -0.0134089 -0.0134301 -0.0445 -0.0136518 
-0.0134301 -0.0405 -0.0136518 -0.0133093 -0.0405 -0.0138715 -0.0129259 -0.0445 -0.0141887 -0.0126874 -0.0405 -0.0142661 -0.0126874 -0.0445 -0.0142661 -0.0124372 -0.0445 -0.0142819 -0.0124372 -0.0445 0.0107139 -0.011964 -0.0445 0.0108677 -0.0121909 -0.0405 0.0107609 -0.011964 -0.0405 0.0108677 -0.0117709 -0.0445 0.0110275 -0.0116235 -0.0405 0.0121941 -0.0116235 -0.0405 0.0112303 -0.0116235 -0.0445 0.0112303 -0.0115312 -0.0405 0.0114635 -0.0114998 -0.0405 0.0117122 -0.0114998 -0.0445 0.0117122 -0.011964 -0.0405 0.0125567 -0.0121909 -0.0445 0.0126635 -0.0121909 -0.0405 0.0126635 -0.0124372 -0.0445 0.0127105 -0.0124372 -0.0405 0.0127105 -0.0129259 -0.0405 0.0126173 -0.0129259 -0.0445 0.0126173 -0.0133092 -0.0445 0.0123001 -0.01343 -0.0405 0.0120804 -0.01343 -0.0445 0.0120804 -0.0134924 -0.0445 0.0118376 -0.01343 -0.0445 0.011344 -0.01343 -0.0405 0.011344 -0.0133092 -0.0445 0.0111243 -0.0129259 -0.0445 0.0108072 -0.0126874 -0.0405 0.0107297 -0.0345 -0.0445 0.014 -0.0202877 -0.0405 -0.014 -0.0345 -0.0445 -0.014 -0.0345 -0.0405 -0.014 0.0025 -0.0105 -0.019 -0.0025 0.0025 -0.019 -0.0025 -0.0025 -0.019 -0.0355 -0.0145 -0.019 -0.0145 -0.0105 -0.019 -0.0105 -0.0105 -0.019 -0.0025 -0.0105 -0.019 0.0105 -0.0105 -0.019 0.0105 -0.0145 -0.019 0.0025 -0.0025 -0.022 -0.0105 -0.0105 -0.022 -0.0025 -0.0025 -0.022 -0.0105 0.0025 -0.019 0.0025 0.0105 -0.019 0.0025 0.0025 -0.019 0.0025 -0.0025 -0.019 0.0105 -0.0025 -0.022 0.0105 -0.0025 -0.019 0.0145 -0.0105 -0.019 0.0105 0.0025 -0.019 -0.0355 -0.0355 -0.019 0.0355 0.0355 -0.019 0.0445 0.0445 -0.019 0.0145 0.0355 -0.019 -0.0035 -0.0445 -0.019 -0.0105 -0.0355 -0.019 -0.0355 0.0105 -0.019 -0.0355 -0.0105 -0.019 0.0105 -0.0355 -0.019 -0.0355 0.0355 -0.019 -0.0445 0.0035 -0.019 0.0355 -0.0355 -0.019 0.0045 -0.0445 -0.019 0.0355 -0.0105 -0.019 -0.0105 0.0355 -0.019 0.0355 0.0105 -0.019 0.0105 0.0355 -0.019 -0.0355 -0.0145 -0.022 -0.0145 -0.0145 -0.019 -0.0145 -0.0105 -0.022 0.0145 -0.0355 -0.019 0.0355 -0.0145 -0.019 0.0145 -0.0145 -0.019 -0.0105 -0.0145 -0.022 -0.0025 -0.0105 -0.022 0.0025 -0.0105 -0.022 0.0145 -0.0145 -0.022 0.0105 -0.0145 -0.022 -0.0105 -0.0025 -0.019 -0.0025 0.0025 -0.022 0.0025 0.0025 -0.022 -0.0145 0.0105 -0.019 -0.0145 0.0145 -0.019 -0.0025 0.0105 -0.019 -0.0105 0.0105 -0.019 0.0105 0.0105 -0.019 0.0145 0.0105 -0.019 -0.0105 0.0025 -0.022 0.0105 0.0105 -0.022 0.0405 0.0025 -0.019 0.0445 -0.0035 -0.022 0.0445 -0.0035 -0.019 0.0445 -0.0445 -0.022 0.0445 -0.0445 -0.019 -0.0015 -0.0405 -0.022 -0.0015 -0.0405 -0.019 -0.0445 -0.0445 -0.022 -0.0035 -0.0445 -0.022 -0.0445 -0.0045 -0.019 -0.0445 -0.0045 -0.022 -0.0405 -0.0025 -0.019 -0.0405 -0.0025 -0.022 -0.0025 0.0405 -0.022 0.0445 0.0445 -0.022 0.0355 -0.0355 -0.022 0.0145 -0.0355 -0.022 0.0355 0.0105 -0.022 -0.0355 -0.0355 -0.022 0.0105 0.0145 -0.019 -0.0105 0.0355 -0.022 -0.0355 -0.0105 -0.022 -0.0145 0.0355 -0.019 -0.0355 0.0355 -0.022 -0.0355 0.0145 -0.019 -0.0145 -0.0355 -0.019 -0.0105 -0.0145 -0.019 0.0035 0.0445 -0.022 0.0445 0.0045 -0.022 0.0355 0.0145 -0.022 0.0355 0.0355 -0.022 -0.0405 0.0015 -0.022 -0.0445 0.0035 -0.022 -0.0355 0.0105 -0.022 0.0105 -0.0355 -0.022 0.0025 -0.0405 -0.022 0.0045 -0.0445 -0.022 -0.0355 0.0145 -0.022 -0.0445 0.0445 -0.022 -0.0045 0.0445 -0.022 0.0355 -0.0145 -0.022 0.0355 -0.0105 -0.022 0.0405 -0.0015 -0.022 0.0105 0.0355 -0.022 0.0015 0.0405 -0.022 0.0405 0.0025 -0.022 -0.0145 -0.0355 -0.022 -0.0105 -0.0355 -0.022 -0.0145 -0.0145 -0.022 -0.0105 -0.0025 -0.022 -0.0105 0.0105 -0.022 0.0145 -0.0105 -0.022 0.0105 -0.0105 -0.022 0.0105 0.0025 -0.022 -0.0105 0.0145 
-0.022 -0.0105 0.0145 -0.019 0.0355 0.0145 -0.019 0.0145 0.0145 -0.022 0.0145 0.0145 -0.019 0.0145 0.0355 -0.022 -0.0145 0.0105 -0.022 -0.0145 0.0145 -0.022 0.0025 0.0105 -0.022 -0.0025 0.0105 -0.022 0.0145 0.0105 -0.022 0.0105 0.0145 -0.022 -0.0145 0.0355 -0.022 -0.04048 0.04048 0.019 -0.04048 -0.04048 0.019 0.04048 0.04048 0.019 0.0445 0.0445 0.019 0.04048 -0.04048 0.019 -0.0370829 -0.0370829 0.0204071 -0.0370829 0.0370829 0.0204071 0.0370829 -0.0370829 0.0204071 -0.0445 0.0445 0.019 -0.0445 0.0445 0.0201 0.0445 -0.0445 0.019 0.0445 -0.0445 0.0201 0.0445 0.0445 0.0201 -0.0355 0.0355 0.02199 0.0370829 0.0370829 0.0204071 -0.0445 -0.0445 0.0201 0.0426 0.0426 0.022 -0.0426 0.0426 0.022 -0.0426 -0.0426 0.022 0.0426 -0.0426 0.022 -0.0355 -0.0355 0.02199 0.0355 -0.0355 0.02199 0.0355 0.0355 0.02199 0.0355 -0.0355 0.022 -0.0355 -0.0355 0.022 -0.0355 0.0355 0.022 0.0355 0.0355 0.022 + + + + + + + + + + 0 1 0 0 1 0 1 0 0 1 0 0 0.707107 -0.707107 0 0.707107 -0.707107 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -0.707107 0.707107 0 -0.707107 0.707107 0 0 1 0 0 1 0 1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 -1 0 0 -1 0 0 0 1 0 0 1 0 0.707107 0.707107 0 0.707107 0.707107 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 1 0 0 1 0 0 0 -1 0 0 -1 0 -0.707107 -0.707107 0 -0.707107 -0.707107 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0 0.514496 -0.857493 0 0.514496 -0.606339 0.707107 0.363803 -0.606339 0.707107 0.363803 -0.606339 -0.707107 0.363803 -0.606339 -0.707107 0.363803 0 1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0.514496 0 0.857493 0.514496 0.707107 0.606339 0.363803 0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0 0.514496 0.857493 0 0.514496 0.606339 -0.707107 0.363803 0.606339 -0.707107 0.363803 0.606339 0.707107 0.363803 0.606339 0.707107 0.363803 0 -1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0.514496 0 -0.857493 0.514496 -0.707107 -0.606339 0.363803 -0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 -1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0.514496 0 -0.857493 0.514496 -0.707107 -0.606339 0.363803 -0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 -1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0 0.514496 0.857493 0 0.514496 0.606339 -0.707107 0.363803 0.606339 -0.707107 0.363803 0.606339 0.707107 0.363803 0.606339 0.707107 0.363803 0 -1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0 0.514496 -0.857493 0 0.514496 -0.606339 0.707107 0.363803 -0.606339 0.707107 0.363803 -0.606339 -0.707107 0.363803 -0.606339 -0.707107 0.363803 0 1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0.514496 0 0.857493 0.514496 0.707107 0.606339 0.363803 0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0 0.514496 0.857493 0 0.514496 0.606339 -0.707107 0.363803 0.606339 -0.707107 0.363803 0.606339 0.707107 0.363803 0.606339 0.707107 0.363803 0 -1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0 0.514496 -0.857493 0 0.514496 -0.606339 0.707107 0.363803 -0.606339 0.707107 0.363803 -0.606339 
-0.707107 0.363803 -0.606339 -0.707107 0.363803 0 1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.857493 0.514496 0 0.857493 0.514496 0.707107 0.606339 0.363803 0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 -0.707107 0.606339 0.363803 1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 1 0 0 -0.857493 0.514496 0 -0.857493 0.514496 -0.707107 -0.606339 0.363803 -0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 0.707107 -0.606339 0.363803 -1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 -0.707107 0.707106 0 -0.707107 0.707106 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0 1 0 0 1 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 -0.707107 -0.707107 0 -0.707107 -0.707107 0 1 0 0 1 0 0 0 0 1 0 0 1 0 0 1 -1 0 0 -1 0 0 0 1 0 0 1 0 1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0.707107 -0.707107 0 0.707107 -0.707107 0 0 1 0 0 1 0 0 0 1 0 0 1 0 0 1 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 0 1 0 0 1 0 0 1 -0.707107 -0.707107 0 -0.707107 -0.707107 0 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 1 0 0 1 0 0 1 0.707107 0.707106 0 0.707107 0.707106 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 0 1 0 0 1 0 0 1 -0.707107 0.707107 0 -0.707107 0.707107 0 1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0.707106 -0.707107 0 0.707106 -0.707107 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 1 0 0 1 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0.707106 0.707107 0 0.707106 0.707107 0 -1 0 0 -1 0 0 0 0 1 0 0 1 0 0 1 1 0 0 1 0 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 -1 0 0 -1 0 0 1 0 0 1 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -0.447214 -0.894427 0 -0.447214 -0.894427 0 0 0 1 0 0 1 0 -0.813734 0.581238 0 -0.813734 0.581238 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 1 0 0 1 0 0 0.868243 0.496139 0 0.868243 0.496139 0 
0 1 0 0 1 -0.447214 0.894427 0 -0.447214 0.894427 0 0 0 -1 0 0 -1 0 1 0 0 1 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 -0.187382 0.982287 0 -0.187382 0.982287 0 -0.425778 0.904828 0 -0.425778 0.904828 0 -0.992115 -0.125334 0 -0.992115 -0.125334 0 -0.929775 -0.368128 0 -0.637422 0.770515 0 -0.637422 0.770515 0 -0.929775 -0.368128 0 -0.809017 0.587785 0 -0.809017 0.587785 0 -0.809017 -0.587785 0 -0.809017 -0.587785 0 -0.929776 0.368127 0 -0.929776 0.368127 0 -0.992115 0.125335 0 -0.637424 -0.770513 0 -0.992115 0.125335 0 -0.637424 -0.770513 0 -0.425778 -0.904828 0 -0.187382 -0.982287 0 -0.425778 -0.904828 0 -0.187382 -0.982287 0 0.06279 -0.998027 0 0.06279 -0.998027 0 0.309017 -0.951056 0 0.309017 -0.951056 0 0.535832 -0.844325 0 0.728969 -0.684547 0 0.535832 -0.844325 0 0.728969 -0.684547 0 0.876304 -0.481759 0 0.876304 -0.481759 0 0.968585 -0.248683 0 0.968585 -0.248683 0 1 0 0 1 0 0 0.968585 0.248682 0 0.968585 0.248682 0 0.876303 0.481761 0 0.876303 0.481761 0 0.72897 0.684545 0 0.72897 0.684545 0 0.535829 0.844327 0 0.535829 0.844327 0 0.309017 0.951056 0 0.309017 0.951056 0 0.06279 0.998027 0 0.06279 0.998027 0 -0.18737 0.982289 0 -0.18737 0.982289 0 -0.992116 -0.12532 0 -0.425784 0.904825 0 -0.425784 0.904825 0 -0.929775 -0.368128 0 -0.992116 -0.12532 0 -0.637418 0.770518 0 -0.637418 0.770518 0 -0.809017 -0.587785 0 -0.929775 -0.368128 0 -0.809021 0.58778 0 -0.809021 0.58778 0 -0.809017 -0.587785 0 -0.929773 0.368133 0 -0.929773 0.368133 0 -0.992116 0.12532 0 -0.637415 -0.77052 0 -0.992116 0.12532 0 -0.637415 -0.77052 0 -0.425784 -0.904825 0 -0.425784 -0.904825 0 -0.187385 -0.982287 0 -0.187385 -0.982287 0 0.0628046 -0.998026 0 0.0628046 -0.998026 0 0.309006 -0.95106 0 0.535834 -0.844323 0 0.309006 -0.95106 0 0.535834 -0.844323 0 0.728962 -0.684555 0 0.876307 -0.481753 0 0.728962 -0.684555 0 0.876307 -0.481753 0 0.968583 -0.248692 0 0.968583 -0.248692 0 1 0 0 1 0 0 0.968581 0.248697 0 0.968581 0.248697 0 0.876311 0.481747 0 0.876311 0.481747 0 0.728963 0.684553 0 0.728963 0.684553 0 0.535832 0.844325 0 0.535832 0.844325 0 0.30901 0.951059 0 0.30901 0.951059 0 0.0627893 0.998027 0 0.0627893 0.998027 0 -0.187379 0.982288 0 -0.187379 0.982288 0 -0.425778 0.904828 0 -0.425778 0.904828 0 -0.992114 -0.125342 0 -0.992114 -0.125342 0 -0.929779 -0.368118 0 -0.637425 0.770512 0 -0.637425 0.770512 0 -0.929779 -0.368118 0 -0.809012 0.587792 0 -0.809012 0.587792 0 -0.809012 -0.587792 0 -0.809012 -0.587792 0 -0.92978 0.368117 0 -0.92978 0.368117 0 -0.992114 0.125342 0 -0.637425 -0.770512 0 -0.992114 0.125342 0 -0.637425 -0.770512 0 -0.425781 -0.904826 0 -0.187379 -0.982288 0 -0.425781 -0.904826 0 -0.187379 -0.982288 0 0.0627922 -0.998027 0 0.0627922 -0.998027 0 0.309018 -0.951056 0 0.309018 -0.951056 0 0.535826 -0.844329 0 0.728969 -0.684547 0 0.535826 -0.844329 0 0.728969 -0.684547 0 0.876307 -0.481753 0 0.876307 -0.481753 0 0.968583 -0.248691 0 0.968583 -0.248691 0 1 0 0 1 0 0 0.968583 0.24869 0 0.968583 0.24869 0 0.876307 0.481753 0 0.876307 0.481753 0 0.728969 0.684547 0 0.728969 0.684547 0 0.535826 0.844329 0 0.535826 0.844329 0 0.309018 0.951056 0 0.309018 0.951056 0 0.0627885 0.998027 0 0.0627885 0.998027 0 -0.187379 0.982288 0 -0.187379 0.982288 0 -0.425779 0.904827 0 -0.425779 0.904827 0 -0.992114 -0.125337 0 -0.992114 -0.125337 0 -0.929774 -0.368131 0 -0.63742 0.770516 0 -0.63742 0.770516 0 -0.929774 -0.368131 0 -0.809018 0.587784 0 -0.809018 0.587784 0 -0.809017 -0.587785 0 -0.809017 -0.587785 0 -0.929774 0.368131 0 -0.929774 0.368131 0 -0.992114 0.125337 0 -0.63742 -0.770516 0 -0.992114 0.125337 0 
-0.63742 -0.770516 0 -0.425782 -0.904826 0 -0.187379 -0.982288 0 -0.425782 -0.904826 0 -0.187379 -0.982288 0 0.0627897 -0.998027 0 0.0627897 -0.998027 0 0.309008 -0.95106 0 0.309008 -0.95106 0 0.535835 -0.844323 0 0.728967 -0.684549 0 0.535835 -0.844323 0 0.728967 -0.684549 0 0.876305 -0.481757 0 0.876305 -0.481757 0 0.968584 -0.248688 0 0.968584 -0.248688 0 1 0 0 1 0 0 0.968584 0.248688 0 0.968584 0.248688 0 0.876308 0.481751 0 0.876308 0.481751 0 0.728967 0.684549 0 0.728967 0.684549 0 0.535835 0.844323 0 0.535835 0.844323 0 0.309008 0.95106 0 0.309008 0.95106 0 0.0627897 0.998027 0 0.0627897 0.998027 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 1 0 0 1 0 0 0 1 0 0 1 0 0 1 0 0 1 1 0 0 1 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0.894427 0.447214 0 0.894427 0.447214 0 0 0 1 0 0 1 0.813734 0 0.581238 0.813734 0 0.581238 1 0 0 1 0 0 0 0 -1 0 0 -1 -1 0 0 -1 0 0 -0.868243 0 0.496139 -0.868243 0 0.496139 0 0 1 0 0 1 -0.894427 0.447214 0 -0.894427 0.447214 0 0 0 -1 0 0 -1 -1 0 0 -1 0 0 1 0 0 1 0 0 0 0 1 0 0 1 0.187382 0 0.982287 0.187382 0 0.982287 0.992115 0 -0.125327 0.425778 0 0.904828 0.425778 0 0.904828 0.929775 0 -0.368128 0.992115 0 -0.125327 0.637422 0 0.770515 0.637422 0 0.770515 0.809017 0 -0.587785 0.929775 0 -0.368128 0.809017 0 0.587785 0.809017 0 0.587785 0.809017 0 -0.587785 0.929776 0 0.368127 0.929776 0 0.368127 0.992115 0 0.125327 0.637424 0 -0.770513 0.992115 0 0.125327 0.637424 0 -0.770513 0.425778 0 -0.904828 0.425778 0 -0.904828 0.187382 0 -0.982287 0.187382 0 -0.982287 -0.06279 0 -0.998027 -0.06279 0 -0.998027 -0.309017 0 -0.951056 -0.535832 0 -0.844325 -0.309017 0 -0.951056 -0.535832 0 -0.844325 -0.728969 0 -0.684547 -0.876304 0 -0.481759 -0.728969 0 -0.684547 -0.876304 0 -0.481759 -0.968585 0 -0.248683 -0.968585 0 -0.248683 -1 0 0 -1 0 0 -0.968585 0 0.248682 -0.968585 0 0.248682 -0.876303 0 
0.481761 -0.876303 0 0.481761 -0.72897 0 0.684545 -0.72897 0 0.684545 -0.535829 0 0.844327 -0.535829 0 0.844327 -0.309017 0 0.951056 -0.309017 0 0.951056 -0.06279 0 0.998027 -0.06279 0 0.998027 0.187372 0 0.982289 0.187372 0 0.982289 0.992115 0 -0.125331 0.425781 0 0.904826 0.425781 0 0.904826 0.929775 0 -0.368128 0.992115 0 -0.125331 0.637422 0 0.770515 0.637422 0 0.770515 0.809017 0 -0.587785 0.929775 0 -0.368128 0.809021 0 0.58778 0.809021 0 0.58778 0.809017 0 -0.587785 0.929773 0 0.368133 0.929773 0 0.368133 0.992115 0 0.125331 0.637419 0 -0.770517 0.992115 0 0.125331 0.637419 0 -0.770517 0.425781 0 -0.904826 0.425781 0 -0.904826 0.187387 0 -0.982286 0.187387 0 -0.982286 -0.0628041 0 -0.998026 -0.0628041 0 -0.998026 -0.309006 0 -0.95106 -0.535834 0 -0.844323 -0.309006 0 -0.95106 -0.535834 0 -0.844323 -0.728962 0 -0.684555 -0.876307 0 -0.481753 -0.728962 0 -0.684555 -0.876307 0 -0.481753 -0.968583 0 -0.248692 -0.968583 0 -0.248692 -1 0 0 -1 0 0 -0.968581 0 0.248697 -0.968581 0 0.248697 -0.876311 0 0.481747 -0.876311 0 0.481747 -0.728963 0 0.684553 -0.728963 0 0.684553 -0.535832 0 0.844325 -0.535832 0 0.844325 -0.30901 0 0.951059 -0.30901 0 0.951059 -0.0627888 0 0.998027 -0.0627888 0 0.998027 0.187379 0 0.982288 0.187379 0 0.982288 0.992114 0 -0.125342 0.425778 0 0.904828 0.425778 0 0.904828 0.929779 0 -0.368118 0.992114 0 -0.125342 0.637425 0 0.770512 0.637425 0 0.770512 0.809012 0 -0.587792 0.929779 0 -0.368118 0.809012 0 0.587792 0.809012 0 0.587792 0.809012 0 -0.587792 0.92978 0 0.368117 0.92978 0 0.368117 0.992114 0 0.125342 0.637425 0 -0.770512 0.992114 0 0.125342 0.637425 0 -0.770512 0.425781 0 -0.904826 0.425781 0 -0.904826 0.187379 0 -0.982288 0.187379 0 -0.982288 -0.0627922 0 -0.998027 -0.0627922 0 -0.998027 -0.309016 0 -0.951057 -0.535829 0 -0.844327 -0.309016 0 -0.951057 -0.535829 0 -0.844327 -0.728972 0 -0.684543 -0.876304 0 -0.481758 -0.728972 0 -0.684543 -0.876304 0 -0.481758 -0.968583 0 -0.248691 -0.968583 0 -0.248691 -1 0 0 -1 0 0 -0.968583 0 0.24869 -0.968583 0 0.24869 -0.876304 0 0.481758 -0.876304 0 0.481758 -0.728972 0 0.684543 -0.728972 0 0.684543 -0.535829 0 0.844327 -0.535829 0 0.844327 -0.309016 0 0.951057 -0.309016 0 0.951057 -0.0627885 0 0.998027 -0.0627885 0 0.998027 0.187379 0 0.982288 0.187379 0 0.982288 0.992114 0 -0.125337 0.425779 0 0.904827 0.425779 0 0.904827 0.929774 0 -0.368131 0.992114 0 -0.125337 0.63742 0 0.770516 0.63742 0 0.770516 0.809017 0 -0.587785 0.929774 0 -0.368131 0.809018 0 0.587784 0.809018 0 0.587784 0.809017 0 -0.587785 0.929774 0 0.368131 0.929774 0 0.368131 0.992114 0 0.125337 0.63742 0 -0.770516 0.992114 0 0.125337 0.63742 0 -0.770516 0.425782 0 -0.904826 0.425782 0 -0.904826 0.187379 0 -0.982288 0.187379 0 -0.982288 -0.0627897 0 -0.998027 -0.0627897 0 -0.998027 -0.309008 0 -0.95106 -0.535835 0 -0.844323 -0.309008 0 -0.95106 -0.535835 0 -0.844323 -0.72897 0 -0.684545 -0.876299 0 -0.481768 -0.72897 0 -0.684545 -0.876299 0 -0.481768 -0.968585 0 -0.248681 -0.968585 0 -0.248681 -1 0 0 -1 0 0 -0.968585 0 0.248681 -0.968585 0 0.248681 -0.876302 0 0.481762 -0.876302 0 0.481762 -0.72897 0 0.684545 -0.72897 0 0.684545 -0.535835 0 0.844323 -0.535835 0 0.844323 -0.309008 0 0.95106 -0.309008 0 0.95106 -0.0627897 0 0.998027 -0.0627897 0 0.998027 0 0 -1 0 0 -1 1 0 0 1 0 0 -1 0 0 -1 0 0 0 0 1 0 0 1 0 0 1 0 0 1 0 1 0 0 1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0.447214 0.894427 0 0.447214 0.894427 0 0 0 1 0 0 1 0 0.813734 0.581238 0 0.813734 0.581238 0 1 0 0 1 0 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -0.868243 0.496139 0 -0.868243 0.496139 0 0 1 0 0 1 0.447214 -0.894427 0 0.447214 -0.894427 0 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 1 0 0 1 0 0 0 1 0 0 1 0 0.187382 0.982287 0 0.187382 0.982287 0 0.992115 -0.125327 0 0.425778 0.904828 0 0.425778 0.904828 0 0.929775 -0.368128 0 0.992115 -0.125327 0 0.637422 0.770515 0 0.637422 0.770515 0 0.809017 -0.587785 0 0.929775 -0.368128 0 0.809017 0.587785 0 0.809017 0.587785 0 0.809017 -0.587785 0 0.929776 0.368127 0 0.929776 0.368127 0 0.992115 0.125327 0 0.637424 -0.770513 0 0.992115 0.125327 0 0.637424 -0.770513 0 0.425778 -0.904828 0 0.425778 -0.904828 0 0.187382 -0.982287 0 0.187382 -0.982287 0 -0.06279 -0.998027 0 -0.06279 -0.998027 0 -0.309017 -0.951056 0 -0.535832 -0.844325 0 -0.309017 -0.951056 0 -0.535832 -0.844325 0 -0.728969 -0.684547 0 -0.876304 -0.481759 0 -0.728969 -0.684547 0 -0.876304 -0.481759 0 -0.968585 -0.248683 0 -0.968585 -0.248683 0 -1 0 0 -1 0 0 -0.968585 0.248682 0 -0.968585 0.248682 0 -0.876303 0.481761 0 -0.876303 0.481761 0 -0.72897 0.684545 0 -0.72897 0.684545 0 -0.535829 0.844327 0 -0.535829 0.844327 0 -0.309017 0.951056 0 -0.309017 0.951056 0 -0.06279 0.998027 0 -0.06279 0.998027 0 0.187372 0.982289 0 0.187372 0.982289 0 0.992115 -0.125331 0 0.425781 0.904826 0 0.425781 0.904826 0 0.929775 -0.368128 0 0.992115 -0.125331 0 0.637422 0.770515 0 0.637422 0.770515 0 0.809017 -0.587785 0 0.929775 -0.368128 0 0.809021 0.58778 0 0.809021 0.58778 0 0.809017 -0.587785 0 0.929773 0.368133 0 0.929773 0.368133 0 0.992115 0.125331 0 0.637419 -0.770517 0 0.992115 0.125331 0 0.637419 -0.770517 0 0.425781 -0.904826 0 0.425781 -0.904826 0 0.187387 -0.982286 0 0.187387 -0.982286 0 -0.0628041 -0.998026 0 -0.0628041 -0.998026 0 -0.309006 -0.95106 0 -0.535834 -0.844323 0 -0.309006 -0.95106 0 -0.535834 -0.844323 0 -0.728962 -0.684555 0 -0.876307 -0.481753 0 -0.728962 -0.684555 0 -0.876307 -0.481753 0 -0.968583 
-0.248692 0 -0.968583 -0.248692 0 -1 0 0 -1 0 0 -0.968581 0.248697 0 -0.968581 0.248697 0 -0.876311 0.481747 0 -0.876311 0.481747 0 -0.728963 0.684553 0 -0.728963 0.684553 0 -0.535832 0.844325 0 -0.535832 0.844325 0 -0.30901 0.951059 0 -0.30901 0.951059 0 -0.0627888 0.998027 0 -0.0627888 0.998027 0 0.187379 0.982288 0 0.187379 0.982288 0 0.992114 -0.125342 0 0.425778 0.904828 0 0.425778 0.904828 0 0.929779 -0.368118 0 0.992114 -0.125342 0 0.637425 0.770512 0 0.637425 0.770512 0 0.809012 -0.587792 0 0.929779 -0.368118 0 0.809012 0.587792 0 0.809012 0.587792 0 0.809012 -0.587792 0 0.92978 0.368117 0 0.92978 0.368117 0 0.992114 0.125342 0 0.637425 -0.770512 0 0.992114 0.125342 0 0.637425 -0.770512 0 0.425781 -0.904826 0 0.425781 -0.904826 0 0.187379 -0.982288 0 0.187379 -0.982288 0 -0.0627922 -0.998027 0 -0.0627922 -0.998027 0 -0.309016 -0.951057 0 -0.535829 -0.844327 0 -0.309016 -0.951057 0 -0.535829 -0.844327 0 -0.728972 -0.684543 0 -0.876304 -0.481758 0 -0.728972 -0.684543 0 -0.876304 -0.481758 0 -0.968583 -0.248691 0 -0.968583 -0.248691 0 -1 0 0 -1 0 0 -0.968583 0.24869 0 -0.968583 0.24869 0 -0.876304 0.481758 0 -0.876304 0.481758 0 -0.728972 0.684543 0 -0.728972 0.684543 0 -0.535829 0.844327 0 -0.535829 0.844327 0 -0.309016 0.951057 0 -0.309016 0.951057 0 -0.0627885 0.998027 0 -0.0627885 0.998027 0 0.187379 0.982288 0 0.187379 0.982288 0 0.992114 -0.125337 0 0.425779 0.904827 0 0.425779 0.904827 0 0.929774 -0.368131 0 0.992114 -0.125337 0 0.63742 0.770516 0 0.63742 0.770516 0 0.809017 -0.587785 0 0.929774 -0.368131 0 0.809018 0.587784 0 0.809018 0.587784 0 0.809017 -0.587785 0 0.929774 0.368131 0 0.929774 0.368131 0 0.992114 0.125337 0 0.63742 -0.770516 0 0.992114 0.125337 0 0.63742 -0.770516 0 0.425782 -0.904826 0 0.425782 -0.904826 0 0.187379 -0.982288 0 0.187379 -0.982288 0 -0.0627897 -0.998027 0 -0.0627897 -0.998027 0 -0.309008 -0.95106 0 -0.535835 -0.844323 0 -0.309008 -0.95106 0 -0.535835 -0.844323 0 -0.72897 -0.684545 0 -0.876299 -0.481768 0 -0.72897 -0.684545 0 -0.876299 -0.481768 0 -0.968585 -0.248681 0 -0.968585 -0.248681 0 -1 0 0 -1 0 0 -0.968585 0.248681 0 -0.968585 0.248681 0 -0.876302 0.481762 0 -0.876302 0.481762 0 -0.72897 0.684545 0 -0.72897 0.684545 0 -0.535835 0.844323 0 -0.535835 0.844323 0 -0.309008 0.95106 0 -0.309008 0.95106 0 -0.0627897 0.998027 0 -0.0627897 0.998027 0 0 -1 0 0 -1 0 1 0 0 1 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0 0 1 -1 0 0 -1 0 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 
1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -0.894427 -0.447214 0 -0.894427 -0.447214 0 0 0 1 0 0 1 -0.813734 0 0.581238 -0.813734 0 0.581238 -1 0 0 -1 0 0 0 0 -1 0 0 -1 1 0 0 1 0 0 0.868243 0 0.496139 0.868243 0 0.496139 0 0 1 0 0 1 0.894427 -0.447214 0 0.894427 -0.447214 0 0 0 -1 0 0 -1 1 0 0 1 0 0 -1 0 0 -1 0 0 0 0 1 0 0 1 -0.187382 0 0.982287 -0.187382 0 0.982287 -0.992115 0 -0.125334 -0.425778 0 0.904828 -0.425778 0 0.904828 -0.929775 0 -0.368128 -0.992115 0 -0.125334 -0.637422 0 0.770515 -0.637422 0 0.770515 -0.809017 0 -0.587785 -0.929775 0 -0.368128 -0.809017 0 0.587785 -0.809017 0 0.587785 -0.809017 0 -0.587785 -0.929776 0 0.368127 -0.929776 0 0.368127 -0.992115 0 0.125335 -0.637424 0 -0.770513 -0.992115 0 0.125335 -0.637424 0 -0.770513 -0.425778 0 -0.904828 -0.425778 0 -0.904828 -0.187382 0 -0.982287 -0.187382 0 -0.982287 0.06279 0 -0.998027 0.06279 0 -0.998027 0.309017 0 -0.951056 0.535832 0 -0.844325 0.309017 0 -0.951056 0.535832 0 -0.844325 0.728969 0 -0.684547 0.876304 0 -0.481759 0.728969 0 -0.684547 0.876304 0 -0.481759 0.968585 0 -0.248683 0.968585 0 -0.248683 1 0 0 1 0 0 0.968585 0 0.248682 0.968585 0 0.248682 0.876303 0 0.481761 0.876303 0 0.481761 0.72897 0 0.684545 0.72897 0 0.684545 0.535829 0 0.844327 0.535829 0 0.844327 0.309017 0 0.951056 0.309017 0 0.951056 0.06279 0 0.998027 0.06279 0 0.998027 -0.18737 0 0.982289 -0.18737 0 0.982289 -0.992116 0 -0.12532 -0.425784 0 0.904825 -0.425784 0 0.904825 -0.929775 0 -0.368128 -0.992116 0 -0.12532 -0.637418 0 0.770518 -0.637418 0 0.770518 -0.809017 0 -0.587785 -0.929775 0 -0.368128 -0.809021 0 0.58778 -0.809021 0 0.58778 -0.809017 0 -0.587785 -0.929773 0 0.368133 -0.929773 0 0.368133 -0.992116 0 0.12532 -0.637415 0 -0.77052 -0.992116 0 0.12532 -0.637415 0 -0.77052 -0.425784 0 -0.904825 -0.425784 0 -0.904825 -0.187385 0 -0.982287 -0.187385 0 -0.982287 0.0628046 0 -0.998026 0.0628046 0 -0.998026 0.309006 0 -0.95106 0.535834 0 -0.844323 0.309006 0 -0.95106 0.535834 0 -0.844323 0.728962 0 -0.684555 0.876307 0 -0.481753 0.728962 0 -0.684555 0.876307 0 -0.481753 0.968583 0 -0.248692 0.968583 0 -0.248692 1 0 0 1 0 0 0.968581 0 0.248697 0.968581 0 0.248697 0.876311 0 0.481747 0.876311 0 0.481747 0.728963 0 0.684553 0.728963 0 0.684553 0.535832 0 0.844325 0.535832 0 0.844325 0.30901 0 0.951059 0.30901 0 0.951059 0.0627893 0 0.998027 0.0627893 0 0.998027 -0.187379 0 0.982288 -0.187379 0 0.982288 -0.992114 0 -0.125342 -0.425778 0 0.904828 -0.425778 0 0.904828 -0.929779 0 -0.368118 -0.992114 0 -0.125342 -0.637425 0 0.770512 -0.637425 0 0.770512 -0.809012 0 -0.587792 -0.929779 0 -0.368118 -0.809012 0 0.587792 -0.809012 0 0.587792 -0.809012 0 -0.587792 -0.92978 0 0.368117 -0.92978 0 0.368117 -0.992114 0 0.125342 -0.637425 0 -0.770512 -0.992114 0 0.125342 -0.637425 0 -0.770512 -0.425781 0 -0.904826 -0.425781 0 -0.904826 -0.187379 0 -0.982288 -0.187379 0 -0.982288 0.0627922 0 -0.998027 0.0627922 0 -0.998027 0.309018 0 -0.951056 0.535826 0 -0.844329 0.309018 0 -0.951056 0.535826 0 
[normal-vector array of the mesh asset added by this diff: several hundred x y z unit-normal triples]

[index array of the same mesh asset: interleaved vertex/normal index lists defining the model's polygons]
2336 1071 2337 1016 2337 1193 2337 1070 2338 1071 2338 1007 2338 1194 2339 1070 2339 1007 2339 1071 2340 1193 2340 1007 2340 1194 2341 1007 2341 1013 2341 1194 2342 1013 2342 1196 2342 1195 2343 1194 2343 1196 2343 1197 2344 1195 2344 1196 2344 1197 2345 1196 2345 1019 2345 1197 2346 1019 2346 1198 2346 1199 2347 1197 2347 1198 2347 1200 2348 1199 2348 1198 2348 1200 2349 1198 2349 1023 2349 1200 2350 1023 2350 1022 2350 1081 2351 1200 2351 1022 2351 1081 2352 1022 2352 1201 2352 1082 2353 1081 2353 1201 2353 1082 2354 1201 2354 1203 2354 1202 2355 1082 2355 1203 2355 1202 2356 1203 2356 1204 2356 1097 2357 1202 2357 1204 2357 1067 2358 1205 2358 1006 2358 1207 2359 1067 2359 1006 2359 1077 2360 1214 2360 1215 2360 1207 2361 1006 2361 1206 2361 1208 2362 1207 2362 1206 2362 1210 2363 1077 2363 1015 2363 1077 2364 1215 2364 1015 2364 1208 2365 1206 2365 1209 2365 1115 2366 1208 2366 1209 2366 1072 2367 1210 2367 1010 2367 1210 2368 1015 2368 1010 2368 1115 2369 1209 2369 1212 2369 1211 2370 1115 2370 1212 2370 1072 2371 1010 2371 1009 2371 1211 2372 1212 2372 1021 2372 1213 2373 1211 2373 1021 2373 1214 2374 1213 2374 1021 2374 1216 2375 1072 2375 1011 2375 1214 2376 1021 2376 1215 2376 1072 2377 1009 2377 1011 2377 1218 2378 1216 2378 1217 2378 1216 2379 1011 2379 1217 2379 1220 2380 1218 2380 1219 2380 1218 2381 1217 2381 1219 2381 1061 2382 1220 2382 1001 2382 1220 2383 1219 2383 1001 2383 1221 2384 1061 2384 1222 2384 1059 2385 1221 2385 1222 2385 1061 2386 1001 2386 1222 2386 1059 2387 1222 2387 1000 2387 1058 2388 1059 2388 1223 2388 1224 2389 1058 2389 1223 2389 1059 2390 1000 2390 1223 2390 1224 2391 1223 2391 1225 2391 1224 2392 1225 2392 1226 2392 1062 2393 1224 2393 1226 2393 1063 2394 1062 2394 1226 2394 1063 2395 1226 2395 1002 2395 1063 2396 1002 2396 1227 2396 1228 2397 1063 2397 1227 2397 1064 2398 1228 2398 1227 2398 1064 2399 1227 2399 1229 2399 1064 2400 1229 2400 1004 2400 1065 2401 1064 2401 1004 2401 1065 2402 1004 2402 1230 2402 1066 2403 1065 2403 1230 2403 1066 2404 1230 2404 1005 2404 1231 2405 1066 2405 1005 2405 1231 2406 1005 2406 1205 2406 1067 2407 1231 2407 1205 2407 1060 2408 1054 2408 1068 2408 1068 2409 1054 2409 1232 2409 1233 2410 1017 2410 1054 2410 1060 2411 1233 2411 1054 2411 1068 2412 1232 2412 1234 2412 1235 2413 1068 2413 1234 2413 1235 2414 1234 2414 1233 2414 1233 2415 1234 2415 1017 2415 1238 2416 1242 2416 1236 2416 1251 2417 1238 2417 1236 2417 1247 2418 1280 2418 1242 2418 1238 2419 1247 2419 1242 2419 1237 2420 1248 2420 1284 2420 1237 2421 1284 2421 1238 2421 1250 2422 1238 2422 1251 2422 1250 2423 1237 2423 1238 2423 1255 2424 1251 2424 1253 2424 1255 2425 1250 2425 1251 2425 1240 2426 1263 2426 1239 2426 1240 2427 1239 2427 1274 2427 1240 2428 1274 2428 1321 2428 1241 2429 1240 2429 1321 2429 1242 2430 1241 2430 1321 2430 1244 2431 1236 2431 1242 2431 1244 2432 1242 2432 1321 2432 1243 2433 1236 2433 1244 2433 1254 2434 1244 2434 1278 2434 1254 2435 1243 2435 1244 2435 1269 2436 1278 2436 1277 2436 1269 2437 1254 2437 1278 2437 1236 2438 1281 2438 1245 2438 1236 2439 1245 2439 1251 2439 1242 2440 1280 2440 1246 2440 1242 2441 1246 2441 1241 2441 1281 2442 1280 2442 1247 2442 1281 2443 1247 2443 1245 2443 1344 2444 1247 2444 1238 2444 1284 2445 1344 2445 1238 2445 1284 2446 1240 2446 1241 2446 1287 2447 1284 2447 1248 2447 1287 2448 1240 2448 1284 2448 1290 2449 1287 2449 1248 2449 1237 2450 1285 2450 1293 2450 1237 2451 1293 2451 1248 2451 1289 2452 1237 2452 1250 2452 1249 2453 1289 2453 1250 2453 1251 2454 1245 2454 1252 2454 1251 2455 
1252 2455 1253 2455 1348 2456 1286 2456 1250 2456 1255 2457 1348 2457 1250 2457 1253 2458 1243 2458 1254 2458 1255 2459 1253 2459 1254 2459 1292 2460 1291 2460 1255 2460 1292 2461 1255 2461 1254 2461 1351 2462 1271 2462 879 2462 1320 2463 1256 2463 16 2463 1258 2464 1351 2464 879 2464 1258 2465 559 2465 1259 2465 1239 2466 16 2466 1256 2466 1258 2467 1257 2467 1351 2467 1258 2468 1259 2468 1257 2468 1304 2469 16 2469 1239 2469 1260 2470 1320 2470 16 2470 1263 2471 1304 2471 1239 2471 1261 2472 1320 2472 1260 2472 1306 2473 1304 2473 1263 2473 1301 2474 1261 2474 1260 2474 263 2475 1306 2475 1263 2475 1262 2476 1266 2476 263 2476 1262 2477 263 2477 1263 2477 1264 2478 1108 2478 1268 2478 1264 2479 1301 2479 1108 2479 1264 2480 1261 2480 1301 2480 1276 2481 1264 2481 1268 2481 1319 2482 1266 2482 1262 2482 0 2483 1319 2483 1265 2483 0 2484 1266 2484 1319 2484 1299 2485 1267 2485 1276 2485 1299 2486 1276 2486 1268 2486 1277 2487 1267 2487 1299 2487 1317 2488 0 2488 1265 2488 573 2489 1317 2489 1270 2489 573 2490 0 2490 1317 2490 573 2491 1270 2491 634 2491 1297 2492 1269 2492 1277 2492 1297 2493 1277 2493 1299 2493 1297 2494 813 2494 1269 2494 639 2495 634 2495 1270 2495 1272 2496 639 2496 1270 2496 1271 2497 813 2497 1295 2497 1271 2498 1269 2498 813 2498 559 2499 639 2499 1272 2499 879 2500 1271 2500 1295 2500 1259 2501 559 2501 1272 2501 1273 2502 1343 2502 1274 2502 1239 2503 1273 2503 1274 2503 1240 2504 1275 2504 1316 2504 1240 2505 1316 2505 1263 2505 1274 2506 1320 2506 1261 2506 1321 2507 1274 2507 1261 2507 1279 2508 1283 2508 1244 2508 1321 2509 1279 2509 1244 2509 1244 2510 1264 2510 1276 2510 1278 2511 1244 2511 1276 2511 1282 2512 1335 2512 1277 2512 1278 2513 1282 2513 1277 2513 1243 2514 1347 2514 1281 2514 1243 2515 1281 2515 1236 2515 1269 2516 1336 2516 1346 2516 1269 2517 1346 2517 1254 2517 1273 2518 1316 2518 1275 2518 1343 2519 1273 2519 1275 2519 1279 2520 1343 2520 1275 2520 1279 2521 1275 2521 1246 2521 1279 2522 1246 2522 1280 2522 1280 2523 1281 2523 1283 2523 1279 2524 1280 2524 1283 2524 1283 2525 1281 2525 1347 2525 1282 2526 1283 2526 1346 2526 1283 2527 1347 2527 1346 2527 1335 2528 1282 2528 1336 2528 1282 2529 1346 2529 1336 2529 1241 2530 1344 2530 1284 2530 1241 2531 1246 2531 1344 2531 1344 2532 1293 2532 1285 2532 1247 2533 1344 2533 1285 2533 1245 2534 1247 2534 1286 2534 1247 2535 1285 2535 1286 2535 1252 2536 1245 2536 1348 2536 1245 2537 1286 2537 1348 2537 1355 2538 1275 2538 1240 2538 1287 2539 1355 2539 1240 2539 1288 2540 1319 2540 1262 2540 1288 2541 1262 2541 1287 2541 1288 2542 1287 2542 1290 2542 1350 2543 1288 2543 1290 2543 1289 2544 1350 2544 1290 2544 1314 2545 1289 2545 1249 2545 1314 2546 1249 2546 1291 2546 1314 2547 1350 2547 1289 2547 1353 2548 1291 2548 1292 2548 1353 2549 1314 2549 1291 2549 1351 2550 1292 2550 1271 2550 1351 2551 1353 2551 1292 2551 1293 2552 1345 2552 1290 2552 1248 2553 1293 2553 1290 2553 1358 2554 1285 2554 1237 2554 1289 2555 1358 2555 1237 2555 1250 2556 1286 2556 1357 2556 1250 2557 1357 2557 1249 2557 1253 2558 1252 2558 1243 2558 1252 2559 1347 2559 1243 2559 1291 2560 1294 2560 1348 2560 1291 2561 1348 2561 1255 2561 1254 2562 1346 2562 1359 2562 1254 2563 1359 2563 1292 2563 1295 2564 1340 2564 879 2564 879 2565 1340 2565 1323 2565 1337 2566 1295 2566 813 2566 1337 2567 1340 2567 1295 2567 1297 2568 1337 2568 813 2568 1296 2569 1337 2569 1297 2569 1299 2570 1296 2570 1297 2570 1299 2571 1298 2571 1296 2571 1331 2572 1298 2572 1299 2572 1268 2573 1331 2573 1299 2573 1268 2574 1108 2574 1331 2574 1108 
2575 1330 2575 1331 2575 1300 2576 1108 2576 1301 2576 1300 2577 1330 2577 1108 2577 1303 2578 1301 2578 1260 2578 1303 2579 1300 2579 1301 2579 1302 2580 1303 2580 1260 2580 16 2581 1302 2581 1260 2581 1304 2582 1305 2582 16 2582 1305 2583 1302 2583 16 2583 1304 2584 1306 2584 1305 2584 1306 2585 1307 2585 1305 2585 1326 2586 1306 2586 263 2586 1326 2587 1307 2587 1306 2587 1266 2588 1326 2588 263 2588 1327 2589 1326 2589 1266 2589 0 2590 1333 2590 1327 2590 0 2591 1327 2591 1266 2591 573 2592 1334 2592 1333 2592 573 2593 1333 2593 0 2593 634 2594 1308 2594 573 2594 573 2595 1308 2595 1334 2595 639 2596 1308 2596 634 2596 1339 2597 1308 2597 639 2597 1322 2598 639 2598 559 2598 1322 2599 1339 2599 639 2599 1309 2600 1322 2600 559 2600 1258 2601 1309 2601 559 2601 879 2602 1309 2602 1258 2602 879 2603 1323 2603 1309 2603 1277 2604 1335 2604 1267 2604 1335 2605 1310 2605 1267 2605 1310 2606 1311 2606 1276 2606 1267 2607 1310 2607 1276 2607 1271 2608 1312 2608 1336 2608 1271 2609 1336 2609 1269 2609 1329 2610 1342 2610 1261 2610 1264 2611 1329 2611 1261 2611 1257 2612 1325 2612 1324 2612 1257 2613 1324 2613 1351 2613 1320 2614 1313 2614 1256 2614 1320 2615 1341 2615 1313 2615 1259 2616 1354 2616 1325 2616 1259 2617 1325 2617 1257 2617 1256 2618 1273 2618 1239 2618 1256 2619 1313 2619 1273 2619 1272 2620 1314 2620 1353 2620 1259 2621 1272 2621 1353 2621 1270 2622 1315 2622 1338 2622 1270 2623 1338 2623 1272 2623 1316 2624 1328 2624 1262 2624 1263 2625 1316 2625 1262 2625 1317 2626 1288 2626 1350 2626 1270 2627 1317 2627 1350 2627 1265 2628 1318 2628 1317 2628 1318 2629 1361 2629 1317 2629 1332 2630 1318 2630 1265 2630 1319 2631 1332 2631 1265 2631 1343 2632 1341 2632 1320 2632 1274 2633 1343 2633 1320 2633 1261 2634 1342 2634 1279 2634 1261 2635 1279 2635 1321 2635 1283 2636 1329 2636 1264 2636 1244 2637 1283 2637 1264 2637 1276 2638 1311 2638 1282 2638 1276 2639 1282 2639 1278 2639 1338 2640 1322 2640 1354 2640 1323 2641 1312 2641 1324 2641 1302 2642 1313 2642 1341 2642 1313 2643 1302 2643 1273 2643 1323 2644 1324 2644 1309 2644 1324 2645 1325 2645 1309 2645 1325 2646 1354 2646 1309 2646 1273 2647 1302 2647 1305 2647 1354 2648 1322 2648 1309 2648 1302 2649 1341 2649 1303 2649 1273 2650 1305 2650 1316 2650 1303 2651 1341 2651 1342 2651 1316 2652 1305 2652 1307 2652 1303 2653 1342 2653 1300 2653 1316 2654 1307 2654 1326 2654 1326 2655 1327 2655 1328 2655 1316 2656 1326 2656 1328 2656 1331 2657 1330 2657 1329 2657 1330 2658 1300 2658 1329 2658 1300 2659 1342 2659 1329 2659 1331 2660 1329 2660 1311 2660 1328 2661 1327 2661 1332 2661 1311 2662 1310 2662 1298 2662 1331 2663 1311 2663 1298 2663 1318 2664 1332 2664 1333 2664 1332 2665 1327 2665 1333 2665 1318 2666 1333 2666 1361 2666 1298 2667 1310 2667 1335 2667 1315 2668 1361 2668 1334 2668 1361 2669 1333 2669 1334 2669 1308 2670 1315 2670 1334 2670 1335 2671 1336 2671 1296 2671 1336 2672 1337 2672 1296 2672 1298 2673 1335 2673 1296 2673 1315 2674 1308 2674 1339 2674 1315 2675 1339 2675 1338 2675 1340 2676 1337 2676 1312 2676 1337 2677 1336 2677 1312 2677 1338 2678 1339 2678 1322 2678 1340 2679 1312 2679 1323 2679 1342 2680 1341 2680 1343 2680 1342 2681 1343 2681 1279 2681 1246 2682 1275 2682 1344 2682 1293 2683 1344 2683 1355 2683 1344 2684 1275 2684 1355 2684 1293 2685 1355 2685 1345 2685 1311 2686 1329 2686 1283 2686 1311 2687 1283 2687 1282 2687 1346 2688 1347 2688 1252 2688 1346 2689 1252 2689 1348 2689 1348 2690 1294 2690 1359 2690 1346 2691 1348 2691 1359 2691 1286 2692 1285 2692 1358 2692 1286 2693 1358 2693 1357 2693 1328 2694 1355 2694 
1287 2694 1262 2695 1328 2695 1287 2695 1288 2696 1356 2696 1332 2696 1288 2697 1332 2697 1319 2697 1345 2698 1358 2698 1289 2698 1290 2699 1345 2699 1289 2699 1314 2700 1360 2700 1349 2700 1314 2701 1349 2701 1350 2701 1357 2702 1294 2702 1291 2702 1249 2703 1357 2703 1291 2703 1351 2704 1324 2704 1352 2704 1351 2705 1352 2705 1353 2705 1359 2706 1312 2706 1271 2706 1292 2707 1359 2707 1271 2707 1353 2708 1352 2708 1354 2708 1353 2709 1354 2709 1259 2709 1338 2710 1360 2710 1314 2710 1272 2711 1338 2711 1314 2711 1350 2712 1349 2712 1315 2712 1350 2713 1315 2713 1270 2713 1361 2714 1356 2714 1288 2714 1317 2715 1361 2715 1288 2715 1328 2716 1332 2716 1356 2716 1355 2717 1328 2717 1356 2717 1345 2718 1355 2718 1356 2718 1345 2719 1356 2719 1349 2719 1345 2720 1349 2720 1358 2720 1357 2721 1358 2721 1360 2721 1294 2722 1357 2722 1360 2722 1358 2723 1349 2723 1360 2723 1359 2724 1294 2724 1352 2724 1294 2725 1360 2725 1352 2725 1312 2726 1359 2726 1324 2726 1359 2727 1352 2727 1324 2727 1352 2728 1360 2728 1338 2728 1352 2729 1338 2729 1354 2729 1349 2730 1356 2730 1361 2730 1349 2731 1361 2731 1315 2731 1363 2732 12 2732 1362 2732 12 2733 1363 2733 1372 2733 1362 2734 12 2734 1370 2734 1372 2735 1363 2735 1366 2735 1366 2736 1364 2736 1365 2736 1372 2737 1366 2737 1365 2737 1364 2738 1370 2738 1365 2738 1362 2739 1370 2739 1364 2739 1362 2740 1367 2740 1363 2740 1368 2741 1367 2741 1362 2741 1363 2742 1367 2742 1366 2742 1366 2743 1367 2743 1369 2743 1364 2744 1368 2744 1362 2744 1376 2745 1368 2745 1364 2745 1366 2746 1369 2746 1364 2746 1364 2747 1369 2747 1376 2747 1370 2748 12 2748 1377 2748 1371 2749 1370 2749 1377 2749 1374 2750 1365 2750 1370 2750 1374 2751 1370 2751 1371 2751 12 2752 1372 2752 1373 2752 1377 2753 12 2753 1373 2753 1373 2754 1372 2754 1365 2754 1373 2755 1365 2755 1374 2755 1375 2756 1382 2756 1367 2756 1375 2757 1367 2757 1368 2757 1367 2758 1382 2758 1383 2758 1369 2759 1367 2759 1383 2759 1384 2760 1375 2760 1368 2760 1384 2761 1368 2761 1376 2761 1369 2762 1383 2762 1384 2762 1376 2763 1369 2763 1384 2763 1377 2764 1380 2764 1379 2764 1371 2765 1377 2765 1379 2765 1378 2766 1371 2766 1379 2766 1374 2767 1371 2767 1378 2767 1381 2768 1380 2768 1377 2768 1381 2769 1377 2769 1373 2769 1381 2770 1373 2770 1378 2770 1378 2771 1373 2771 1374 2771 1386 2772 1382 2772 1375 2772 1386 2773 1375 2773 1387 2773 1383 2774 1382 2774 1386 2774 1385 2775 1383 2775 1386 2775 1387 2776 1375 2776 1384 2776 1387 2777 1384 2777 1388 2777 1384 2778 1383 2778 1385 2778 1388 2779 1384 2779 1385 2779 1379 2780 1380 2780 1386 2780 1381 2781 1386 2781 1380 2781 1385 2782 1386 2782 1381 2782 1387 2783 1379 2783 1386 2783 1378 2784 1388 2784 1385 2784 1378 2785 1385 2785 1381 2785 1378 2786 1379 2786 1387 2786 1378 2787 1387 2787 1388 2787
diff --git a/models/rg_robot/meshes/FixedBrick.dae b/models/rg_robot/meshes/FixedBrick.dae index 2227e7bd7e..c00e434edc 100644 --- a/models/rg_robot/meshes/FixedBrick.dae +++ b/models/rg_robot/meshes/FixedBrick.dae @@ -1,63 +1,70 @@ - - - - - VCGLab - VCGLib | MeshLab - - Y_UP - do sep. 17 13:54:34 2015 - do sep. 17 13:54:34 2015 - - - - - - - - - -0.012573 -0.0115027 -0.01625 -0.012573 -0.0115027 -0.01775 -0.0129245 -0.0115946 -0.01775 -0.0130518 -0.011666 -0.01625 -0.0133536 -0.0119791 -0.01775 -0.0133536 -0.0119791 -0.01625 -0.0133893 -0.0120428 -0.01625 -0.0134464 -0.0121771 -0.01775 -0.013494 -0.0123906 -0.01625 -0.0134993 -0.0124635 -0.01625 -0.0134675 -0.0127529 -0.01775 -0.0134675 -0.0127529 -0.01625 -0.0134464 -0.0128229 -0.01625 -0.0131112 -0.0132915 -0.01775 -0.0130518 -0.013334 -0.01625 -0.0129245 -0.0134054 -0.01625 -0.0128572 -0.013434 -0.01775 -0.0128572 -0.013434 -0.01625 -0.0127174 -0.0134761 -0.01775 -0.0127174 -0.0134761 -0.01625 -0.0125 -0.0135 -0.01775 -0.012573 -0.0134973 -0.01625 -0.0125 -0.0135 -0.01625 -0.012427 -0.0134973 -0.01625 -0.0123544 -0.0134893 -0.01625 -0.0122826 -0.0134761 -0.01775 -0.0122826 -0.0134761 -0.01625 -0.0122119 -0.0134576 -0.01625 -0.0121428 -0.013434 -0.01625 -0.0119482 -0.013334 -0.01625 -0.0118327 -0.0132448 -0.01625 -0.0117314 -0.0131397 -0.01625 -0.0116867 -0.0130819 -0.01625 -0.0115325 -0.0127529 -0.01625 -0.011506 -0.0126094 -0.01625 -0.0115007 -0.0124635 -0.01625 -0.0116107 -0.0120428 -0.01775 -0.0117314 -0.0118603 -0.01775 -0.0116867 -0.0119181 -0.01625 -0.0117314 -0.0118603 -0.01625 -0.0117801 -0.0118059 -0.01625 -0.0118888 -0.0117085 -0.01625 -0.0119482 -0.011666 -0.01625 -0.0120755 -0.0115946 -0.01775 -0.0121428 -0.011566 -0.01625 -0.0122826 -0.0115239 -0.01625 -0.0123544 -0.0115107 -0.01625 -0.0127174 -0.0115239 -0.01775 -0.0127174 -0.0115239 -0.01625 -0.0127881 -0.0115424 -0.01775 -0.0127881 -0.0115424 -0.01625 -0.0128572 -0.011566 -0.01775 -0.0129894 -0.0116279 -0.01775 -0.0131112 -0.0117085 -0.01775 -0.0131673 -0.0117552 -0.01775 -0.0132686 -0.0118603 -0.01775 -0.0133133 -0.0119181 -0.01775 -0.0134203 -0.0121089 -0.01775 -0.0134464 -0.0121771 -0.01625 -0.013494 -0.0123906 -0.01775 -0.0134993 -0.0124635 -0.01775 -0.0134834 -0.0126816 -0.01775 -0.0134464 -0.0128229 -0.01775 -0.0133133 -0.0130819 -0.01775 -0.0132686 -0.0131397 -0.01775 -0.0132686 -0.0131397 -0.01625 -0.0132199 -0.0131941 -0.01625 -0.0130518 -0.013334 -0.01775 -0.0129894 -0.013372 -0.01775 -0.0129245 -0.0134054 -0.01775 -0.012573 -0.0134973 -0.01775 -0.0120106 -0.013372 -0.01775 -0.0119482 -0.013334 -0.01775 -0.0117801 -0.0131941 -0.01775 -0.0117314 -0.0131397 -0.01775 -0.0116464 -0.0130209 -0.01775 -0.0116107 -0.0129572 -0.01625 -0.0115797 -0.0128911 -0.01625 -0.0115166 -0.0126816 -0.01625 -0.011506 -0.0126094 -0.01775 -0.011506 -0.0123906 -0.01775 -0.0115325 -0.0122471 -0.01775 -0.0115325 -0.0122471 -0.01625 -0.0116867 -0.0119181 -0.01775 -0.0117801 -0.0118059 -0.01775 -0.0118327 -0.0117552 -0.01775 -0.0118888 -0.0117085 -0.01775 -0.0119482 -0.011666 -0.01775 -0.0121428 -0.011566 -0.01775 -0.0122119 -0.0115424 -0.01625 -0.0122826 -0.0115239 -0.01775 0.012427 -0.0115027 -0.01625 0.0125 -0.0115 -0.01625 0.012427 -0.0115027 -0.01775 0.0123544 -0.0115107 -0.01775 0.0122826 -0.0115239 -0.01775 0.0122119 -0.0115424 -0.01625 0.0120106 -0.0116279 -0.01775 0.0118888 -0.0117085 -0.01625 0.0118327 -0.0117552 -0.01625 0.0116867 -0.0119181 -0.01775 0.011506 -0.0126094 -0.01625 0.0115536 -0.0128229 -0.01775 0.0115325 -0.0127529 -0.01625 0.0116107 -0.0129572 -0.01625 0.0116867 
-0.0130819 -0.01625 0.0117314 -0.0131397 -0.01625 0.0118327 -0.0132448 -0.01625 0.0118888 -0.0132915 -0.01775 0.0118888 -0.0132915 -0.01625 0.0121428 -0.013434 -0.01625 0.0125 -0.0135 -0.01775 0.0125 -0.0135 -0.01625 0.0127881 -0.0134576 -0.01775 0.0128572 -0.013434 -0.01775 0.0128572 -0.013434 -0.01625 0.0129894 -0.013372 -0.01775 0.0129245 -0.0134054 -0.01625 0.0130518 -0.013334 -0.01775 0.0130518 -0.013334 -0.01625 0.0131112 -0.0132915 -0.01775 0.0132686 -0.0131397 -0.01625 0.0133133 -0.0130819 -0.01775 0.0133536 -0.0130209 -0.01775 0.0133536 -0.0130209 -0.01625 0.0133893 -0.0129572 -0.01625 0.0134675 -0.0127529 -0.01775 0.013494 -0.0126094 -0.01775 0.0134993 -0.0125365 -0.01625 0.013494 -0.0123906 -0.01775 0.013494 -0.0123906 -0.01625 0.0134675 -0.0122471 -0.01625 0.0133133 -0.0119181 -0.01625 0.0132686 -0.0118603 -0.01625 0.0128572 -0.011566 -0.01625 0.012573 -0.0115027 -0.01775 0.012573 -0.0115027 -0.01625 0.0125 -0.0115 -0.01775 0.0121428 -0.011566 -0.01625 0.0120755 -0.0115946 -0.01625 0.0120106 -0.0116279 -0.01625 0.0119482 -0.011666 -0.01775 0.0118888 -0.0117085 -0.01775 0.0117801 -0.0118059 -0.01775 0.0117801 -0.0118059 -0.01625 0.0117314 -0.0118603 -0.01775 0.0116867 -0.0119181 -0.01625 0.0116107 -0.0120428 -0.01625 0.0115536 -0.0121771 -0.01775 0.0116464 -0.0130209 -0.01775 0.0117314 -0.0131397 -0.01775 0.0117801 -0.0131941 -0.01775 0.0117801 -0.0131941 -0.01625 0.0119482 -0.013334 -0.01775 0.0120106 -0.013372 -0.01775 0.0120755 -0.0134054 -0.01625 0.012573 -0.0134973 -0.01775 0.012573 -0.0134973 -0.01625 0.0126456 -0.0134893 -0.01775 0.0127881 -0.0134576 -0.01625 0.0129245 -0.0134054 -0.01775 0.0129894 -0.013372 -0.01625 0.0131112 -0.0132915 -0.01625 0.0133133 -0.0130819 -0.01625 0.0134675 -0.0127529 -0.01625 0.0134993 -0.0125365 -0.01775 0.0134993 -0.0124635 -0.01775 0.0134834 -0.0123184 -0.01775 0.0134464 -0.0121771 -0.01775 0.0133893 -0.0120428 -0.01775 0.0133536 -0.0119791 -0.01775 0.0132686 -0.0118603 -0.01775 0.0132199 -0.0118059 -0.01775 0.0131673 -0.0117552 -0.01775 0.0131112 -0.0117085 -0.01775 0.0130518 -0.011666 -0.01775 0.0129894 -0.0116279 -0.01775 0.0129894 -0.0116279 -0.01625 0.0127881 -0.0115424 -0.01625 0.0126456 -0.0115107 -0.01775 0.012427 0.0134973 -0.01625 0.0123544 0.0134893 -0.01775 0.0123544 0.0134893 -0.01625 0.0122826 0.0134761 -0.01775 0.0122119 0.0134576 -0.01625 0.0121428 0.013434 -0.01625 0.0119482 0.013334 -0.01775 0.0119482 0.013334 -0.01625 0.0118888 0.0132915 -0.01625 0.0118327 0.0132448 -0.01625 0.0116464 0.0130209 -0.01625 0.0115797 0.0128911 -0.01775 0.0115797 0.0128911 -0.01625 0.0115325 0.0127529 -0.01775 0.0115166 0.0126816 -0.01775 0.0115007 0.0124635 -0.01625 0.0115166 0.0123184 -0.01775 0.0115166 0.0123184 -0.01625 0.0115325 0.0122471 -0.01625 0.0115536 0.0121771 -0.01625 0.0116867 0.0119181 -0.01625 0.0117314 0.0118603 -0.01625 0.0118327 0.0117552 -0.01775 0.0118327 0.0117552 -0.01625 0.0119482 0.011666 -0.01775 0.0122119 0.0115424 -0.01625 0.0125 0.0115 -0.01775 0.012573 0.0115027 -0.01775 0.0129894 0.011628 -0.01775 0.0129894 0.011628 -0.01625 0.0131673 0.0117552 -0.01625 0.0132686 0.0118603 -0.01775 0.0133133 0.0119181 -0.01625 0.0133536 0.0119791 -0.01625 0.0133893 0.0120428 -0.01625 0.0134203 0.0121089 -0.01775 0.0134675 0.0122471 -0.01775 0.013494 0.0123906 -0.01775 0.0134834 0.0126816 -0.01625 0.0134675 0.0127529 -0.01625 0.0134203 0.0128911 -0.01775 0.0134464 0.0128229 -0.01625 0.0133893 0.0129572 -0.01775 0.0132199 0.0131941 -0.01775 0.0132199 0.0131941 -0.01625 0.0131673 0.0132448 -0.01625 0.0130518 0.013334 -0.01625 
0.0129245 0.0134054 -0.01775 0.0126456 0.0134893 -0.01775 0.012573 0.0134973 -0.01775 0.0122826 0.0134761 -0.01625 0.0122119 0.0134576 -0.01775 0.0121428 0.013434 -0.01775 0.0120755 0.0134054 -0.01775 0.0120106 0.013372 -0.01625 0.0118327 0.0132448 -0.01775 0.0117801 0.0131941 -0.01775 0.0117314 0.0131397 -0.01775 0.0116867 0.0130819 -0.01775 0.0116464 0.0130209 -0.01775 0.0115536 0.0128229 -0.01775 0.0115325 0.0127529 -0.01625 0.0115166 0.0126816 -0.01625 0.011506 0.0126094 -0.01625 0.0115007 0.0125365 -0.01625 0.0115007 0.0124635 -0.01775 0.011506 0.0123906 -0.01775 0.0116107 0.0120428 -0.01775 0.0117801 0.0118059 -0.01775 0.0120755 0.0115946 -0.01775 0.0122826 0.0115239 -0.01625 0.0123544 0.0115107 -0.01625 0.0127174 0.0115239 -0.01625 0.0127881 0.0115424 -0.01775 0.0127881 0.0115424 -0.01625 0.0129245 0.0115946 -0.01625 0.0130518 0.011666 -0.01625 0.0132199 0.0118059 -0.01775 0.0132199 0.0118059 -0.01625 0.0133536 0.0119791 -0.01775 0.0133893 0.0120428 -0.01775 0.0134203 0.0121089 -0.01625 0.0134834 0.0123184 -0.01625 0.013494 0.0123906 -0.01625 0.0134993 0.0125365 -0.01775 0.013494 0.0126094 -0.01775 0.0134675 0.0127529 -0.01775 0.0133893 0.0129572 -0.01625 0.0133133 0.0130819 -0.01775 0.0132686 0.0131397 -0.01625 0.0131673 0.0132448 -0.01775 0.0130518 0.013334 -0.01775 0.0129894 0.013372 -0.01625 0.0127881 0.0134576 -0.01775 0.0127174 0.0134761 -0.01775 0.0127174 0.0134761 -0.01625 0.012573 0.0134973 -0.01625 -0.012573 0.0134973 -0.01775 -0.012573 0.0134973 -0.01625 -0.0126456 0.0134893 -0.01625 -0.0127174 0.0134761 -0.01775 -0.0127174 0.0134761 -0.01625 -0.0128572 0.013434 -0.01625 -0.0129245 0.0134054 -0.01625 -0.0129894 0.013372 -0.01775 -0.0131673 0.0132448 -0.01775 -0.0131673 0.0132448 -0.01625 -0.0133536 0.0130209 -0.01775 -0.0133536 0.0130209 -0.01625 -0.0134675 0.0127529 -0.01775 -0.0134993 0.0125365 -0.01775 -0.0134993 0.0124635 -0.01775 -0.013494 0.0123906 -0.01775 -0.013494 0.0123906 -0.01625 -0.0134834 0.0123184 -0.01775 -0.0134834 0.0123184 -0.01625 -0.0134464 0.0121771 -0.01775 -0.0134203 0.0121089 -0.01625 -0.0133893 0.0120428 -0.01625 -0.0133536 0.0119791 -0.01625 -0.0132686 0.0118603 -0.01625 -0.0132199 0.0118059 -0.01625 -0.0131112 0.0117085 -0.01625 -0.0130518 0.011666 -0.01775 -0.0129894 0.011628 -0.01625 -0.0129245 0.0115946 -0.01625 -0.0128572 0.011566 -0.01775 -0.0127881 0.0115424 -0.01625 -0.0126456 0.0115107 -0.01775 -0.012573 0.0115027 -0.01775 -0.012573 0.0115027 -0.01625 -0.012427 0.0115027 -0.01775 -0.012427 0.0115027 -0.01625 -0.0122119 0.0115424 -0.01625 -0.0121428 0.011566 -0.01625 -0.0120755 0.0115946 -0.01775 -0.0118327 0.0117552 -0.01625 -0.0117314 0.0118603 -0.01625 -0.0116107 0.0120428 -0.01625 -0.0115325 0.0122471 -0.01775 -0.0115536 0.0121771 -0.01625 -0.0115166 0.0123184 -0.01775 -0.0115007 0.0125365 -0.01775 -0.0115007 0.0125365 -0.01625 -0.0115325 0.0127529 -0.01625 -0.0115536 0.0128229 -0.01625 -0.0116107 0.0129572 -0.01625 -0.0117801 0.0131941 -0.01775 -0.0119482 0.013334 -0.01775 -0.0121428 0.013434 -0.01775 -0.0121428 0.013434 -0.01625 -0.0122826 0.0134761 -0.01775 -0.0123544 0.0134893 -0.01775 -0.0125 0.0135 -0.01775 -0.0125 0.0135 -0.01625 -0.0127881 0.0134576 -0.01625 -0.0129245 0.0134054 -0.01775 -0.0131112 0.0132915 -0.01625 -0.0132686 0.0131397 -0.01625 -0.0133893 0.0129572 -0.01625 -0.0134203 0.0128911 -0.01775 -0.0134203 0.0128911 -0.01625 -0.0134675 0.0127529 -0.01625 -0.0134834 0.0126816 -0.01775 -0.0134834 0.0126816 -0.01625 -0.013494 0.0126094 -0.01625 -0.0134675 0.0122471 -0.01775 -0.0134203 0.0121089 -0.01775 -0.0133893 
0.0120428 -0.01775 -0.0131673 0.0117552 -0.01625 -0.0129894 0.011628 -0.01775 -0.0129245 0.0115946 -0.01775 -0.0123544 0.0115107 -0.01625 -0.0122119 0.0115424 -0.01775 -0.0120106 0.011628 -0.01775 -0.0118888 0.0117085 -0.01625 -0.0118327 0.0117552 -0.01775 -0.0117801 0.0118059 -0.01625 -0.0116464 0.0119791 -0.01625 -0.0115797 0.0121089 -0.01625 -0.0115325 0.0122471 -0.01625 -0.0115166 0.0123184 -0.01625 -0.011506 0.0123906 -0.01625 -0.0115007 0.0124635 -0.01775 -0.011506 0.0126094 -0.01775 -0.011506 0.0126094 -0.01625 -0.0115166 0.0126816 -0.01625 -0.0115325 0.0127529 -0.01775 -0.0115536 0.0128229 -0.01775 -0.0117314 0.0131397 -0.01625 -0.0117801 0.0131941 -0.01625 -0.0118327 0.0132448 -0.01775 -0.0118327 0.0132448 -0.01625 -0.0119482 0.013334 -0.01625 -0.0120755 0.0134054 -0.01625 -0.0122119 0.0134576 -0.01775 -0.019 -0.010428 -0.00953638 -0.017625 -0.010428 -0.00953638 -0.017 -0.0105544 -0.0093798 -0.017625 -0.0104892 -0.0094565 -0.017 -0.0104892 -0.0094565 -0.017625 -0.0103709 -0.00961925 -0.019 -0.0104892 -0.0094565 -0.017625 -0.0105544 -0.0093798 -0.017 -0.0106233 -0.00930647 -0.017625 -0.0106233 -0.00930647 -0.017 -0.0106959 -0.0092367 -0.017625 -0.0106959 -0.0092367 -0.019 -0.0106233 -0.00930647 -0.019 -0.0106959 -0.0092367 -0.017625 -0.0107718 -0.00917067 -0.017625 -0.010851 -0.00910855 -0.017 -0.0109332 -0.00905048 -0.017625 -0.0109332 -0.00905048 -0.017625 -0.0110182 -0.00899663 -0.017 -0.0110182 -0.00899663 -0.017625 -0.0111059 -0.00894712 -0.017625 -0.0111959 -0.00890208 -0.017 -0.011288 -0.00886163 -0.017625 -0.0113821 -0.00882587 -0.017 -0.0113821 -0.00882587 -0.017625 -0.011288 -0.00886163 -0.017 -0.0114779 -0.00879488 -0.017625 -0.0114779 -0.00879488 -0.019 -0.0114779 -0.00879488 -0.017 -0.0117728 -0.00873131 -0.017625 -0.0115751 -0.00876875 -0.019 -0.0115751 -0.00876875 -0.017625 -0.0116734 -0.00874754 -0.017 -0.0103709 -0.00961925 -0.017 -0.010428 -0.00953638 -0.017 -0.0125365 -0.0107507 -0.017 -0.0107718 -0.00917067 -0.017 -0.010851 -0.00910855 -0.017 -0.0111059 -0.00894712 -0.017 -0.0111959 -0.00890208 -0.017 -0.0115751 -0.00876875 -0.017 -0.0116734 -0.00874754 -0.017 -0.0122657 -0.00859762 -0.017 -0.0126816 -0.0107666 -0.017 -0.0124506 -0.00850965 -0.017 -0.0128911 -0.0108297 -0.017 -0.0128229 -0.0108036 -0.017 -0.0123593 -0.008556 -0.017 -0.0126094 -0.010756 -0.017 -0.0126255 -0.00840322 -0.017 -0.0127087 -0.00834344 -0.017 -0.0128653 -0.0082115 -0.017 -0.0131941 -0.0110301 -0.017 -0.013134 -0.00790321 -0.017 -0.0132434 -0.00773008 -0.017 -0.0132913 -0.00763955 -0.017 -0.013372 -0.0112606 -0.017 -0.0134054 -0.0113255 -0.017 -0.0134576 -0.0114619 -0.017 -0.0134582 -0.00715685 -0.017 -0.0134764 -0.00705608 -0.017 -0.0134893 -0.0116044 -0.017 -0.0134895 -0.0069545 -0.017 -0.0135 -0.01175 -0.017 -0.0134974 -0.00685238 -0.017 -0.0127887 -0.00827947 -0.017 -0.0132448 -0.0110827 -0.017 -0.0132915 -0.0111388 -0.017 -0.013334 -0.0111982 -0.017 -0.013434 -0.0113928 -0.017 -0.0123906 -0.010756 -0.017 -0.0123184 -0.0107666 -0.017 -0.0120428 -0.0108607 -0.017 -0.0119791 -0.0108964 -0.017 -0.0119181 -0.0109367 -0.017 -0.0118603 -0.0109814 -0.017 -0.0117085 -0.0111388 -0.017 -0.0116279 -0.0112606 -0.017 -0.0115946 -0.0113255 -0.017 -0.010318 -0.00970488 -0.017 -0.0115027 -0.011823 -0.017 -0.0115424 -0.0120381 -0.017 -0.011566 -0.0121072 -0.017 -0.0116279 -0.0122394 -0.017 -0.011666 -0.0123018 -0.017 -0.0117085 -0.0123612 -0.017 -0.0121771 -0.0126964 -0.017 -0.0121089 -0.0126703 -0.017 -0.0123906 -0.012744 -0.017 -0.0123184 -0.0127334 -0.017 -0.0124635 -0.0127493 -0.017 -0.0125365 
-0.0127493 -0.017 -0.0128911 -0.0126703 -0.017 -0.0135 -0.01625 -0.017 -0.0130209 -0.0126036 -0.017 -0.0130819 -0.0125633 -0.017 -0.0131397 -0.0125186 -0.017 -0.0131941 -0.0124699 -0.017 -0.013372 -0.0122394 -0.017 -0.013434 -0.0121072 -0.017 -0.0134761 -0.0119674 -0.017 -0.0134893 -0.0118956 -0.017 -0.0118738 -0.00871475 -0.0174167 -0.0118738 -0.00871475 -0.017 -0.0120728 -0.00866623 -0.0174167 -0.0120759 -0.00866529 -0.0174167 -0.0121745 -0.00863281 -0.0174167 -0.0124585 -0.00850536 -0.017 -0.0125394 -0.00845867 -0.0174167 -0.0127192 -0.00833539 -0.017 -0.0130076 -0.00806424 -0.017 -0.0133345 -0.00754669 -0.0174167 -0.0133828 -0.00742454 -0.0174167 -0.0134153 -0.00732591 -0.017 -0.0134348 -0.00725656 -0.0174167 -0.0134426 -0.00722572 -0.0174167 -0.0134647 -0.00712425 -0.0174167 -0.0134814 -0.00702177 -0.017 -0.0135 -0.00675 -0.019 -0.0117728 -0.00873131 -0.019 -0.0118738 -0.00871475 -0.0174167 -0.0119757 -0.0086926 -0.019 -0.0120728 -0.00866623 -0.0174167 -0.0122714 -0.00859527 -0.019 -0.0122657 -0.00859762 -0.019 -0.0127087 -0.00834344 -0.0174167 -0.0127999 -0.00826997 -0.019 -0.0127887 -0.00827947 -0.0174167 -0.0129505 -0.00812702 -0.0174167 -0.01302 -0.00804988 -0.019 -0.0133345 -0.00754669 -0.019 -0.0134348 -0.00725656 -0.0174167 -0.0134989 -0.0068149 -0.019 -0.0121701 -0.00863439 -0.0174167 -0.0126353 -0.00839654 -0.019 -0.0129383 -0.00813969 -0.019 -0.0130076 -0.00806424 -0.019 -0.0130729 -0.00798534 -0.0174167 -0.0130854 -0.00796924 -0.0174167 -0.0131465 -0.00788531 -0.0174167 -0.0132032 -0.00779832 -0.0174167 -0.0133027 -0.0076161 -0.0174167 -0.0134929 -0.00691856 -0.017 -0.0134063 -0.00735494 -0.017 -0.0133729 -0.00745173 -0.0174167 -0.0133453 -0.00752136 -0.0174167 -0.0132554 -0.0077085 -0.017 -0.0131909 -0.00781805 -0.017 -0.0130729 -0.00798534 -0.017 -0.0129383 -0.00813969 -0.0174167 -0.012877 -0.00820045 -0.0174167 -0.0125483 -0.00845324 -0.0174167 -0.0123661 -0.00855274 -0.017 -0.0121701 -0.00863439 -0.017 -0.0119739 -0.00869304 -0.019 -0.0116734 -0.00874754 -0.019 -0.0113821 -0.00882587 -0.019 -0.011288 -0.00886163 -0.019 -0.0111059 -0.00894712 -0.019 -0.0111959 -0.00890208 -0.019 -0.0121089 -0.0108297 -0.019 -0.0110182 -0.00899663 -0.019 -0.0120428 -0.0108607 -0.019 -0.0109332 -0.00905048 -0.019 -0.010851 -0.00910855 -0.019 -0.0107718 -0.00917067 -0.019 -0.0119791 -0.0108964 -0.019 -0.0105544 -0.0093798 -0.019 -0.0103709 -0.00961925 -0.019 -0.0117552 -0.0110827 -0.019 -0.0115107 -0.0118956 -0.019 -0.0115946 -0.0121745 -0.019 -0.011566 -0.0121072 -0.019 -0.0117085 -0.0123612 -0.019 -0.0118603 -0.0125186 -0.019 -0.0119791 -0.0126036 -0.019 -0.0120428 -0.0126393 -0.019 -0.0121089 -0.0126703 -0.019 -0.0121771 -0.0126964 -0.019 -0.0124635 -0.0127493 -0.019 -0.0125365 -0.0127493 -0.019 -0.0126094 -0.012744 -0.019 -0.0126816 -0.0127334 -0.019 -0.0127529 -0.0127175 -0.019 -0.0128911 -0.0126703 -0.019 -0.0129572 -0.0126393 -0.019 -0.0130819 -0.0125633 -0.019 -0.0132915 -0.0123612 -0.019 -0.0134054 -0.0121745 -0.019 -0.013434 -0.0121072 -0.019 -0.0134973 -0.011823 -0.019 -0.0134063 -0.00735494 -0.019 -0.0133729 -0.00745173 -0.019 -0.0132913 -0.00763955 -0.019 -0.0132434 -0.00773008 -0.019 -0.0131909 -0.00781805 -0.019 -0.013134 -0.00790321 -0.019 -0.0126255 -0.00840322 -0.019 -0.0125394 -0.00845867 -0.019 -0.0134973 0.013177 -0.019 -0.01725 0.01775 -0.019 -0.0134973 0.013323 -0.019 -0.0134576 0.0135381 -0.019 -0.013434 0.0136072 -0.019 -0.0134054 0.0136745 -0.019 -0.013334 0.0138018 -0.019 -0.013372 0.0137394 -0.019 -0.0132448 0.0139173 -0.019 -0.0132915 0.0138612 -0.019 -0.0131397 
0.0140186 -0.019 -0.0129572 0.0141393 -0.019 -0.0126816 0.0142334 -0.019 -0.0125365 0.0142493 -0.019 -0.0122471 0.0142175 -0.019 -0.0121771 0.0141964 -0.019 -0.0121089 0.0141703 -0.019 -0.0119791 0.0141036 -0.019 -0.0119181 0.0140633 -0.019 -0.0118603 0.0140186 -0.019 -0.0118059 0.0139699 -0.019 0.0117552 0.0139173 -0.019 0.0117085 0.0138612 -0.019 0.011566 0.0136072 -0.019 -0.0115027 0.013177 -0.019 0.0115027 0.013177 -0.019 0.0115107 0.0131044 -0.019 -0.0115424 0.0129619 -0.019 -0.011628 0.0127606 -0.019 0.0116279 0.0127606 -0.019 0.0117552 0.0125827 -0.019 -0.0118059 0.0125301 -0.019 -0.0118603 0.0124814 -0.019 -0.0119791 0.0123964 -0.019 -0.0119181 0.0124367 -0.019 -0.0121771 0.0123036 -0.019 -0.0121089 0.0123297 -0.019 -0.0128229 0.0123036 -0.019 -0.0127529 0.0122825 -0.019 -0.0129572 0.0123607 -0.019 -0.013334 0.0126982 -0.019 -0.013372 0.0127606 -0.019 -0.0135 0.01075 -0.019 0.01725 0.01775 -0.019 0.01725 -0.01625 -0.019 0.0135 -0.00225 -0.019 0.0147817 0.000540508 -0.019 0.0148495 0.000563053 -0.019 0.0150406 0.000658748 -0.019 0.0150993 0.000699461 -0.019 0.0155 -0.00225 -0.019 0.0152557 0.00084514 -0.019 0.0154096 0.00108459 -0.019 0.0154595 0.00121827 -0.019 0.0154369 0.00115054 -0.019 0.0154771 0.00128744 -0.019 0.0154975 0.00142866 -0.019 0.0135229 -0.000287434 -0.019 0.0136995 9.92789e-05 -0.019 0.0145 0.000500001 -0.019 0.0137929 0.000207109 -0.019 0.0139007 0.000300542 -0.019 0.0141505 0.00043695 -0.019 0.0145713 0.00050255 -0.019 0.0155 0.0085 -0.019 0.0154369 0.00884947 -0.019 0.0153777 0.00897925 -0.019 0.0134973 0.013323 -0.019 0.0134893 0.0133956 -0.019 0.0134054 0.0136745 -0.019 0.013334 0.0138018 -0.019 0.0132915 0.0138612 -0.019 0.0131941 0.0139699 -0.019 0.0130819 0.0140633 -0.019 0.0126816 0.0142334 -0.019 0.0125365 0.0142493 -0.019 0.0124635 0.0142493 -0.019 0.0123906 0.014244 -0.019 0.0122471 0.0142175 -0.019 0.0121089 0.0141703 -0.019 0.0150406 0.00934126 -0.019 0.0149154 0.00940963 -0.019 0.0135 0.01075 -0.019 0.0147817 0.0094595 -0.019 0.0146423 0.00948982 -0.019 0.0143577 0.00951018 -0.019 0.0142874 0.00952286 -0.019 0.0141505 0.00956305 -0.019 0.0145 0.0095 -0.019 0.0139594 0.00965875 -0.019 0.0139007 0.00969946 -0.019 0.0138451 0.00974425 -0.019 0.0136587 0.00995936 -0.019 0.0136223 0.0100208 -0.019 0.0135102 0.0103577 -0.019 -0.0134974 -0.00685238 -0.019 -0.0134895 -0.0069545 -0.019 -0.0134764 -0.00705608 -0.019 -0.0134582 -0.00715685 -0.019 -0.0128653 -0.0082115 -0.019 -0.0130209 -0.0108964 -0.019 -0.0127529 -0.0107825 -0.019 -0.0124506 -0.00850965 -0.019 -0.0123593 -0.008556 -0.019 -0.0126094 -0.010756 -0.019 -0.0119739 -0.00869304 -0.019 -0.0122471 -0.0107825 -0.019 -0.0117552 0.0139173 -0.019 -0.011628 0.0137394 -0.019 -0.011566 0.0136072 -0.019 -0.0115424 0.0135381 -0.019 -0.0115107 0.0133956 -0.019 -0.0115027 0.013323 -0.019 0.0115 0.01325 -0.019 0.011566 0.0128928 -0.019 -0.011666 0.0126982 -0.019 -0.0117552 0.0125827 -0.019 0.0118603 0.0124814 -0.019 0.0119791 0.0123964 -0.019 0.0120428 0.0123607 -0.019 0.0121089 0.0123297 -0.019 0.0126094 0.012256 -0.019 0.0130819 0.0124367 -0.019 0.0131397 0.0124814 -0.019 0.0132448 0.0125827 -0.019 0.0132915 0.0126388 -0.019 0.013372 0.0127606 -0.019 0.013434 0.0128928 -0.019 0.0134576 0.0129619 -0.019 -0.0115027 -0.011823 -0.019 -0.0115 -0.01175 -0.019 -0.010318 -0.00970488 -0.019 -0.0115107 -0.0116044 -0.019 -0.0115239 -0.0115326 -0.0160139 -0.0125 -0.00225 -0.0161287 -0.0126213 -0.00225738 -0.016268 -0.012752 -0.001 -0.0163653 -0.0128341 -0.001 -0.0163103 -0.0127886 -0.00229254 -0.0164374 -0.0128905 
-0.00232939 -0.0165026 -0.0129386 -0.0023513 -0.0166362 -0.013029 -0.00240137 -0.0167869 -0.013119 -0.001 -0.0167737 -0.0131116 -0.00245886 -0.0169147 -0.0131864 -0.00252282 -0.0169863 -0.0132209 -0.00255694 -0.0171317 -0.0132838 -0.00262895 -0.0172052 -0.0133122 -0.0026666 -0.0172792 -0.0133386 -0.00270523 -0.0173715 -0.0133685 -0.001 -0.0174284 -0.0133853 -0.00278498 -0.0175034 -0.0134056 -0.0028259 -0.0175786 -0.0134239 -0.00286739 -0.0179545 -0.0134854 -0.00307981 -0.0180291 -0.0134919 -0.00312261 -0.01825 -0.0135 -0.001 -0.0170143 -0.0132337 -0.001 -0.0160841 -0.0125758 -0.001 -0.015998 -0.012482 -0.001 -0.0159159 -0.0123847 -0.001 -0.0155434 -0.0117939 -0.00225 -0.0155712 -0.0118506 -0.001 -0.0154932 -0.0116832 -0.00225 -0.0154475 -0.0115705 -0.00225 -0.0154214 -0.0114994 -0.001 -0.0154064 -0.011456 -0.00225 -0.01537 -0.01134 -0.00225 -0.0153173 -0.011132 -0.001 -0.0152894 -0.0109844 -0.00225 -0.0152743 -0.0108811 -0.001 -0.01525 -0.0105 -0.00225 -0.0158593 -0.0123123 -0.00225 -0.015838 -0.0122839 -0.001 -0.0157645 -0.0121799 -0.001 -0.0156954 -0.0120729 -0.001 -0.0155981 -0.0119026 -0.00225 -0.0155163 -0.0117357 -0.001 -0.0154663 -0.0116186 -0.001 -0.0153114 -0.011104 -0.00225 -0.0152932 -0.011007 -0.001 -0.0152722 -0.010864 -0.00225 -0.0152527 -0.0106273 -0.001 -0.0152525 -0.0106216 -0.00225 -0.01525 -0.0105 -0.001 -0.01525 0.0105 -0.00225 -0.0152608 0.0107544 -0.000999999 -0.0153173 0.011132 -0.000999999 -0.0154214 0.0114994 -0.000999999 -0.0155163 0.0117357 -0.000999999 -0.0155712 0.0118506 -0.00225 -0.0155712 0.0118506 -0.000999999 -0.015631 0.0119631 -0.000999999 -0.0156954 0.0120729 -0.00225 -0.0156954 0.0120729 -0.000999999 -0.0157645 0.0121799 -0.000999999 -0.015838 0.0122839 -0.00225 -0.015998 0.012482 -0.000999999 -0.0160841 0.0125758 -0.000999999 -0.0160841 0.0125758 -0.00225 -0.0166771 0.0130546 -0.000999999 -0.0168994 0.0131788 -0.00225 -0.017743 0.0134568 -0.000999999 -0.017743 0.0134568 -0.00225 -0.0178689 0.0134757 -0.00225 -0.0181227 0.0134973 -0.00225 -0.0152608 0.0107544 -0.00225 -0.0152743 0.0108811 -0.000999999 -0.0152932 0.011007 -0.000999999 -0.0152932 0.011007 -0.00225 -0.0153173 0.011132 -0.00225 -0.0153468 0.0112559 -0.00225 -0.0153815 0.0113785 -0.00225 -0.0154214 0.0114994 -0.00225 -0.0154663 0.0116186 -0.000999999 -0.0155163 0.0117357 -0.00225 -0.015838 0.0122839 -0.000999999 -0.0159159 0.0123847 -0.00225 -0.015998 0.012482 -0.00225 -0.016268 0.012752 -0.00225 -0.0163653 0.0128341 -0.000999999 -0.0164661 0.012912 -0.00225 -0.0165701 0.0129855 -0.00225 -0.0166771 0.0130546 -0.00225 -0.0167869 0.013119 -0.000999999 -0.0167869 0.013119 -0.00225 -0.0168994 0.0131788 -0.000999999 -0.0172506 0.0133286 -0.00225 -0.0173715 0.0133685 -0.00225 -0.0174941 0.0134032 -0.00225 -0.017618 0.0134327 -0.000999999 -0.0179956 0.0134892 -0.000999999 -0.01525 0.0135 0.00226795 -0.017 0.0135 -0.000499999 -0.0135 0.0155 -0.01625 0.01525 0.0135 -0.00225 0.0135 0.01525 -0.00225 -0.0152527 0.0106273 -0.00225 -0.0152743 0.0108811 -0.00225 -0.0154663 0.0116186 -0.00225 -0.0135 0.01525 -0.00225 -0.015631 0.0119631 -0.00225 -0.0157645 0.0121799 -0.00225 -0.0161742 0.0126659 -0.00225 -0.0163653 0.0128341 -0.00225 -0.0170143 0.0132337 -0.00225 -0.0171314 0.0132837 -0.00225 -0.017618 0.0134327 -0.00225 -0.0179956 0.0134892 -0.00225 -0.01825 0.0135 -0.00225 0.0125 -0.0135 -0.00225 -0.0159347 -0.0124077 -0.00225 -0.0157878 -0.0122139 -0.00225 -0.0157204 -0.0121127 -0.00225 -0.0156571 -0.0120089 -0.00225 -0.0153383 -0.0112226 -0.00225 -0.0152599 -0.010743 -0.00225 0.0135 0.0155 -0.00225 
-0.0135 0.01525 0.01775 -0.0135 0.0155 -0.00225 -0.0135 0.017 -0.00425 -0.0135 0.019 0.01075 -0.01825 -0.0135 -0.00325 -0.0134952 -0.0134952 -0.00315198 -0.0135625 -0.0134985 -0.00319523 -0.0166875 -0.0134985 -0.00319523 -0.0181032 -0.0134964 -0.00316531 -0.0166875 -0.0134918 -0.00312258 -0.0166875 -0.0134799 -0.00305043 -0.0134808 -0.0134808 -0.00305491 -0.0135625 -0.0134799 -0.00305043 -0.0181769 -0.0134991 -0.0032078 -0.0178796 -0.013477 -0.00303698 -0.0178045 -0.0134667 -0.00299423 -0.0176539 -0.0134402 -0.00290933 -0.0166875 -0.0134402 -0.00290946 -0.0135625 -0.0133439 -0.00271356 -0.0135625 -0.0133025 -0.00265338 -0.0132071 -0.0132071 -0.00254289 -0.013241 -0.013241 -0.00257844 -0.0135625 -0.0132071 -0.00254289 -0.0166875 -0.0134627 -0.00297944 -0.0135625 -0.0134404 -0.00290987 -0.0177292 -0.0134545 -0.00295165 -0.015125 -0.013413 -0.00284215 -0.015125 -0.0133808 -0.00277659 -0.0166875 -0.013413 -0.00284215 -0.0166875 -0.0133808 -0.00277659 -0.0166875 -0.0133439 -0.00271356 -0.0131716 -0.0131716 -0.00250905 -0.0173536 -0.013363 -0.00274473 -0.015125 -0.0133025 -0.00265338 -0.0170587 -0.0132533 -0.00259235 -0.015125 -0.0132071 -0.00254289 -0.0135625 -0.0130966 -0.00244747 -0.0131344 -0.0131344 -0.00247699 -0.0135625 -0.0131536 -0.00249316 -0.0166875 -0.0132568 -0.0025964 -0.0166875 -0.0132071 -0.00254289 -0.0166875 -0.0131536 -0.00249316 -0.015125 -0.0130966 -0.00244747 -0.0166875 -0.0130966 -0.00244747 -0.0167045 -0.0130713 -0.00242925 -0.015125 -0.0130364 -0.00240606 -0.0135625 -0.0129734 -0.00236916 -0.0129714 -0.0129714 -0.00236808 -0.0135625 -0.0130364 -0.00240606 -0.0168438 -0.01315 -0.00249009 -0.015125 -0.0129078 -0.00233695 -0.0163733 -0.0128405 -0.00230975 -0.0162485 -0.0127347 -0.00227794 -0.015125 -0.0126996 -0.00227012 -0.0126467 -0.0126467 -0.00226082 -0.012743 -0.012743 -0.00227997 -0.0135625 -0.0128402 -0.00230964 -0.0128827 -0.0128827 -0.00232612 -0.0135625 -0.0129078 -0.00233695 -0.0165689 -0.0129847 -0.00237534 -0.015125 -0.0129734 -0.00236916 -0.015125 -0.0128403 -0.0023097 -0.015125 -0.0127706 -0.0022873 -0.0135625 -0.0127706 -0.0022873 -0.0135625 -0.0126996 -0.00227012 -0.016188 -0.012679 -0.00226615 -0.015125 -0.0126275 -0.00225816 -0.012598 -0.012598 -0.00225482 -0.015125 -0.0125548 -0.0022515 -0.0160706 -0.0125616 -0.0022519 -0.0125 -0.0125 -0.00225 -0.0135625 -0.0125548 -0.0022515 -0.0127903 -0.0127903 -0.00229306 -0.0129276 -0.0129276 -0.00234601 -0.0135625 -0.0132568 -0.0025964 -0.013273 -0.013273 -0.00261561 -0.0133577 -0.0133577 -0.0027359 -0.015125 -0.0133439 -0.00271356 -0.0135625 -0.0133808 -0.00277659 -0.0134239 -0.0134239 -0.00286732 -0.0135625 -0.013413 -0.00284215 -0.0135625 -0.0134627 -0.00297944 -0.0134569 -0.0134569 -0.00295971 -0.015125 -0.0134918 -0.00312254 -0.015125 -0.0134985 -0.00319523 -0.0135625 -0.0134918 -0.00312249 -0.015125 -0.0134799 -0.00305043 -0.015125 -0.0134627 -0.00297944 -0.015125 -0.0134403 -0.00290967 -0.015125 -0.0132568 -0.0025964 -0.0166875 -0.0133025 -0.00265338 -0.015125 -0.0131536 -0.00249316 -0.0135625 -0.0126275 -0.00225816 -0.0125491 -0.0125491 -0.0022512 -0.0125 -0.01525 -0.00225 -0.0126275 -0.0138125 -0.00225816 -0.0126951 -0.0126951 -0.00226921 -0.0127126 -0.01525 -0.00227285 -0.0128495 -0.01525 -0.00231305 -0.0129154 -0.01525 -0.00234037 -0.0130406 -0.01525 -0.00240875 -0.0132071 -0.01525 -0.00254289 -0.0132568 -0.0138125 -0.0025964 -0.0133032 -0.0133032 -0.0026543 -0.0132071 -0.0138125 -0.00254289 -0.0130966 -0.0138125 -0.00244747 -0.0130993 -0.01525 -0.00244946 -0.0126996 -0.0138125 -0.00227012 -0.0127817 
-0.01525 -0.00229051 -0.0128401 -0.0138125 -0.00230962 -0.0127706 -0.0138125 -0.0022873 -0.0128369 -0.0128369 -0.00230846 -0.0129078 -0.0138125 -0.00233695 -0.0129734 -0.0138125 -0.00236916 -0.0130141 -0.0130141 -0.00239227 -0.0130364 -0.0138125 -0.00240606 -0.0130556 -0.0130556 -0.00241853 -0.0130957 -0.0130957 -0.00244679 -0.0131536 -0.0138125 -0.00249316 -0.0131549 -0.01525 -0.00249425 -0.0133005 -0.01525 -0.00265072 -0.0133439 -0.0138125 -0.00271356 -0.0133315 -0.0133315 -0.00269443 -0.0133025 -0.0138125 -0.00265338 -0.0133808 -0.0138125 -0.00277659 -0.0133777 -0.01525 -0.00277075 -0.013413 -0.0138125 -0.00284215 -0.013404 -0.013404 -0.00282245 -0.0133819 -0.0133819 -0.0027786 -0.0134096 -0.01525 -0.00283459 -0.0134369 -0.01525 -0.00290054 -0.0134415 -0.0134415 -0.00291311 -0.0134595 -0.01525 -0.00296827 -0.0134627 -0.0138125 -0.00297944 -0.01347 -0.01347 -0.00300702 -0.0134404 -0.0138125 -0.00290989 -0.0134799 -0.0138125 -0.00305043 -0.0134892 -0.0134892 -0.00310327 -0.0134898 -0.01525 -0.00310769 -0.0134918 -0.0138125 -0.00312249 -0.0134975 -0.01525 -0.00317866 -0.0134985 -0.0138125 -0.00319523 -0.0134988 -0.0134988 -0.00320093 -0.0125548 -0.0138125 -0.0022515 -0.019 -0.0135 -0.00675 -0.0135 -0.0135 -0.00325 -0.0135 -0.01525 -0.00325 -0.0135 -0.0135 -0.01625 0.0134975 -0.0135 -0.00317866 0.0134975 -0.01525 -0.00317866 0.0134918 -0.0136667 -0.00312249 0.0134898 -0.0135 -0.00310769 0.0134627 -0.0136667 -0.00297944 0.0134369 -0.0135 -0.00290054 0.013413 -0.0136667 -0.00284215 0.0133808 -0.0136667 -0.00277659 0.0133005 -0.01525 -0.00265072 0.0132071 -0.0136667 -0.00254289 0.0131536 -0.0136667 -0.00249317 0.0132071 -0.0135 -0.00254289 0.0132568 -0.0136667 -0.0025964 0.0132557 -0.0135 -0.00259514 0.0133025 -0.0136667 -0.00265339 0.0134898 -0.01525 -0.00310769 0.0134771 -0.01525 -0.00303744 0.0134771 -0.0135 -0.00303744 0.0134595 -0.01525 -0.00296827 0.0134369 -0.01525 -0.00290054 0.0133777 -0.0135 -0.00277075 0.0133439 -0.0136667 -0.00271356 0.0133777 -0.01525 -0.00277075 0.0132557 -0.01525 -0.00259514 0.0130966 -0.0136667 -0.00244747 0.0130364 -0.0136667 -0.00240607 0.0129734 -0.0136667 -0.00236916 0.0129792 -0.01525 -0.00237232 0.0128495 -0.0135 -0.00231305 0.0129078 -0.0136667 -0.00233695 0.0129154 -0.01525 -0.00234037 0.0128495 -0.01525 -0.00231305 0.0127817 -0.01525 -0.00229051 0.0127706 -0.0136667 -0.0022873 0.0126423 -0.01525 -0.00226018 0.0126275 -0.0136667 -0.00225816 0.0125713 -0.0135 -0.00225255 0.0125713 -0.01525 -0.00225255 0.0125548 -0.0136667 -0.0022515 0.0127126 -0.0135 -0.00227285 0.0126423 -0.0135 -0.00226018 0.0126996 -0.0136667 -0.00227012 0.0128401 -0.0136667 -0.00230962 0.0134404 -0.0136667 -0.00290989 0.0134799 -0.0136667 -0.00305043 0.0135 -0.01525 -0.00325 0.0134985 -0.0136667 -0.00319523 -0.0185 0.0144452 0.0004985 -0.0185 0.0143725 0.00049184 -0.0185 0.0143004 0.000479887 -0.017 0.0142183 0.000459495 -0.017 0.0139594 0.000341255 -0.0185 0.0139034 0.000302528 -0.0185 0.0138464 0.000256837 -0.019 0.0143577 0.000489824 -0.019 0.0142874 0.000477148 -0.0185 0.0141599 0.000440385 -0.019 0.0139594 0.000341255 -0.0185 0.0137929 0.000207109 -0.019 0.0138451 0.00025575 -0.019 0.0137442 0.000154863 -0.0185 0.0136975 9.66182e-05 -0.0185 0.0137432 0.000153606 -0.019 0.0142183 0.000459495 -0.019 0.0140846 0.000409633 -0.0185 0.0140266 0.000380844 -0.019 0.0140208 0.000377681 -0.019 0.0136587 4.06413e-05 -0.0185 0.0136561 3.64451e-05 -0.017 0.0135904 -8.45839e-05 -0.0185 0.0136192 -2.65889e-05 -0.019 0.0136223 -2.07505e-05 -0.019 0.0135904 -8.45838e-05 -0.019 0.013563 
-0.000150534 -0.0185 0.0135869 -9.21502e-05 -0.017 0.013563 -0.000150534 -0.0185 0.0135596 -0.000159886 -0.019 0.0135405 -0.000218266 -0.017 0.0135405 -0.000218266 -0.0185 0.0135373 -0.000229437 -0.017 0.0135102 -0.000357684 -0.0185 0.0135201 -0.000300433 -0.019 0.0135102 -0.000357684 -0.0185 0.0135015 -0.000445231 -0.0185 0.0135082 -0.00037249 -0.019 0.0135025 -0.00042866 -0.019 0.0135 -0.000499999 -0.0185 0.0139636 0.000343938 -0.0185 0.0140922 0.000413051 -0.0185 0.0142294 0.000462703 -0.017 0.0142874 0.000477148 -0.019 0.0144287 0.000497453 -0.019 0.0147126 0.000522855 -0.018 0.0150966 0.000697475 -0.018 0.0151536 0.000743166 -0.019 0.0152071 0.000792894 -0.019 0.0151549 0.000744253 -0.017 0.0150406 0.000658748 -0.017 0.0152071 0.000792894 -0.019 0.0153005 0.000900724 -0.019 0.0153413 0.000959362 -0.018 0.0152568 0.000846397 -0.018 0.0152071 0.000792894 -0.018 0.0147706 0.0005373 -0.017 0.0149154 0.00059037 -0.018 0.0150364 0.000656065 -0.017 0.0151549 0.000744253 -0.017 0.0153005 0.000900724 -0.018 0.0153025 0.000903385 -0.018 0.0153808 0.00102659 -0.019 0.0153777 0.00102075 -0.018 0.0153439 0.000963558 -0.017 0.0153777 0.00102075 -0.017 0.0154096 0.00108459 -0.018 0.015413 0.00109215 -0.017 0.0154369 0.00115054 -0.018 0.0154404 0.00115989 -0.018 0.0154627 0.00122944 -0.017 0.0154771 0.00128744 -0.018 0.0154799 0.00130044 -0.018 0.0154918 0.00137249 -0.019 0.0154898 0.00135769 -0.018 0.0154985 0.00144523 -0.019 0.0155 0.0015 -0.019 0.0149792 0.000622322 -0.018 0.0149734 0.000619159 -0.019 0.0149154 0.00059037 -0.018 0.0149078 0.000586952 -0.018 0.0148401 0.000559618 -0.019 0.0146423 0.000510179 -0.018 0.0146996 0.000520116 -0.018 0.0146275 0.000508163 -0.017 0.0145 0.000500001 -0.018 0.0145548 0.000501503 -0.019 0.0154771 0.00871257 -0.018 0.0154627 0.00877056 -0.018 0.015413 0.00890785 -0.018 0.0152568 0.00915361 -0.019 0.0152071 0.00920711 -0.018 0.0154799 0.00869957 -0.018 0.0154918 0.00862751 -0.017 0.0154771 0.00871257 -0.017 0.0153777 0.00897925 -0.018 0.0153025 0.00909662 -0.017 0.0152071 0.00920711 -0.017 0.0151549 0.00925575 -0.017 0.0150993 0.00930054 -0.018 0.0150966 0.00930253 -0.019 0.0150993 0.00930054 -0.018 0.0151536 0.00925684 -0.019 0.0151549 0.00925575 -0.017 0.0154369 0.00884947 -0.018 0.0153808 0.00897341 -0.017 0.0153005 0.00909928 -0.017 0.0152557 0.00915486 -0.018 0.0152071 0.00920711 -0.017 0.0150406 0.00934126 -0.018 0.0150364 0.00934394 -0.017 0.0149792 0.00937768 -0.018 0.0149734 0.00938084 -0.019 0.0149792 0.00937768 -0.019 0.0148495 0.00943695 -0.017 0.0149154 0.00940963 -0.018 0.0149078 0.00941305 -0.018 0.0148401 0.00944039 -0.018 0.0147706 0.0094627 -0.019 0.0147126 0.00947715 -0.017 0.0147126 0.00947715 -0.017 0.0146423 0.00948982 -0.018 0.0146996 0.00947989 -0.018 0.0146275 0.00949184 -0.019 0.0145713 0.00949745 -0.018 0.0145548 0.0094985 -0.017 0.0145 0.0095 -0.019 0.0152557 0.00915486 -0.019 0.0153005 0.00909928 -0.019 0.0153413 0.00904064 -0.018 0.0153439 0.00903645 -0.019 0.0154096 0.00891542 -0.018 0.0154404 0.00884011 -0.019 0.0154595 0.00878173 -0.019 0.0154898 0.00864232 -0.019 0.0154975 0.00857134 -0.017 0.0154975 0.00857134 -0.018 0.0154985 0.00855477 -0.019 0.0135 0.0105 -0.017 0.0135229 0.0102874 -0.018 0.0135373 0.0102294 -0.017 0.0135904 0.0100846 -0.017 0.0136223 0.0100208 -0.017 0.0136587 0.00995936 -0.018 0.0136561 0.00996356 -0.018 0.0136975 0.00990339 -0.018 0.0137432 0.0098464 -0.018 0.0137929 0.0097929 -0.017 0.0137929 0.0097929 -0.018 0.0135201 0.0103004 -0.019 0.0135229 0.0102874 -0.019 0.0135405 0.0102183 -0.018 0.0135596 
[Raw mesh geometry data: long float arrays of x y z vertex coordinates belonging to a model/mesh file carried in this changeset. The numeric blob is not human-readable and is collapsed to this placeholder; the data continues beyond this excerpt.]
0.0126388 0.0180625 -0.011666 0.0126982 0.019 -0.011666 0.0126982 0.019 -0.011628 0.0127606 0.019 -0.0115946 0.0128255 0.019 -0.0115 0.01325 0.0180625 -0.0115107 0.0133956 0.019 -0.0115027 0.013323 0.019 -0.0115239 0.0134674 0.0180625 -0.0115424 0.0135381 0.0180625 -0.0115946 0.0136745 0.019 -0.011566 0.0136072 0.0180625 -0.011628 0.0137394 0.019 -0.0117085 0.0138612 0.0180625 -0.0122471 0.0142175 0.019 -0.0125365 0.0142493 0.0180625 -0.0126816 0.0142334 0.019 -0.0127529 0.0142175 0.019 -0.0128911 0.0141703 0.019 -0.0129572 0.0141393 0.019 -0.0130819 0.0140633 0.0180625 -0.0131397 0.0140186 0.019 -0.0131397 0.0140186 0.0180625 -0.0132448 0.0139173 0.0180625 -0.013372 0.0137394 0.0180625 -0.0134054 0.0136745 0.019 -0.0134054 0.0136745 0.0180625 -0.0134576 0.0135381 0.0180625 -0.013434 0.0136072 0.017125 -0.013372 0.0137394 0.0180625 -0.013334 0.0138018 0.0180625 -0.0132915 0.0138612 0.019 -0.0132448 0.0139173 0.017125 -0.0132915 0.0138612 0.0180625 -0.0131941 0.0139699 0.017125 -0.0131397 0.0140186 0.0180625 -0.0130819 0.0140633 0.0180625 -0.0130209 0.0141036 0.0180625 -0.0128911 0.0141703 0.0180625 -0.0129572 0.0141393 0.017125 -0.0128911 0.0141703 0.019 -0.0128229 0.0141964 0.0180625 -0.0128229 0.0141964 0.0180625 -0.0127529 0.0142175 0.017125 -0.0128229 0.0141964 0.017125 -0.0127529 0.0142175 0.017125 -0.0126816 0.0142334 0.0180625 -0.0126094 0.014244 0.017125 -0.0125365 0.0142493 0.0180625 -0.0125365 0.0142493 0.0180625 -0.0124635 0.0142493 0.019 -0.0123906 0.014244 0.017125 -0.0123906 0.014244 0.0180625 -0.0123906 0.014244 0.0180625 -0.0123184 0.0142334 0.017125 -0.0123184 0.0142334 0.017125 -0.0122471 0.0142175 0.0180625 -0.0121771 0.0141964 0.019 -0.0121089 0.0141703 0.0180625 -0.0121089 0.0141703 0.0180625 -0.0120428 0.0141393 0.017125 -0.0121089 0.0141703 0.019 -0.0119791 0.0141036 0.0180625 -0.0119791 0.0141036 0.0180625 -0.0119181 0.0140633 0.0180625 -0.0118059 0.0139699 0.0180625 -0.0118603 0.0140186 0.017125 -0.0117552 0.0139173 0.0180625 -0.0117552 0.0139173 0.0180625 -0.0117085 0.0138612 0.017125 -0.0117085 0.0138612 0.0180625 -0.011666 0.0138018 0.017125 -0.011666 0.0138018 0.017125 -0.011628 0.0137394 0.017125 -0.0115946 0.0136745 0.0180625 -0.011566 0.0136072 0.017125 -0.011566 0.0136072 0.017125 -0.0115239 0.0134674 0.0180625 -0.0115239 0.0134674 0.0180625 -0.0115027 0.013323 0.017125 -0.0115027 0.013323 0.017125 -0.0115 0.01325 0.0180625 -0.0115 0.01325 0.019 -0.0115027 0.013177 0.0180625 -0.0115027 0.013177 0.017125 -0.0115027 0.013177 0.019 -0.0115107 0.0131044 0.0180625 -0.0115107 0.0131044 0.0180625 -0.0115239 0.0130326 0.019 -0.0115239 0.0130326 0.017125 -0.0115107 0.0131044 0.017125 -0.0115239 0.0130326 0.0180625 -0.0115424 0.0129619 0.017125 -0.0115424 0.0129619 0.0180625 -0.011566 0.0128928 0.0180625 -0.0115946 0.0128255 0.0180625 -0.011628 0.0127606 0.017125 -0.011666 0.0126982 0.019 -0.0117552 0.0125827 0.0180625 -0.0117085 0.0126388 0.0180625 -0.0117552 0.0125827 0.017125 -0.0117552 0.0125827 0.0180625 -0.0118059 0.0125301 0.017125 -0.0118059 0.0125301 0.019 -0.0118603 0.0124814 0.0180625 -0.0118603 0.0124814 0.017125 -0.0118603 0.0124814 0.0180625 -0.0119181 0.0124367 0.0180625 -0.0119791 0.0123964 0.017125 -0.0119791 0.0123964 0.0180625 -0.0121089 0.0123297 0.0180625 -0.0121771 0.0123036 0.017125 -0.0121771 0.0123036 0.0180625 -0.0122471 0.0122825 0.017125 -0.0123184 0.0122666 0.0180625 -0.0123184 0.0122666 0.0180625 -0.0123906 0.012256 0.017125 -0.0123906 0.012256 0.017125 -0.0124635 0.0122507 0.0180625 -0.0125365 0.0122507 0.0180625 -0.0124635 0.0122507 
0.019 -0.0126094 0.012256 0.017125 -0.0126094 0.012256 0.0180625 -0.0126094 0.012256 0.019 -0.0126816 0.0122666 0.0180625 -0.0126816 0.0122666 0.017125 -0.0126816 0.0122666 0.0180625 -0.0127529 0.0122825 0.0180625 -0.0128229 0.0123036 0.0180625 -0.0128911 0.0123297 0.0180625 -0.0129572 0.0123607 0.0180625 -0.0130209 0.0123964 0.017125 -0.0130209 0.0123964 0.0180625 -0.0131397 0.0124814 0.0180625 -0.0130819 0.0124367 0.017125 -0.0131397 0.0124814 0.019 -0.0131941 0.0125301 0.019 -0.0132448 0.0125827 0.017125 -0.0131941 0.0125301 0.0180625 -0.0132448 0.0125827 0.017125 -0.0132448 0.0125827 0.0180625 -0.0132915 0.0126388 0.0180625 -0.013334 0.0126982 0.017125 -0.013334 0.0126982 0.0180625 -0.013372 0.0127606 0.017125 -0.013372 0.0127606 0.0180625 -0.0134054 0.0128255 0.0180625 -0.013434 0.0128928 0.019 -0.0134576 0.0129619 0.0180625 -0.0134576 0.0129619 0.017125 -0.013434 0.0128928 0.0180625 -0.0134893 0.0131044 0.017125 -0.0134893 0.0131044 0.0180625 -0.0134973 0.013177 0.0180625 -0.0135 0.01325 0.016 -0.0134973 -0.011677 0.016 -0.0134893 -0.0116044 0.016 -0.0134576 -0.0114619 0.016 -0.0134054 -0.0113255 0.016 -0.0132915 -0.0111388 0.016 -0.0131941 -0.0110301 0.016 -0.0131397 -0.0109814 0.016 -0.0127529 -0.0107825 0.016 -0.0123906 -0.010756 0.016 -0.0122471 -0.0107825 0.016 -0.0121771 -0.0108036 0.016 -0.0119181 -0.0109367 0.016 -0.0118603 -0.0109814 0.016 -0.0117085 -0.0111388 0.016 -0.0116279 -0.0112606 0.016 -0.0115424 -0.0114619 0.016 -0.0115946 -0.0113255 0.016 -0.0065 -0.01625 0.016 -0.0118059 -0.0124699 0.016 -0.0135 -0.01625 0.016 -0.0119181 -0.0125633 0.016 -0.0122471 -0.0127175 0.016 -0.0124635 -0.0127493 0.016 -0.0126094 -0.012744 0.016 -0.0128229 -0.0126964 0.016 -0.0130819 -0.0125633 0.016 -0.0131397 -0.0125186 0.016 -0.0132448 -0.0124173 0.016 -0.013334 -0.0123018 0.016 -0.0132915 -0.0123612 0.016 -0.013372 -0.0122394 0.016 -0.013434 -0.0121072 0.016 -0.0134054 -0.0121745 0.016 -0.0134973 -0.011823 0.016 -0.011666 -0.0123018 0.016 -0.011566 -0.0121072 0.016 -0.0115424 -0.0120381 0.016 -0.0115107 -0.0118956 0.016 -0.0115 -0.01175 0.016 -0.0115027 -0.011677 0.016 -0.0115107 -0.0116044 0.016 -0.0115239 -0.0115326 0.016 0.0135 -0.00225 0.016 0.0134973 -0.011823 0.016 0.0135 -0.01175 0.016 0.0134761 -0.0115326 0.016 0.0134576 -0.0114619 0.016 0.0155 -0.00225 0.016 0.013372 -0.0112606 0.016 0.013334 -0.0111982 0.016 0.0132448 -0.0110827 0.016 0.0131397 -0.0109814 0.016 0.0130209 -0.0108964 0.016 0.0129572 -0.0108607 0.016 0.0128229 -0.0108036 0.016 0.0128911 -0.0108297 0.016 0.0127529 -0.0107825 0.016 0.0126816 -0.0107666 0.016 0.0124635 -0.0107507 0.016 0.0123184 -0.0107666 0.016 0.0121771 -0.0108036 0.016 0.0122471 -0.0107825 0.016 0.0118603 -0.0109814 0.016 0.0118059 -0.0110301 0.016 0.0117552 -0.0110827 0.016 0.011666 -0.0111982 0.016 0.0135 -0.00425 0.016 0.0117085 -0.0111388 0.016 0.0115946 -0.0113255 0.016 0.011566 -0.0113928 0.016 0.0115424 -0.0114619 0.016 0.0115239 -0.0115326 0.016 0.0115027 -0.011677 0.016 0.0115 -0.01175 0.016 0.0115239 -0.0119674 0.016 0.0115946 -0.0121745 0.016 0.0117085 -0.0123612 0.016 0.0118059 -0.0124699 0.016 0.0118603 -0.0125186 0.016 0.0119181 -0.0125633 0.016 0.0120428 -0.0126393 0.016 0.0121089 -0.0126703 0.016 0.0065 -0.01625 0.016 0.0155 -0.01625 0.016 0.0123184 -0.0127334 0.016 0.0126094 -0.012744 0.016 0.0125365 -0.0127493 0.016 0.0128229 -0.0126964 0.016 0.0130209 -0.0126036 0.016 0.0130819 -0.0125633 0.016 0.0131397 -0.0125186 0.016 0.013334 -0.0123018 0.016 0.013372 -0.0122394 0.016 0.0134054 -0.0121745 -0.017125 -0.0122471 0.0142175 
-0.017125 -0.0135 0.01775 -0.017125 -0.0124635 0.0142493 -0.017125 -0.0129572 0.0141393 -0.017125 -0.0128911 0.0141703 -0.017125 -0.0130209 0.0141036 -0.017125 -0.0121089 0.0141703 -0.017125 0.0120428 0.0141393 -0.017125 -0.0120428 0.0141393 -0.017125 0.0119791 0.0141036 -0.017125 0.0117552 0.0139173 -0.017125 -0.0118059 0.0139699 -0.017125 -0.0117085 0.0138612 -0.017125 -0.011628 0.0137394 -0.017125 0.0115946 0.0136745 -0.017125 -0.011566 0.0136072 -0.017125 0.0115239 0.0134674 -0.017125 0.0115107 0.0133956 -0.017125 0.0115027 0.013323 -0.017125 -0.0115946 0.0128255 -0.017125 0.011666 0.0126982 -0.017125 -0.011628 0.0127606 -0.017125 0.0117552 0.0125827 -0.017125 -0.0117552 0.0125827 -0.017125 -0.0118059 0.0125301 -0.017125 -0.0119181 0.0124367 -0.017125 -0.0121771 0.0123036 -0.017125 -0.0128229 0.0123036 -0.017125 -0.0128911 0.0123297 -0.017125 -0.0129572 0.0123607 -0.017125 -0.0130209 0.0123964 -0.017125 -0.0131397 0.0124814 -0.017125 -0.0135 0.01075 -0.017125 -0.0132915 0.0126388 -0.017125 -0.013434 0.0128928 -0.017125 -0.0134576 0.0129619 -0.017125 -0.0134893 0.0131044 -0.017125 -0.0119181 0.0140633 -0.017125 0.0115424 0.0135381 -0.017125 0.0115 0.01325 -0.017125 -0.0115 0.01325 -0.017125 0.0115027 0.013177 -0.017125 -0.0115027 0.013177 -0.017125 -0.0115107 0.0131044 -0.017125 0.0115424 0.0129619 -0.017125 -0.011566 0.0128928 -0.017125 0.0116279 0.0127606 -0.017125 0.0119791 0.0123964 -0.017125 0.0120428 0.0123607 -0.017125 0.0121089 0.0123297 -0.017125 0.0123184 0.0122666 -0.017125 0.0135 0.01075 -0.017125 0.0125365 0.0122507 -0.017125 0.0124635 0.0122507 -0.017125 0.0126816 0.0122666 -0.017125 0.0126094 0.012256 -0.017125 0.0128911 0.0123297 -0.017125 0.0128229 0.0123036 -0.017125 0.0130209 0.0123964 -0.017125 0.0130819 0.0124367 -0.017125 0.0131941 0.0125301 -0.017125 0.0134054 0.0128255 -0.017125 0.013372 0.0127606 -0.017125 0.013434 0.0128928 -0.017125 0.0134973 0.013177 -0.017125 0.0123906 0.014244 -0.017125 0.0124635 0.0142493 -0.017125 0.0126094 0.014244 -0.017125 0.0126816 0.0142334 -0.017125 0.0128911 0.0141703 -0.017125 0.0132448 0.0139173 -0.017125 0.0134054 0.0136745 -0.017125 0.013434 0.0136072 -0.017125 0.0134576 0.0135381 0.01525 -0.0135 0.01775 0.016 -0.0135 -0.00425 0.0135 -0.0135 -0.00325 0.0134595 -0.0135 -0.00296827 0.0134096 -0.0135 -0.00283459 0.0133413 -0.0135 -0.00270936 0.0133005 -0.0135 -0.00265072 0.01525 -0.0135 -0.00225 0.0131549 -0.0135 -0.00249425 0.0130993 -0.0135 -0.00244946 0.0130406 -0.0135 -0.00240875 0.0129792 -0.0135 -0.00237232 0.0129154 -0.0135 -0.00234037 0.0127817 -0.0135 -0.00229051 0.017125 0.0124635 0.0142493 0.017125 0.0128229 0.0141964 0.017125 0.0132448 0.0139173 0.017125 0.0135 0.01325 0.017125 -0.0120428 0.0141393 0.017125 -0.0119791 0.0141036 0.017125 -0.0119181 0.0140633 0.017125 0.0119791 0.0141036 0.017125 -0.0118603 0.0140186 0.017125 0.0118603 0.0140186 0.017125 0.0117085 0.0138612 0.017125 -0.0115424 0.0135381 0.017125 0.0115424 0.0135381 0.017125 0.0115107 0.0133956 0.017125 0.0115027 0.013323 0.017125 0.0115424 0.0129619 0.017125 -0.011566 0.0128928 0.017125 0.0115946 0.0128255 0.017125 -0.0117085 0.0126388 0.017125 0.0118059 0.0125301 0.017125 0.0119181 0.0124367 0.017125 0.0119791 0.0123964 0.017125 -0.0120428 0.0123607 0.017125 0.0120428 0.0123607 0.017125 -0.0121089 0.0123297 0.017125 -0.0122471 0.0122825 0.017125 -0.0125365 0.0122507 0.017125 -0.0127529 0.0122825 0.017125 -0.0128911 0.0123297 0.017125 -0.0128229 0.0123036 0.017125 -0.0129572 0.0123607 0.017125 -0.0130819 0.0124367 0.017125 -0.0132915 0.0126388 0.017125 
-0.0134054 0.0128255 0.017125 -0.0134576 0.0129619 0.017125 -0.0135 0.01075 0.017125 -0.0134761 0.0130326 0.017125 -0.0135 0.01325 0.017125 -0.0134973 0.013177 0.017125 -0.0118059 0.0139699 0.017125 0.0118059 0.0139699 0.017125 0.011666 0.0138018 0.017125 0.0115946 0.0136745 0.017125 0.011566 0.0136072 0.017125 0.0115239 0.0134674 0.017125 -0.0115107 0.0133956 0.017125 0.011566 0.0128928 0.017125 -0.0115946 0.0128255 0.017125 -0.011628 0.0127606 0.017125 0.0117085 0.0126388 0.017125 0.0117552 0.0125827 0.017125 0.0118603 0.0124814 0.017125 -0.0119181 0.0124367 0.017125 0.0121771 0.0123036 0.017125 0.0123184 0.0122666 0.017125 0.0124635 0.0122507 0.017125 0.0123906 0.012256 0.017125 0.0126816 0.0122666 0.017125 0.0127529 0.0122825 0.017125 0.0128229 0.0123036 0.017125 0.0128911 0.0123297 0.017125 0.0130209 0.0123964 0.017125 0.0130819 0.0124367 0.017125 0.0131397 0.0124814 0.017125 0.0132448 0.0125827 0.017125 0.0135 0.01075 0.017125 0.013434 0.0128928 0.017125 0.0134761 0.0130326 0.017125 0.0134893 0.0131044 0.017125 0.0134973 0.013177 0.017125 -0.0135 0.01775 0.017125 -0.0121771 0.0141964 0.017125 -0.0124635 0.0142493 0.017125 -0.0126094 0.014244 0.017125 -0.0129572 0.0141393 0.017125 -0.0130209 0.0141036 0.017125 -0.0130819 0.0140633 0.017125 -0.0131941 0.0139699 0.017125 -0.0132448 0.0139173 0.017125 -0.013334 0.0138018 0.017125 -0.0134054 0.0136745 0.017125 -0.013434 0.0136072 0.017125 -0.0134761 0.0134674 0.017125 -0.0134576 0.0135381 0.017125 -0.0134973 0.013323 0.019 -0.0065 -0.01625 -0.0134761 0.019 -0.0119674 -0.0134761 0.018 -0.0119674 -0.0134893 0.018 -0.0118956 -0.0134893 0.019 -0.0118956 -0.0134973 0.018 -0.011677 -0.0134893 0.019 -0.0116044 -0.0134761 0.019 -0.0115326 -0.013434 0.019 -0.0113928 -0.013372 0.018 -0.0112606 -0.0132915 0.018 -0.0111388 -0.0132915 0.019 -0.0111388 -0.0131941 0.018 -0.0110301 -0.0131941 0.019 -0.0110301 -0.0130819 0.019 -0.0109367 -0.0128911 0.019 -0.0108297 -0.0127529 0.018 -0.0107825 -0.0128229 0.019 -0.0108036 -0.0127529 0.019 -0.0107825 -0.0123184 0.018 -0.0107666 -0.0122471 0.018 -0.0107825 -0.0118603 0.018 -0.0109814 -0.0118603 0.019 -0.0109814 -0.0117085 0.018 -0.0111388 -0.0117552 0.019 -0.0110827 -0.0117085 0.019 -0.0111388 -0.011566 0.018 -0.0113928 -0.0115424 0.019 -0.0114619 -0.0115239 0.019 -0.0115326 -0.0115 0.019 -0.01175 -0.0115107 0.019 -0.0118956 -0.0115239 0.019 -0.0119674 -0.011566 0.019 -0.0121072 -0.0115946 0.018 -0.0121745 -0.0115946 0.019 -0.0121745 -0.011628 0.018 -0.0122394 -0.011628 0.019 -0.0122394 -0.0117085 0.019 -0.0123612 -0.0118603 0.018 -0.0125186 -0.0120428 0.019 -0.0126393 -0.0121771 0.019 -0.0126964 -0.0122471 0.018 -0.0127175 -0.0123184 0.019 -0.0127334 -0.0126094 0.019 -0.012744 -0.0128229 0.019 -0.0126964 -0.0128911 0.019 -0.0126703 -0.0130819 0.019 -0.0125633 -0.0131941 0.018 -0.0124699 -0.0131941 0.019 -0.0124699 -0.0132448 0.019 -0.0124173 -0.013372 0.018 -0.0122394 -0.013372 0.019 -0.0122394 -0.013434 0.018 -0.0121072 -0.0134893 0.017 -0.0118956 -0.0134761 0.017 -0.0119674 -0.0134576 0.018 -0.0120381 -0.013434 0.017 -0.0121072 -0.0134054 0.018 -0.0121745 -0.013334 0.017 -0.0123018 -0.013334 0.018 -0.0123018 -0.0132915 0.019 -0.0123612 -0.0132915 0.018 -0.0123612 -0.0132448 0.018 -0.0124173 -0.0132448 0.017 -0.0124173 -0.0131397 0.018 -0.0125186 -0.0131397 0.017 -0.0125186 -0.0130819 0.018 -0.0125633 -0.0130819 0.017 -0.0125633 -0.0130209 0.018 -0.0126036 -0.0129572 0.018 -0.0126393 -0.0128911 0.017 -0.0126703 -0.0128911 0.018 -0.0126703 -0.0128229 0.018 -0.0126964 -0.0127529 0.019 -0.0127175 -0.0127529 
0.018 -0.0127175 -0.0126816 0.019 -0.0127334 -0.0127529 0.017 -0.0127175 -0.0126816 0.018 -0.0127334 -0.0126816 0.017 -0.0127334 -0.0126094 0.018 -0.012744 -0.0125365 0.018 -0.0127493 -0.0126094 0.017 -0.012744 -0.0124635 0.019 -0.0127493 -0.0125365 0.017 -0.0127493 -0.0124635 0.017 -0.0127493 -0.0123906 0.018 -0.012744 -0.0124635 0.018 -0.0127493 -0.0123906 0.017 -0.012744 -0.0123184 0.018 -0.0127334 -0.0123184 0.017 -0.0127334 -0.0122471 0.017 -0.0127175 -0.0121771 0.018 -0.0126964 -0.0121089 0.019 -0.0126703 -0.0121771 0.017 -0.0126964 -0.0121089 0.018 -0.0126703 -0.0121089 0.017 -0.0126703 -0.0120428 0.018 -0.0126393 -0.0119791 0.018 -0.0126036 -0.0119181 0.017 -0.0125633 -0.0119181 0.018 -0.0125633 -0.0118059 0.019 -0.0124699 -0.0118059 0.018 -0.0124699 -0.0117552 0.018 -0.0124173 -0.0117552 0.017 -0.0124173 -0.0117085 0.018 -0.0123612 -0.0117085 0.017 -0.0123612 -0.011666 0.019 -0.0123018 -0.011666 0.018 -0.0123018 -0.011666 0.017 -0.0123018 -0.011628 0.017 -0.0122394 -0.011566 0.018 -0.0121072 -0.0115946 0.017 -0.0121745 -0.0115424 0.018 -0.0120381 -0.0115239 0.018 -0.0119674 -0.0115424 0.017 -0.0120381 -0.0115107 0.018 -0.0118956 -0.0115107 0.017 -0.0118956 -0.0115027 0.018 -0.011823 -0.0115 0.017 -0.01175 -0.0115 0.018 -0.01175 -0.0115027 0.018 -0.011677 -0.0115107 0.019 -0.0116044 -0.0115107 0.018 -0.0116044 -0.0115107 0.017 -0.0116044 -0.0115239 0.018 -0.0115326 -0.0115424 0.018 -0.0114619 -0.011566 0.017 -0.0113928 -0.0115946 0.017 -0.0113255 -0.011628 0.019 -0.0112606 -0.0115946 0.018 -0.0113255 -0.011628 0.018 -0.0112606 -0.011666 0.019 -0.0111982 -0.011666 0.017 -0.0111982 -0.011666 0.018 -0.0111982 -0.0117552 0.018 -0.0110827 -0.0118059 0.017 -0.0110301 -0.0118059 0.018 -0.0110301 -0.0119181 0.019 -0.0109367 -0.0118603 0.017 -0.0109814 -0.0119181 0.018 -0.0109367 -0.0119791 0.017 -0.0108964 -0.0119791 0.018 -0.0108964 -0.0120428 0.018 -0.0108607 -0.0120428 0.017 -0.0108607 -0.0121089 0.019 -0.0108297 -0.0121089 0.018 -0.0108297 -0.0121089 0.017 -0.0108297 -0.0121771 0.018 -0.0108036 -0.0122471 0.017 -0.0107825 -0.0123184 0.017 -0.0107666 -0.0123906 0.018 -0.010756 -0.0123906 0.017 -0.010756 -0.0124635 0.018 -0.0107507 -0.0125365 0.018 -0.0107507 -0.0125365 0.017 -0.0107507 -0.0126094 0.018 -0.010756 -0.0126094 0.019 -0.010756 -0.0126816 0.018 -0.0107666 -0.0128229 0.017 -0.0108036 -0.0128229 0.018 -0.0108036 -0.0128911 0.018 -0.0108297 -0.0128911 0.017 -0.0108297 -0.0129572 0.017 -0.0108607 -0.0129572 0.018 -0.0108607 -0.0130209 0.017 -0.0108964 -0.0130209 0.018 -0.0108964 -0.0130819 0.018 -0.0109367 -0.0130819 0.017 -0.0109367 -0.0131397 0.018 -0.0109814 -0.0131397 0.017 -0.0109814 -0.0131941 0.017 -0.0110301 -0.0132448 0.018 -0.0110827 -0.0132448 0.019 -0.0110827 -0.0132448 0.017 -0.0110827 -0.0132915 0.017 -0.0111388 -0.013334 0.017 -0.0111982 -0.013334 0.018 -0.0111982 -0.013372 0.017 -0.0112606 -0.0134054 0.017 -0.0113255 -0.0134054 0.018 -0.0113255 -0.013434 0.017 -0.0113928 -0.013434 0.018 -0.0113928 -0.0134576 0.018 -0.0114619 -0.0134576 0.017 -0.0114619 -0.0134761 0.018 -0.0115326 -0.0134893 0.018 -0.0116044 -0.0134893 0.017 -0.0116044 -0.0134973 0.019 -0.011677 -0.0134973 0.017 -0.011677 -0.0135 0.019 -0.01175 -0.0135 0.018 -0.01175 -0.0134973 0.018 -0.011823 -0.011628 0.017 -0.0112606 -0.0117085 0.017 -0.0111388 -0.0117552 0.017 -0.0110827 -0.0119181 0.017 -0.0109367 -0.0121771 0.017 -0.0108036 -0.0124635 0.017 -0.0107507 -0.0126094 0.017 -0.010756 -0.0126816 0.017 -0.0107666 -0.0127529 0.017 -0.0107825 -0.0134761 0.017 -0.0115326 -0.0115239 0.017 -0.0115326 
-0.0115424 0.017 -0.0114619 -0.0115027 0.017 -0.011677 -0.0115027 0.017 -0.011823 -0.0065 0.017 -0.01625 -0.0115239 0.017 -0.0119674 -0.011566 0.017 -0.0121072 -0.0118059 0.017 -0.0124699 -0.0118603 0.017 -0.0125186 -0.0119791 0.017 -0.0126036 -0.0120428 0.017 -0.0126393 -0.0128229 0.017 -0.0126964 -0.0129572 0.017 -0.0126393 -0.0130209 0.017 -0.0126036 -0.0135 0.017 -0.01625 -0.0131941 0.017 -0.0124699 -0.0132915 0.017 -0.0123612 -0.013372 0.017 -0.0122394 -0.0134054 0.017 -0.0121745 -0.0134576 0.017 -0.0120381 -0.0135 0.017 -0.01175 -0.0134973 0.017 -0.011823 0.0134973 0.017 -0.011677 0.0134893 0.018 -0.0116044 0.0134761 0.018 -0.0115326 0.0134973 0.018 -0.011677 0.0135 0.018 -0.01175 0.0134973 0.019 -0.011677 0.0135 0.019 -0.01175 0.0134576 0.019 -0.0120381 0.013434 0.019 -0.0121072 0.013372 0.018 -0.0122394 0.0134054 0.019 -0.0121745 0.0131397 0.019 -0.0125186 0.0130209 0.018 -0.0126036 0.0130819 0.019 -0.0125633 0.0130209 0.019 -0.0126036 0.0129572 0.019 -0.0126393 0.0125365 0.019 -0.0127493 0.0120428 0.018 -0.0126393 0.0121089 0.019 -0.0126703 0.0119181 0.018 -0.0125633 0.0119181 0.019 -0.0125633 0.0118059 0.019 -0.0124699 0.0117552 0.019 -0.0124173 0.011666 0.018 -0.0123018 0.011666 0.019 -0.0123018 0.0116279 0.019 -0.0122394 0.011566 0.018 -0.0121072 0.011566 0.019 -0.0121072 0.0115239 0.019 -0.0119674 0.0115027 0.019 -0.011823 0.0115 0.019 -0.01175 0.0115946 0.018 -0.0113255 0.011566 0.019 -0.0113928 0.011666 0.018 -0.0111982 0.0118059 0.019 -0.0110301 0.0118603 0.019 -0.0109814 0.0119181 0.018 -0.0109367 0.0119181 0.019 -0.0109367 0.0119791 0.019 -0.0108964 0.0121089 0.019 -0.0108297 0.0121771 0.019 -0.0108036 0.0123184 0.019 -0.0107666 0.0123906 0.019 -0.010756 0.0128229 0.018 -0.0108036 0.0128229 0.019 -0.0108036 0.0128911 0.019 -0.0108297 0.0130209 0.018 -0.0108964 0.0130209 0.019 -0.0108964 0.0131941 0.019 -0.0110301 0.013434 0.018 -0.0113928 0.0134054 0.019 -0.0113255 0.0134576 0.018 -0.0114619 0.0134576 0.019 -0.0114619 0.0134761 0.017 -0.0115326 0.0134576 0.017 -0.0114619 0.013434 0.017 -0.0113928 0.0134054 0.018 -0.0113255 0.013372 0.019 -0.0112606 0.013372 0.018 -0.0112606 0.013372 0.017 -0.0112606 0.013334 0.017 -0.0111982 0.013334 0.018 -0.0111982 0.0132915 0.018 -0.0111388 0.0132448 0.018 -0.0110827 0.0132448 0.017 -0.0110827 0.0131941 0.018 -0.0110301 0.0131941 0.017 -0.0110301 0.0131397 0.018 -0.0109814 0.0131397 0.019 -0.0109814 0.0130819 0.017 -0.0109367 0.0130819 0.018 -0.0109367 0.0129572 0.018 -0.0108607 0.0129572 0.019 -0.0108607 0.0128911 0.018 -0.0108297 0.0128911 0.017 -0.0108297 0.0128229 0.017 -0.0108036 0.0127529 0.019 -0.0107825 0.0127529 0.018 -0.0107825 0.0127529 0.017 -0.0107825 0.0126816 0.019 -0.0107666 0.0126816 0.018 -0.0107666 0.0126094 0.018 -0.010756 0.0126094 0.017 -0.010756 0.0125365 0.018 -0.0107507 0.0124635 0.017 -0.0107507 0.0124635 0.018 -0.0107507 0.0123906 0.018 -0.010756 0.0123184 0.018 -0.0107666 0.0123184 0.017 -0.0107666 0.0122471 0.018 -0.0107825 0.0122471 0.019 -0.0107825 0.0122471 0.017 -0.0107825 0.0121089 0.018 -0.0108297 0.0121771 0.018 -0.0108036 0.0120428 0.018 -0.0108607 0.0120428 0.017 -0.0108607 0.0119791 0.018 -0.0108964 0.0119181 0.017 -0.0109367 0.0118603 0.018 -0.0109814 0.0118059 0.018 -0.0110301 0.0118059 0.017 -0.0110301 0.0117552 0.018 -0.0110827 0.0117552 0.017 -0.0110827 0.011666 0.019 -0.0111982 0.0117085 0.018 -0.0111388 0.0117085 0.017 -0.0111388 0.0116279 0.019 -0.0112606 0.0116279 0.018 -0.0112606 0.011666 0.017 -0.0111982 0.0116279 0.017 -0.0112606 0.011566 0.017 -0.0113928 0.011566 0.018 -0.0113928 
0.0115424 0.019 -0.0114619 0.0115424 0.018 -0.0114619 0.0115239 0.018 -0.0115326 0.0115107 0.018 -0.0116044 0.0115027 0.019 -0.011677 0.0115027 0.018 -0.011677 0.0115 0.018 -0.01175 0.0115 0.017 -0.01175 0.0115107 0.018 -0.0118956 0.0115027 0.018 -0.011823 0.0115239 0.018 -0.0119674 0.0115424 0.018 -0.0120381 0.0115424 0.019 -0.0120381 0.0115946 0.018 -0.0121745 0.0116279 0.018 -0.0122394 0.0116279 0.017 -0.0122394 0.0117085 0.019 -0.0123612 0.0117085 0.018 -0.0123612 0.0117085 0.017 -0.0123612 0.0117552 0.018 -0.0124173 0.0117552 0.017 -0.0124173 0.0118059 0.018 -0.0124699 0.0118059 0.017 -0.0124699 0.0118603 0.018 -0.0125186 0.0119791 0.018 -0.0126036 0.0119181 0.017 -0.0125633 0.0119791 0.017 -0.0126036 0.0120428 0.019 -0.0126393 0.0121089 0.018 -0.0126703 0.0121089 0.017 -0.0126703 0.0121771 0.018 -0.0126964 0.0121771 0.019 -0.0126964 0.0122471 0.019 -0.0127175 0.0121771 0.017 -0.0126964 0.0122471 0.018 -0.0127175 0.0123184 0.019 -0.0127334 0.0123906 0.018 -0.012744 0.0123184 0.018 -0.0127334 0.0124635 0.018 -0.0127493 0.0123906 0.017 -0.012744 0.0125365 0.018 -0.0127493 0.0126094 0.018 -0.012744 0.0126816 0.018 -0.0127334 0.0126816 0.017 -0.0127334 0.0127529 0.019 -0.0127175 0.0127529 0.018 -0.0127175 0.0128229 0.017 -0.0126964 0.0128229 0.018 -0.0126964 0.0128911 0.018 -0.0126703 0.0129572 0.018 -0.0126393 0.0129572 0.017 -0.0126393 0.0130209 0.017 -0.0126036 0.0130819 0.018 -0.0125633 0.0131397 0.017 -0.0125186 0.0131941 0.018 -0.0124699 0.0131941 0.019 -0.0124699 0.0131397 0.018 -0.0125186 0.0132448 0.018 -0.0124173 0.0132915 0.018 -0.0123612 0.0132915 0.017 -0.0123612 0.013372 0.019 -0.0122394 0.013334 0.018 -0.0123018 0.0134054 0.017 -0.0121745 0.0134054 0.018 -0.0121745 0.013434 0.018 -0.0121072 0.013434 0.017 -0.0121072 0.0134576 0.018 -0.0120381 0.0134761 0.019 -0.0119674 0.0134761 0.018 -0.0119674 0.0134893 0.018 -0.0118956 0.0134973 0.018 -0.011823 0.0134973 0.017 -0.011823 0.0135 0.017 -0.01175 0.0118603 0.017 -0.0125186 0.0120428 0.017 -0.0126393 0.0123184 0.017 -0.0127334 0.0122471 0.017 -0.0127175 0.0135 0.017 -0.01625 0.0124635 0.017 -0.0127493 0.0126094 0.017 -0.012744 0.0125365 0.017 -0.0127493 0.0127529 0.017 -0.0127175 0.0128911 0.017 -0.0126703 0.0130819 0.017 -0.0125633 0.0131941 0.017 -0.0124699 0.0132448 0.017 -0.0124173 0.013372 0.017 -0.0122394 0.013334 0.017 -0.0123018 0.0134576 0.017 -0.0120381 0.0134761 0.017 -0.0119674 0.0134893 0.017 -0.0118956 0.011666 0.017 -0.0123018 0.0115946 0.017 -0.0121745 0.011566 0.017 -0.0121072 0.0115424 0.017 -0.0120381 0.0115107 0.017 -0.0118956 0.0115239 0.017 -0.0119674 0.0115027 0.017 -0.011823 0.0115027 0.017 -0.011677 0.0115107 0.017 -0.0116044 0.0115239 0.017 -0.0115326 0.0115424 0.017 -0.0114619 0.0115946 0.017 -0.0113255 0.0118603 0.017 -0.0109814 0.0119791 0.017 -0.0108964 0.0121089 0.017 -0.0108297 0.0121771 0.017 -0.0108036 0.0123906 0.017 -0.010756 0.0125365 0.017 -0.0107507 0.0126816 0.017 -0.0107666 0.0129572 0.017 -0.0108607 0.0130209 0.017 -0.0108964 0.0131397 0.017 -0.0109814 0.0135 0.017 -0.00425 0.0132915 0.017 -0.0111388 0.0134054 0.017 -0.0113255 0.0134893 0.017 -0.0116044 0.0135 0.01525 -0.001 0.0135 0.017125 0.01325 -0.0115107 0.01748 0.0131044 -0.0115239 0.01748 0.0130326 -0.0115 0.01748 0.01325 -0.0115107 0.01748 0.0133956 -0.0115107 0.017125 0.0133956 -0.0115424 0.01748 0.0135381 -0.0115239 0.017125 0.0134674 -0.011566 0.01748 0.0136072 -0.0115424 0.017125 0.0135381 -0.0115946 0.01748 0.0136745 -0.0117552 0.01748 0.0139173 -0.0119791 0.01748 0.0141036 -0.0119181 0.017125 0.0140633 -0.0119791 0.017125 
0.0141036 -0.0121089 0.01748 0.0141703 -0.0122471 0.017125 0.0142175 -0.0126816 0.01748 0.0142334 -0.0127529 0.01748 0.0142175 -0.0126816 0.017125 0.0142334 -0.0127529 0.017125 0.0142175 -0.0128911 0.017125 0.0141703 -0.0129572 0.017125 0.0141393 -0.0130209 0.01748 0.0141036 -0.0130819 0.017125 0.0140633 -0.0131941 0.01748 0.0139699 -0.0131397 0.017125 0.0140186 -0.0131941 0.017125 0.0139699 -0.0132448 0.017125 0.0139173 -0.013434 0.01748 0.0136072 -0.0134761 0.017125 0.0134674 -0.0134893 0.017125 0.0133956 -0.0134893 0.01748 0.0131044 -0.0134893 0.017125 0.0131044 -0.0134761 0.017125 0.0130326 -0.0134576 0.01748 0.0129619 -0.0134576 0.017125 0.0129619 -0.0131941 0.01748 0.0125301 -0.0131397 0.01748 0.0124814 -0.0130819 0.017125 0.0124367 -0.0129572 0.017125 0.0123607 -0.0128911 0.017125 0.0123297 -0.0123906 0.01748 0.012256 -0.0121771 0.017125 0.0123036 -0.0121089 0.017125 0.0123297 -0.0119791 0.01748 0.0123964 -0.0119181 0.01748 0.0124367 -0.0117085 0.01748 0.0126388 -0.0117085 0.017125 0.0126388 -0.0115424 0.01748 0.0129619 -0.011566 0.01748 0.0128928 -0.0115946 0.01748 0.0128255 -0.011566 0.019 0.0128928 -0.0116279 0.01748 0.0127606 -0.0116279 0.019 0.0127606 -0.011666 0.01748 0.0126982 -0.0117085 0.019 0.0126388 -0.0117552 0.017125 0.0125827 -0.0118059 0.017125 0.0125301 -0.0117552 0.01748 0.0125827 -0.0118059 0.01748 0.0125301 -0.0118603 0.01748 0.0124814 -0.0118603 0.019 0.0124814 -0.0119791 0.019 0.0123964 -0.0120428 0.017125 0.0123607 -0.0120428 0.01748 0.0123607 -0.0121089 0.01748 0.0123297 -0.0120428 0.019 0.0123607 -0.0121771 0.01748 0.0123036 -0.0121771 0.019 0.0123036 -0.0122471 0.01748 0.0122825 -0.0122471 0.017125 0.0122825 -0.0122471 0.019 0.0122825 -0.0123184 0.01748 0.0122666 -0.0123184 0.019 0.0122666 -0.0124635 0.01748 0.0122507 -0.0124635 0.019 0.0122507 -0.0125365 0.01748 0.0122507 -0.0125365 0.019 0.0122507 -0.0126094 0.01748 0.012256 -0.0126816 0.01748 0.0122666 -0.0126816 0.019 0.0122666 -0.0127529 0.01748 0.0122825 -0.0128229 0.01748 0.0123036 -0.0128911 0.01748 0.0123297 -0.0129572 0.01748 0.0123607 -0.0128911 0.019 0.0123297 -0.0129572 0.019 0.0123607 -0.0130209 0.01748 0.0123964 -0.0130209 0.017125 0.0123964 -0.0130819 0.019 0.0124367 -0.0130819 0.01748 0.0124367 -0.0131397 0.017125 0.0124814 -0.0131397 0.019 0.0124814 -0.0132448 0.01748 0.0125827 -0.0132448 0.019 0.0125827 -0.0132915 0.01748 0.0126388 -0.013334 0.01748 0.0126982 -0.013334 0.019 0.0126982 -0.013372 0.017125 0.0127606 -0.013372 0.01748 0.0127606 -0.0134054 0.01748 0.0128255 -0.013372 0.019 0.0127606 -0.0134054 0.019 0.0128255 -0.013434 0.01748 0.0128928 -0.0134576 0.019 0.0129619 -0.0134761 0.01748 0.0130326 -0.0134973 0.017125 0.013177 -0.0134973 0.01748 0.013177 -0.0134973 0.019 0.013177 -0.0135 0.017125 0.01325 -0.0135 0.01748 0.01325 -0.0134973 0.01748 0.013323 -0.0135 0.019 0.01325 -0.0134973 0.019 0.013323 -0.0134893 0.01748 0.0133956 -0.0134761 0.019 0.0134674 -0.0134576 0.017125 0.0135381 -0.0134761 0.01748 0.0134674 -0.013434 0.017125 0.0136072 -0.0134576 0.01748 0.0135381 -0.0134576 0.019 0.0135381 -0.0134054 0.01748 0.0136745 -0.013434 0.019 0.0136072 -0.013372 0.01748 0.0137394 -0.0134054 0.019 0.0136745 -0.013334 0.01748 0.0138018 -0.0132915 0.019 0.0138612 -0.0132915 0.01748 0.0138612 -0.0132448 0.019 0.0139173 -0.0132448 0.01748 0.0139173 -0.0131397 0.01748 0.0140186 -0.0131941 0.019 0.0139699 -0.0131397 0.019 0.0140186 -0.0130819 0.01748 0.0140633 -0.0129572 0.01748 0.0141393 -0.0128911 0.01748 0.0141703 -0.0128911 0.019 0.0141703 -0.0128229 0.019 0.0141964 -0.0128229 0.01748 
0.0141964 -0.0127529 0.019 0.0142175 -0.0126816 0.019 0.0142334 -0.0126094 0.017125 0.014244 -0.0126094 0.01748 0.014244 -0.0125365 0.017125 0.0142493 -0.0124635 0.01748 0.0142493 -0.0124635 0.017125 0.0142493 -0.0125365 0.01748 0.0142493 -0.0124635 0.019 0.0142493 -0.0123906 0.019 0.014244 -0.0123184 0.017125 0.0142334 -0.0123906 0.01748 0.014244 -0.0123184 0.01748 0.0142334 -0.0123184 0.019 0.0142334 -0.0122471 0.019 0.0142175 -0.0122471 0.01748 0.0142175 -0.0121771 0.01748 0.0141964 -0.0121089 0.017125 0.0141703 -0.0121771 0.019 0.0141964 -0.0120428 0.017125 0.0141393 -0.0120428 0.01748 0.0141393 -0.0120428 0.019 0.0141393 -0.0119791 0.019 0.0141036 -0.0118603 0.01748 0.0140186 -0.0118603 0.017125 0.0140186 -0.0119181 0.01748 0.0140633 -0.0119181 0.019 0.0140633 -0.0118059 0.01748 0.0139699 -0.0117085 0.017125 0.0138612 -0.0117085 0.01748 0.0138612 -0.0117552 0.019 0.0139173 -0.011666 0.01748 0.0138018 -0.0117085 0.019 0.0138612 -0.011666 0.019 0.0138018 -0.0116279 0.01748 0.0137394 -0.0115946 0.019 0.0136745 -0.011566 0.019 0.0136072 -0.0115239 0.01748 0.0134674 -0.0115239 0.019 0.0134674 -0.0115107 0.019 0.0133956 -0.0115027 0.01748 0.013323 -0.0115 0.019 0.01325 -0.0115027 0.01748 0.013177 -0.0115027 -0.019 0.013323 -0.0115107 -0.0180625 0.0133956 -0.0115239 -0.0180625 0.0134674 -0.0115107 -0.017125 0.0133956 -0.0115027 -0.0180625 0.013323 -0.0115 -0.0180625 0.01325 -0.0115 -0.017125 0.01325 -0.0115107 -0.017125 0.0131044 -0.0115424 -0.017125 0.0129619 -0.0120428 -0.017125 0.0123607 -0.0121771 -0.017125 0.0123036 -0.0123184 -0.017125 0.0122666 -0.0130209 -0.017125 0.0123964 -0.0130819 -0.017125 0.0124367 -0.0131397 -0.0180625 0.0124814 -0.0132915 -0.017125 0.0126388 -0.013334 -0.017125 0.0126982 -0.013372 -0.017125 0.0127606 -0.0134054 -0.0180625 0.0128255 -0.0134973 -0.017125 0.013177 -0.0135 -0.017125 0.01325 -0.0134761 -0.0180625 0.0134674 -0.0134761 -0.017125 0.0134674 -0.0134576 -0.017125 0.0135381 -0.013372 -0.017125 0.0137394 -0.0132915 -0.017125 0.0138612 -0.0132448 -0.017125 0.0139173 -0.0131397 -0.017125 0.0140186 -0.0130819 -0.017125 0.0140633 -0.0130209 -0.017125 0.0141036 -0.0124635 -0.0180625 0.0142493 -0.0123906 -0.017125 0.014244 -0.0122471 -0.017125 0.0142175 -0.0119181 -0.0180625 0.0140633 -0.011666 -0.017125 0.0138018 -0.011566 -0.017125 0.0136072 -0.0115239 -0.019 0.0134674 -0.0115107 -0.019 0.0133956 -0.0115424 -0.0180625 0.0135381 -0.011566 -0.0180625 0.0136072 -0.0115946 -0.019 0.0136745 -0.0115946 -0.0180625 0.0136745 -0.0116279 -0.0180625 0.0137394 -0.011666 -0.0180625 0.0138018 -0.011666 -0.019 0.0138018 -0.0117085 -0.019 0.0138612 -0.0117552 -0.017125 0.0139173 -0.0117085 -0.0180625 0.0138612 -0.0117552 -0.0180625 0.0139173 -0.0118059 -0.019 0.0139699 -0.0118059 -0.0180625 0.0139699 -0.0118603 -0.017125 0.0140186 -0.0118603 -0.0180625 0.0140186 -0.0118603 -0.019 0.0140186 -0.0119791 -0.017125 0.0141036 -0.0119791 -0.0180625 0.0141036 -0.0120428 -0.017125 0.0141393 -0.0120428 -0.0180625 0.0141393 -0.0121089 -0.0180625 0.0141703 -0.0121089 -0.019 0.0141703 -0.0121771 -0.0180625 0.0141964 -0.0122471 -0.0180625 0.0142175 -0.0123184 -0.0180625 0.0142334 -0.0123906 -0.0180625 0.014244 -0.0123906 -0.019 0.014244 -0.0124635 -0.019 0.0142493 -0.0125365 -0.017125 0.0142493 -0.0125365 -0.0180625 0.0142493 -0.0126094 -0.0180625 0.014244 -0.0126094 -0.019 0.014244 -0.0126816 -0.0180625 0.0142334 -0.0127529 -0.0180625 0.0142175 -0.0127529 -0.019 0.0142175 -0.0128229 -0.017125 0.0141964 -0.0128229 -0.0180625 0.0141964 -0.0128229 -0.019 0.0141964 -0.0128911 -0.019 
0.0141703 -0.0129572 -0.017125 0.0141393 -0.0128911 -0.0180625 0.0141703 -0.0129572 -0.0180625 0.0141393 -0.0129572 -0.019 0.0141393 -0.0130209 -0.0180625 0.0141036 -0.0130819 -0.0180625 0.0140633 -0.0130819 -0.019 0.0140633 -0.0131397 -0.0180625 0.0140186 -0.0131941 -0.019 0.0139699 -0.0131941 -0.0180625 0.0139699 -0.0132448 -0.0180625 0.0139173 -0.0132915 -0.0180625 0.0138612 -0.0132915 -0.019 0.0138612 -0.013334 -0.0180625 0.0138018 -0.013334 -0.019 0.0138018 -0.013372 -0.019 0.0137394 -0.013372 -0.0180625 0.0137394 -0.0134054 -0.0180625 0.0136745 -0.0134054 -0.017125 0.0136745 -0.013434 -0.0180625 0.0136072 -0.0134576 -0.0180625 0.0135381 -0.0134761 -0.019 0.0134674 -0.0134893 -0.017125 0.0133956 -0.0134893 -0.019 0.0133956 -0.0134893 -0.0180625 0.0133956 -0.0134973 -0.0180625 0.013323 -0.0134973 -0.019 0.013323 -0.0134973 -0.0180625 0.013177 -0.0135 -0.0180625 0.01325 -0.0135 -0.019 0.01325 -0.0134893 -0.0180625 0.0131044 -0.0134973 -0.019 0.013177 -0.0134761 -0.017125 0.0130326 -0.0134893 -0.019 0.0131044 -0.0134761 -0.019 0.0130326 -0.0134761 -0.0180625 0.0130326 -0.0134576 -0.0180625 0.0129619 -0.013434 -0.017125 0.0128928 -0.0134054 -0.017125 0.0128255 -0.013434 -0.0180625 0.0128928 -0.013434 -0.019 0.0128928 -0.0134054 -0.019 0.0128255 -0.013372 -0.0180625 0.0127606 -0.013334 -0.0180625 0.0126982 -0.013334 -0.019 0.0126982 -0.0132448 -0.0180625 0.0125827 -0.0132448 -0.017125 0.0125827 -0.0132915 -0.0180625 0.0126388 -0.0132915 -0.019 0.0126388 -0.0132448 -0.019 0.0125827 -0.0131941 -0.0180625 0.0125301 -0.0131941 -0.017125 0.0125301 -0.0131397 -0.019 0.0124814 -0.0130209 -0.0180625 0.0123964 -0.0130819 -0.0180625 0.0124367 -0.0130209 -0.019 0.0123964 -0.0129572 -0.0180625 0.0123607 -0.0129572 -0.019 0.0123607 -0.0128911 -0.019 0.0123297 -0.0128911 -0.0180625 0.0123297 -0.0128229 -0.017125 0.0123036 -0.0128229 -0.0180625 0.0123036 -0.0127529 -0.0180625 0.0122825 -0.0126816 -0.0180625 0.0122666 -0.0126094 -0.017125 0.012256 -0.0126816 -0.019 0.0122666 -0.0126094 -0.019 0.012256 -0.0126094 -0.0180625 0.012256 -0.0125365 -0.017125 0.0122507 -0.0125365 -0.0180625 0.0122507 -0.0124635 -0.017125 0.0122507 -0.0123906 -0.0180625 0.012256 -0.0124635 -0.0180625 0.0122507 -0.0123184 -0.0180625 0.0122666 -0.0123906 -0.019 0.012256 -0.0123184 -0.019 0.0122666 -0.0122471 -0.019 0.0122825 -0.0122471 -0.0180625 0.0122825 -0.0121771 -0.0180625 0.0123036 -0.0121771 -0.019 0.0123036 -0.0121089 -0.0180625 0.0123297 -0.0121089 -0.019 0.0123297 -0.0120428 -0.019 0.0123607 -0.0119791 -0.017125 0.0123964 -0.0120428 -0.0180625 0.0123607 -0.0119791 -0.019 0.0123964 -0.0119791 -0.0180625 0.0123964 -0.0119181 -0.0180625 0.0124367 -0.0118603 -0.017125 0.0124814 -0.0118603 -0.0180625 0.0124814 -0.0118603 -0.019 0.0124814 -0.0118059 -0.017125 0.0125301 -0.0118059 -0.019 0.0125301 -0.0118059 -0.0180625 0.0125301 -0.0117552 -0.0180625 0.0125827 -0.0117552 -0.019 0.0125827 -0.0117085 -0.0180625 0.0126388 -0.011666 -0.019 0.0126982 -0.011666 -0.0180625 0.0126982 -0.0116279 -0.019 0.0127606 -0.0116279 -0.0180625 0.0127606 -0.0115946 -0.0180625 0.0128255 -0.0115946 -0.019 0.0128255 -0.011566 -0.0180625 0.0128928 -0.011566 -0.019 0.0128928 -0.0115424 -0.019 0.0129619 -0.0115424 -0.0180625 0.0129619 -0.0115239 -0.017125 0.0130326 -0.0115239 -0.0180625 0.0130326 -0.0115239 -0.019 0.0130326 -0.0115107 -0.019 0.0131044 -0.0115027 -0.0180625 0.013177 -0.0115107 -0.0180625 0.0131044 -0.0115027 -0.019 0.013177 0.0134973 -0.017 -0.011823 0.0134973 -0.018 -0.011823 0.0134893 -0.017 -0.0118956 0.0134761 -0.019 -0.0119674 
0.0134893 -0.018 -0.0118956 0.0134893 -0.019 -0.0118956 0.0134973 -0.019 -0.011823 0.0134893 -0.018 -0.0116044 0.0134576 -0.019 -0.0114619 0.0132915 -0.018 -0.0111388 0.013334 -0.019 -0.0111982 0.0132448 -0.019 -0.0110827 0.0131397 -0.018 -0.0109814 0.0130819 -0.018 -0.0109367 0.0131397 -0.019 -0.0109814 0.0130819 -0.019 -0.0109367 0.0129572 -0.019 -0.0108607 0.0128229 -0.019 -0.0108036 0.0127529 -0.019 -0.0107825 0.0126816 -0.018 -0.0107666 0.0124635 -0.019 -0.0107507 0.0123184 -0.019 -0.0107666 0.0120428 -0.019 -0.0108607 0.0119181 -0.019 -0.0109367 0.0118603 -0.018 -0.0109814 0.0118603 -0.019 -0.0109814 0.0117552 -0.018 -0.0110827 0.0118059 -0.019 -0.0110301 0.0117085 -0.018 -0.0111388 0.011666 -0.019 -0.0111982 0.011566 -0.018 -0.0113928 0.0115424 -0.018 -0.0114619 0.0115239 -0.019 -0.0115326 0.0115107 -0.018 -0.0116044 0.0115027 -0.018 -0.011677 0.0115027 -0.019 -0.011677 0.0115 -0.018 -0.01175 0.0115 -0.019 -0.01175 0.0115107 -0.018 -0.0118956 0.0115107 -0.019 -0.0118956 0.0115239 -0.019 -0.0119674 0.0115424 -0.018 -0.0120381 0.0115424 -0.019 -0.0120381 0.011566 -0.019 -0.0121072 0.011666 -0.019 -0.0123018 0.0118059 -0.019 -0.0124699 0.0119181 -0.019 -0.0125633 0.0120428 -0.018 -0.0126393 0.0120428 -0.019 -0.0126393 0.0121089 -0.019 -0.0126703 0.0126816 -0.018 -0.0127334 0.0126816 -0.019 -0.0127334 0.0128911 -0.018 -0.0126703 0.0129572 -0.019 -0.0126393 0.0130209 -0.019 -0.0126036 0.0130819 -0.019 -0.0125633 0.0131397 -0.019 -0.0125186 0.0132915 -0.018 -0.0123612 0.0132915 -0.019 -0.0123612 0.013334 -0.019 -0.0123018 0.0134054 -0.018 -0.0121745 0.013434 -0.018 -0.0121072 0.0134054 -0.019 -0.0121745 0.013434 -0.019 -0.0121072 0.0134576 -0.018 -0.0120381 0.0134576 -0.019 -0.0120381 0.0134761 -0.018 -0.0119674 0.0134761 -0.017 -0.0119674 0.0134576 -0.017 -0.0120381 0.013434 -0.017 -0.0121072 0.0134054 -0.017 -0.0121745 0.013372 -0.017 -0.0122394 0.013372 -0.018 -0.0122394 0.013334 -0.018 -0.0123018 0.0132448 -0.019 -0.0124173 0.0132448 -0.018 -0.0124173 0.0131941 -0.019 -0.0124699 0.0131941 -0.018 -0.0124699 0.0132448 -0.017 -0.0124173 0.0131941 -0.017 -0.0124699 0.0131397 -0.018 -0.0125186 0.0130819 -0.018 -0.0125633 0.0130819 -0.017 -0.0125633 0.0130209 -0.018 -0.0126036 0.0130209 -0.017 -0.0126036 0.0129572 -0.018 -0.0126393 0.0128229 -0.018 -0.0126964 0.0128911 -0.017 -0.0126703 0.0128229 -0.017 -0.0126964 0.0127529 -0.017 -0.0127175 0.0127529 -0.018 -0.0127175 0.0126816 -0.017 -0.0127334 0.0126094 -0.019 -0.012744 0.0126094 -0.018 -0.012744 0.0126094 -0.017 -0.012744 0.0125365 -0.018 -0.0127493 0.0125365 -0.019 -0.0127493 0.0125365 -0.017 -0.0127493 0.0124635 -0.018 -0.0127493 0.0123906 -0.018 -0.012744 0.0123906 -0.019 -0.012744 0.0124635 -0.017 -0.0127493 0.0123184 -0.018 -0.0127334 0.0123184 -0.017 -0.0127334 0.0122471 -0.018 -0.0127175 0.0121771 -0.018 -0.0126964 0.0121089 -0.018 -0.0126703 0.0120428 -0.017 -0.0126393 0.0119791 -0.019 -0.0126036 0.0119791 -0.018 -0.0126036 0.0119791 -0.017 -0.0126036 0.0119181 -0.017 -0.0125633 0.0119181 -0.018 -0.0125633 0.0118603 -0.018 -0.0125186 0.0118059 -0.017 -0.0124699 0.0118059 -0.018 -0.0124699 0.0117552 -0.017 -0.0124173 0.0117552 -0.018 -0.0124173 0.0117085 -0.018 -0.0123612 0.011666 -0.018 -0.0123018 0.0116279 -0.018 -0.0122394 0.0116279 -0.017 -0.0122394 0.0115946 -0.019 -0.0121745 0.0115946 -0.018 -0.0121745 0.0115946 -0.017 -0.0121745 0.011566 -0.017 -0.0121072 0.011566 -0.018 -0.0121072 0.0115424 -0.017 -0.0120381 0.0115239 -0.018 -0.0119674 0.0115239 -0.017 -0.0119674 0.0115107 -0.017 -0.0118956 0.0115027 -0.019 -0.011823 
0.0115027 -0.018 -0.011823 0.0115027 -0.017 -0.011823 0.0115107 -0.019 -0.0116044 0.0115107 -0.017 -0.0116044 0.0115239 -0.017 -0.0115326 0.0115239 -0.018 -0.0115326 0.011566 -0.017 -0.0113928 0.0115946 -0.018 -0.0113255 0.0116279 -0.018 -0.0112606 0.0116279 -0.017 -0.0112606 0.011666 -0.018 -0.0111982 0.0117085 -0.019 -0.0111388 0.0117552 -0.019 -0.0110827 0.0118059 -0.018 -0.0110301 0.0118059 -0.017 -0.0110301 0.0119181 -0.018 -0.0109367 0.0118603 -0.017 -0.0109814 0.0119181 -0.017 -0.0109367 0.0119791 -0.019 -0.0108964 0.0119791 -0.018 -0.0108964 0.0119791 -0.017 -0.0108964 0.0120428 -0.018 -0.0108607 0.0120428 -0.017 -0.0108607 0.0121089 -0.018 -0.0108297 0.0121089 -0.017 -0.0108297 0.0121771 -0.017 -0.0108036 0.0122471 -0.018 -0.0107825 0.0122471 -0.019 -0.0107825 0.0121771 -0.018 -0.0108036 0.0123184 -0.018 -0.0107666 0.0123906 -0.019 -0.010756 0.0123906 -0.018 -0.010756 0.0123184 -0.017 -0.0107666 0.0124635 -0.018 -0.0107507 0.0124635 -0.017 -0.0107507 0.0125365 -0.018 -0.0107507 0.0125365 -0.017 -0.0107507 0.0126094 -0.019 -0.010756 0.0126094 -0.018 -0.010756 0.0126094 -0.017 -0.010756 0.0127529 -0.018 -0.0107825 0.0127529 -0.017 -0.0107825 0.0128229 -0.017 -0.0108036 0.0128229 -0.018 -0.0108036 0.0128911 -0.018 -0.0108297 0.0128911 -0.017 -0.0108297 0.0130209 -0.018 -0.0108964 0.0129572 -0.018 -0.0108607 0.0130209 -0.017 -0.0108964 0.0131941 -0.018 -0.0110301 0.0131941 -0.019 -0.0110301 0.0131941 -0.017 -0.0110301 0.0132448 -0.018 -0.0110827 0.0132915 -0.019 -0.0111388 0.013334 -0.018 -0.0111982 0.013334 -0.017 -0.0111982 0.013372 -0.018 -0.0112606 0.0134054 -0.019 -0.0113255 0.013372 -0.017 -0.0112606 0.013434 -0.018 -0.0113928 0.0134054 -0.018 -0.0113255 0.013434 -0.017 -0.0113928 0.0134576 -0.018 -0.0114619 0.0134761 -0.019 -0.0115326 0.0134761 -0.018 -0.0115326 0.0134893 -0.019 -0.0116044 0.0134761 -0.017 -0.0115326 0.0134893 -0.017 -0.0116044 0.0134973 -0.019 -0.011677 0.0134973 -0.018 -0.011677 0.0134973 -0.017 -0.011677 0.0135 -0.019 -0.01175 0.0135 -0.018 -0.01175 -0.0134973 -0.018 -0.011677 -0.0135 -0.017 -0.01175 -0.0134973 -0.017 -0.011677 -0.0134761 -0.018 -0.0115326 -0.0134893 -0.018 -0.0116044 -0.0134973 -0.018 -0.011823 -0.0134973 -0.019 -0.011823 -0.0134893 -0.019 -0.0118956 -0.013334 -0.018 -0.0123018 -0.0132915 -0.018 -0.0123612 -0.0130209 -0.018 -0.0126036 -0.0128911 -0.019 -0.0126703 -0.0125365 -0.018 -0.0127493 -0.0125365 -0.019 -0.0127493 -0.0123184 -0.018 -0.0127334 -0.0123184 -0.019 -0.0127334 -0.0122471 -0.018 -0.0127175 -0.0121771 -0.018 -0.0126964 -0.0121771 -0.019 -0.0126964 -0.0120428 -0.018 -0.0126393 -0.0119791 -0.019 -0.0126036 -0.0119181 -0.019 -0.0125633 -0.0118603 -0.019 -0.0125186 -0.0118059 -0.019 -0.0124699 -0.011566 -0.018 -0.0121072 -0.0115424 -0.018 -0.0120381 -0.0115107 -0.019 -0.0118956 -0.0115027 -0.018 -0.011823 -0.0115027 -0.019 -0.011823 -0.0115424 -0.019 -0.0114619 -0.0115946 -0.018 -0.0113255 -0.011666 -0.019 -0.0111982 -0.0117085 -0.018 -0.0111388 -0.0117085 -0.019 -0.0111388 -0.0118603 -0.019 -0.0109814 -0.0119181 -0.019 -0.0109367 -0.0119791 -0.018 -0.0108964 -0.0119791 -0.019 -0.0108964 -0.0121089 -0.019 -0.0108297 -0.0122471 -0.018 -0.0107825 -0.0121771 -0.019 -0.0108036 -0.0122471 -0.019 -0.0107825 -0.0123906 -0.018 -0.010756 -0.0124635 -0.018 -0.0107507 -0.0125365 -0.018 -0.0107507 -0.0126816 -0.019 -0.0107666 -0.0127529 -0.018 -0.0107825 -0.0127529 -0.019 -0.0107825 -0.0128229 -0.018 -0.0108036 -0.0129572 -0.019 -0.0108607 -0.0130819 -0.019 -0.0109367 -0.0132448 -0.018 -0.0110827 -0.013334 -0.019 -0.0111982 -0.013434 -0.019 
-0.0113928 -0.0134576 -0.019 -0.0114619 -0.013434 -0.018 -0.0113928 -0.0134576 -0.018 -0.0114619 -0.0134054 -0.019 -0.0113255 -0.0134054 -0.017 -0.0113255 -0.013372 -0.018 -0.0112606 -0.013372 -0.019 -0.0112606 -0.0134054 -0.018 -0.0113255 -0.013334 -0.018 -0.0111982 -0.0132915 -0.018 -0.0111388 -0.0132448 -0.017 -0.0110827 -0.0131941 -0.018 -0.0110301 -0.0131941 -0.017 -0.0110301 -0.0131397 -0.018 -0.0109814 -0.0131397 -0.017 -0.0109814 -0.0130819 -0.018 -0.0109367 -0.0130819 -0.017 -0.0109367 -0.0130209 -0.018 -0.0108964 -0.0129572 -0.018 -0.0108607 -0.0128911 -0.018 -0.0108297 -0.0128911 -0.017 -0.0108297 -0.0128229 -0.017 -0.0108036 -0.0126816 -0.017 -0.0107666 -0.0126094 -0.018 -0.010756 -0.0126816 -0.018 -0.0107666 -0.0126094 -0.017 -0.010756 -0.0125365 -0.019 -0.0107507 -0.0123906 -0.019 -0.010756 -0.0123906 -0.017 -0.010756 -0.0123184 -0.018 -0.0107666 -0.0123184 -0.017 -0.0107666 -0.0121771 -0.018 -0.0108036 -0.0122471 -0.017 -0.0107825 -0.0121771 -0.017 -0.0108036 -0.0121089 -0.018 -0.0108297 -0.0120428 -0.019 -0.0108607 -0.0120428 -0.018 -0.0108607 -0.0119791 -0.017 -0.0108964 -0.0119181 -0.018 -0.0109367 -0.0119181 -0.017 -0.0109367 -0.0118603 -0.018 -0.0109814 -0.0118059 -0.018 -0.0110301 -0.0118603 -0.017 -0.0109814 -0.0117552 -0.017 -0.0110827 -0.0117552 -0.018 -0.0110827 -0.011666 -0.018 -0.0111982 -0.011628 -0.017 -0.0112606 -0.0115946 -0.019 -0.0113255 -0.011628 -0.018 -0.0112606 -0.0115946 -0.017 -0.0113255 -0.0115424 -0.018 -0.0114619 -0.011566 -0.018 -0.0113928 -0.0115424 -0.017 -0.0114619 -0.0115239 -0.017 -0.0115326 -0.0115239 -0.018 -0.0115326 -0.0115107 -0.018 -0.0116044 -0.0115027 -0.018 -0.011677 -0.0115027 -0.017 -0.011677 -0.0115 -0.018 -0.01175 -0.0115027 -0.017 -0.011823 -0.0115107 -0.018 -0.0118956 -0.0115239 -0.019 -0.0119674 -0.0115107 -0.017 -0.0118956 -0.0115239 -0.018 -0.0119674 -0.011566 -0.019 -0.0121072 -0.0115946 -0.019 -0.0121745 -0.0115946 -0.018 -0.0121745 -0.011566 -0.017 -0.0121072 -0.011628 -0.018 -0.0122394 -0.011628 -0.019 -0.0122394 -0.0115946 -0.017 -0.0121745 -0.011628 -0.017 -0.0122394 -0.011666 -0.019 -0.0123018 -0.011666 -0.017 -0.0123018 -0.011666 -0.018 -0.0123018 -0.0117085 -0.018 -0.0123612 -0.0117085 -0.017 -0.0123612 -0.0117552 -0.018 -0.0124173 -0.0117552 -0.017 -0.0124173 -0.0118059 -0.018 -0.0124699 -0.0118603 -0.018 -0.0125186 -0.0118603 -0.017 -0.0125186 -0.0119181 -0.018 -0.0125633 -0.0119791 -0.017 -0.0126036 -0.0119791 -0.018 -0.0126036 -0.0121089 -0.019 -0.0126703 -0.0121089 -0.018 -0.0126703 -0.0120428 -0.017 -0.0126393 -0.0121089 -0.017 -0.0126703 -0.0122471 -0.017 -0.0127175 -0.0123184 -0.017 -0.0127334 -0.0123906 -0.019 -0.012744 -0.0123906 -0.018 -0.012744 -0.0124635 -0.018 -0.0127493 -0.0123906 -0.017 -0.012744 -0.0125365 -0.017 -0.0127493 -0.0126094 -0.018 -0.012744 -0.0126816 -0.018 -0.0127334 -0.0127529 -0.019 -0.0127175 -0.0127529 -0.018 -0.0127175 -0.0127529 -0.017 -0.0127175 -0.0128229 -0.018 -0.0126964 -0.0128911 -0.018 -0.0126703 -0.0128229 -0.017 -0.0126964 -0.0129572 -0.018 -0.0126393 -0.0130209 -0.017 -0.0126036 -0.0130819 -0.018 -0.0125633 -0.0131397 -0.018 -0.0125186 -0.0130819 -0.017 -0.0125633 -0.0131397 -0.017 -0.0125186 -0.0131941 -0.018 -0.0124699 -0.0131941 -0.019 -0.0124699 -0.0131941 -0.017 -0.0124699 -0.0132448 -0.019 -0.0124173 -0.0132448 -0.018 -0.0124173 -0.0132448 -0.017 -0.0124173 -0.013334 -0.017 -0.0123018 -0.013372 -0.018 -0.0122394 -0.0134054 -0.018 -0.0121745 -0.013434 -0.017 -0.0121072 -0.013434 -0.018 -0.0121072 -0.0134576 -0.019 -0.0120381 -0.0134576 -0.018 -0.0120381 -0.0134576 
-0.017 -0.0120381 -0.0134761 -0.018 -0.0119674 -0.0134893 -0.018 -0.0118956 -0.0134893 -0.017 -0.0118956 -0.0134973 -0.017 -0.011823 -0.0135 -0.018 -0.01175 0.0134973 -0.019 0.013323 0.0134576 -0.017125 0.0135381 0.0134893 -0.017125 0.0133956 0.0134893 -0.0180625 0.0133956 0.0135 -0.0180625 0.01325 0.0134576 -0.0180625 0.0129619 0.0134054 -0.017125 0.0128255 0.013334 -0.017125 0.0126982 0.0132448 -0.0180625 0.0125827 0.0132915 -0.017125 0.0126388 0.0131941 -0.017125 0.0125301 0.0130819 -0.017125 0.0124367 0.0129572 -0.0180625 0.0123607 0.0129572 -0.017125 0.0123607 0.0126816 -0.017125 0.0122666 0.0125365 -0.0180625 0.0122507 0.0125365 -0.017125 0.0122507 0.0122471 -0.0180625 0.0122825 0.0121771 -0.017125 0.0123036 0.0119181 -0.017125 0.0124367 0.0117085 -0.017125 0.0126388 0.011666 -0.017125 0.0126982 0.011566 -0.0180625 0.0128928 0.0115946 -0.017125 0.0128255 0.011566 -0.017125 0.0128928 0.0115424 -0.017125 0.0129619 0.0115239 -0.017125 0.0130326 0.0115027 -0.0180625 0.013177 0.0115107 -0.017125 0.0131044 0.0115 -0.0180625 0.01325 0.0115027 -0.0180625 0.013323 0.0115 -0.017125 0.01325 0.0115239 -0.0180625 0.0134674 0.0115107 -0.017125 0.0133956 0.0115424 -0.0180625 0.0135381 0.0117552 -0.017125 0.0139173 0.0120428 -0.0180625 0.0141393 0.0123906 -0.0180625 0.014244 0.0126094 -0.0180625 0.014244 0.0127529 -0.017125 0.0142175 0.0128911 -0.0180625 0.0141703 0.0128911 -0.017125 0.0141703 0.0129572 -0.0180625 0.0141393 0.0129572 -0.017125 0.0141393 0.0130209 -0.017125 0.0141036 0.0131397 -0.0180625 0.0140186 0.0131397 -0.017125 0.0140186 0.0134054 -0.017125 0.0136745 0.0134576 -0.0180625 0.0135381 0.0134761 -0.0180625 0.0134674 0.0134576 -0.019 0.0135381 0.013434 -0.0180625 0.0136072 0.0134054 -0.0180625 0.0136745 0.013372 -0.0180625 0.0137394 0.013372 -0.019 0.0137394 0.013334 -0.019 0.0138018 0.013334 -0.0180625 0.0138018 0.0132915 -0.019 0.0138612 0.0132915 -0.0180625 0.0138612 0.0131941 -0.0180625 0.0139699 0.0131941 -0.017125 0.0139699 0.0132448 -0.0180625 0.0139173 0.0131941 -0.019 0.0139699 0.0130819 -0.0180625 0.0140633 0.0130819 -0.019 0.0140633 0.0130209 -0.0180625 0.0141036 0.0129572 -0.019 0.0141393 0.0128229 -0.017125 0.0141964 0.0128911 -0.019 0.0141703 0.0128229 -0.0180625 0.0141964 0.0128229 -0.019 0.0141964 0.0127529 -0.0180625 0.0142175 0.0126816 -0.017125 0.0142334 0.0126816 -0.0180625 0.0142334 0.0126094 -0.017125 0.014244 0.0126816 -0.019 0.0142334 0.0126094 -0.019 0.014244 0.0125365 -0.0180625 0.0142493 0.0125365 -0.019 0.0142493 0.0124635 -0.0180625 0.0142493 0.0123906 -0.019 0.014244 0.0123184 -0.019 0.0142334 0.0123184 -0.0180625 0.0142334 0.0122471 -0.0180625 0.0142175 0.0121771 -0.017125 0.0141964 0.0121771 -0.0180625 0.0141964 0.0121089 -0.0180625 0.0141703 0.0121771 -0.019 0.0141964 0.0121089 -0.019 0.0141703 0.0120428 -0.019 0.0141393 0.0119791 -0.0180625 0.0141036 0.0118603 -0.017125 0.0140186 0.0119181 -0.0180625 0.0140633 0.0118603 -0.019 0.0140186 0.0118059 -0.017125 0.0139699 0.0118603 -0.0180625 0.0140186 0.0118059 -0.0180625 0.0139699 0.0118059 -0.019 0.0139699 0.0117552 -0.019 0.0139173 0.0117552 -0.0180625 0.0139173 0.0117085 -0.0180625 0.0138612 0.011666 -0.0180625 0.0138018 0.011628 -0.019 0.0137394 0.0115946 -0.017125 0.0136745 0.011628 -0.0180625 0.0137394 0.0115946 -0.0180625 0.0136745 0.011566 -0.0180625 0.0136072 0.0115424 -0.017125 0.0135381 0.0115107 -0.0180625 0.0133956 0.0115239 -0.019 0.0134674 0.0115027 -0.017125 0.013323 0.0115027 -0.019 0.013177 0.0115107 -0.0180625 0.0131044 0.0115239 -0.0180625 0.0130326 0.0115239 -0.019 0.0130326 
0.0115424 -0.0180625 0.0129619 0.011628 -0.0180625 0.0127606 0.0115946 -0.0180625 0.0128255 0.011628 -0.019 0.0127606 0.011666 -0.0180625 0.0126982 0.0117085 -0.0180625 0.0126388 0.011666 -0.019 0.0126982 0.0117552 -0.017125 0.0125827 0.0117552 -0.019 0.0125827 0.0117552 -0.0180625 0.0125827 0.0118059 -0.0180625 0.0125301 0.0118059 -0.017125 0.0125301 0.0118059 -0.019 0.0125301 0.0118603 -0.0180625 0.0124814 0.0119181 -0.0180625 0.0124367 0.0118603 -0.019 0.0124814 0.0119791 -0.0180625 0.0123964 0.0120428 -0.0180625 0.0123607 0.0121089 -0.017125 0.0123297 0.0121089 -0.0180625 0.0123297 0.0121089 -0.019 0.0123297 0.0122471 -0.017125 0.0122825 0.0121771 -0.0180625 0.0123036 0.0122471 -0.019 0.0122825 0.0123184 -0.017125 0.0122666 0.0123184 -0.0180625 0.0122666 0.0123184 -0.019 0.0122666 0.0123906 -0.0180625 0.012256 0.0124635 -0.0180625 0.0122507 0.0126094 -0.017125 0.012256 0.0125365 -0.019 0.0122507 0.0126094 -0.0180625 0.012256 0.0126816 -0.0180625 0.0122666 0.0127529 -0.0180625 0.0122825 0.0127529 -0.019 0.0122825 0.0128229 -0.0180625 0.0123036 0.0128911 -0.0180625 0.0123297 0.0130209 -0.0180625 0.0123964 0.0131397 -0.0180625 0.0124814 0.0131397 -0.017125 0.0124814 0.0130819 -0.0180625 0.0124367 0.0131397 -0.019 0.0124814 0.0131941 -0.0180625 0.0125301 0.0131941 -0.019 0.0125301 0.0132448 -0.019 0.0125827 0.0132915 -0.0180625 0.0126388 0.013334 -0.0180625 0.0126982 0.0132915 -0.019 0.0126388 0.013334 -0.019 0.0126982 0.013372 -0.0180625 0.0127606 0.013372 -0.019 0.0127606 0.013434 -0.0180625 0.0128928 0.013434 -0.017125 0.0128928 0.0134054 -0.0180625 0.0128255 0.0134054 -0.019 0.0128255 0.0134761 -0.017125 0.0130326 0.0134761 -0.0180625 0.0130326 0.0134761 -0.019 0.0130326 0.0134893 -0.0180625 0.0131044 0.0134973 -0.0180625 0.013177 0.0135 -0.019 0.01325 0.0134973 -0.0180625 0.013323 0.0134973 -0.017125 0.013323 0.0134893 0.01748 0.0131044 0.0134973 0.019 0.013177 0.0134893 0.019 0.0131044 0.0134761 0.017125 0.0130326 0.0134893 0.017125 0.0131044 0.0134893 0.017125 0.0133956 0.0134576 0.01748 0.0135381 0.0134576 0.017125 0.0135381 0.013434 0.017125 0.0136072 0.0134054 0.017125 0.0136745 0.0132915 0.017125 0.0138612 0.0131941 0.01748 0.0139699 0.0132448 0.017125 0.0139173 0.0131397 0.017125 0.0140186 0.0130819 0.01748 0.0140633 0.0130209 0.017125 0.0141036 0.0128911 0.017125 0.0141703 0.0128229 0.01748 0.0141964 0.0128229 0.017125 0.0141964 0.0126816 0.017125 0.0142334 0.0125365 0.01748 0.0142493 0.0125365 0.017125 0.0142493 0.0124635 0.017125 0.0142493 0.0123906 0.017125 0.014244 0.0123184 0.017125 0.0142334 0.0120428 0.017125 0.0141393 0.0118603 0.01748 0.0140186 0.0117552 0.017125 0.0139173 0.011566 0.01748 0.0136072 0.011566 0.017125 0.0136072 0.0115424 0.017125 0.0135381 0.0115107 0.017125 0.0131044 0.0115239 0.017125 0.0130326 0.0117085 0.017125 0.0126388 0.0118059 0.01748 0.0125301 0.0118059 0.017125 0.0125301 0.0118603 0.017125 0.0124814 0.0119181 0.01748 0.0124367 0.0119181 0.017125 0.0124367 0.0119791 0.01748 0.0123964 0.0121089 0.017125 0.0123297 0.0121771 0.017125 0.0123036 0.0123184 0.017125 0.0122666 0.0125365 0.01748 0.0122507 0.0126094 0.01748 0.012256 0.0126816 0.01748 0.0122666 0.0126816 0.017125 0.0122666 0.0128911 0.01748 0.0123297 0.0131397 0.01748 0.0124814 0.0131397 0.017125 0.0124814 0.0132448 0.017125 0.0125827 0.013372 0.017125 0.0127606 0.0134576 0.017125 0.0129619 0.0134761 0.01748 0.0130326 0.0134761 0.019 0.0130326 0.0134576 0.019 0.0129619 0.0134576 0.01748 0.0129619 0.0134054 0.01748 0.0128255 0.013434 0.01748 0.0128928 0.013372 0.019 0.0127606 0.013372 
0.01748 0.0127606 0.013334 0.019 0.0126982 0.013334 0.01748 0.0126982 0.0132915 0.01748 0.0126388 0.0132915 0.019 0.0126388 0.0132448 0.01748 0.0125827 0.0131941 0.01748 0.0125301 0.0131941 0.019 0.0125301 0.0130819 0.01748 0.0124367 0.0130819 0.019 0.0124367 0.0130209 0.017125 0.0123964 0.0130209 0.01748 0.0123964 0.0130209 0.019 0.0123964 0.0129572 0.01748 0.0123607 0.0129572 0.019 0.0123607 0.0128911 0.019 0.0123297 0.0128229 0.01748 0.0123036 0.0128229 0.019 0.0123036 0.0127529 0.01748 0.0122825 0.0127529 0.019 0.0122825 0.0126094 0.017125 0.012256 0.0125365 0.019 0.0122507 0.0124635 0.017125 0.0122507 0.0123906 0.01748 0.012256 0.0123906 0.017125 0.012256 0.0124635 0.01748 0.0122507 0.0123184 0.01748 0.0122666 0.0122471 0.019 0.0122825 0.0122471 0.01748 0.0122825 0.0121771 0.01748 0.0123036 0.0121771 0.019 0.0123036 0.0121089 0.01748 0.0123297 0.0120428 0.01748 0.0123607 0.0120428 0.019 0.0123607 0.0119791 0.019 0.0123964 0.0118603 0.01748 0.0124814 0.0118603 0.019 0.0124814 0.0117085 0.01748 0.0126388 0.0117552 0.01748 0.0125827 0.011666 0.01748 0.0126982 0.011666 0.019 0.0126982 0.011628 0.01748 0.0127606 0.011628 0.019 0.0127606 0.0115946 0.01748 0.0128255 0.011566 0.01748 0.0128928 0.011566 0.019 0.0128928 0.0115424 0.017125 0.0129619 0.0115424 0.01748 0.0129619 0.0115239 0.01748 0.0130326 0.0115239 0.019 0.0130326 0.0115107 0.01748 0.0131044 0.0115027 0.01748 0.013177 0.0115027 0.019 0.013177 0.0115 0.01748 0.01325 0.0115027 0.017125 0.013323 0.0115027 0.019 0.013323 0.0115027 0.01748 0.013323 0.0115107 0.01748 0.0133956 0.0115239 0.01748 0.0134674 0.0115239 0.017125 0.0134674 0.0115239 0.019 0.0134674 0.0115424 0.019 0.0135381 0.0115424 0.01748 0.0135381 0.0115946 0.017125 0.0136745 0.011566 0.019 0.0136072 0.0115946 0.01748 0.0136745 0.011628 0.01748 0.0137394 0.011666 0.01748 0.0138018 0.011666 0.017125 0.0138018 0.011666 0.019 0.0138018 0.0117085 0.017125 0.0138612 0.0117085 0.01748 0.0138612 0.0117552 0.01748 0.0139173 0.0117552 0.019 0.0139173 0.0118059 0.01748 0.0139699 0.0118059 0.019 0.0139699 0.0119181 0.01748 0.0140633 0.0119181 0.019 0.0140633 0.0119791 0.019 0.0141036 0.0119791 0.01748 0.0141036 0.0120428 0.01748 0.0141393 0.0120428 0.019 0.0141393 0.0121089 0.01748 0.0141703 0.0121771 0.01748 0.0141964 0.0122471 0.01748 0.0142175 0.0122471 0.019 0.0142175 0.0123184 0.01748 0.0142334 0.0123906 0.019 0.014244 0.0123906 0.01748 0.014244 0.0124635 0.01748 0.0142493 0.0124635 0.019 0.0142493 0.0126094 0.019 0.014244 0.0126094 0.01748 0.014244 0.0127529 0.017125 0.0142175 0.0126816 0.01748 0.0142334 0.0127529 0.019 0.0142175 0.0127529 0.01748 0.0142175 0.0128911 0.01748 0.0141703 0.0128911 0.019 0.0141703 0.0129572 0.01748 0.0141393 0.0130209 0.019 0.0141036 0.0130209 0.01748 0.0141036 0.0130819 0.019 0.0140633 0.0131397 0.01748 0.0140186 0.0131397 0.019 0.0140186 0.0131941 0.017125 0.0139699 0.0131941 0.019 0.0139699 0.0132448 0.01748 0.0139173 0.0132448 0.019 0.0139173 0.0132915 0.01748 0.0138612 0.013334 0.01748 0.0138018 0.013334 0.019 0.0138018 0.013372 0.01748 0.0137394 0.0134054 0.01748 0.0136745 0.013434 0.01748 0.0136072 0.013434 0.019 0.0136072 0.0134576 0.019 0.0135381 0.0134761 0.01748 0.0134674 0.0134761 0.017125 0.0134674 0.0134761 0.019 0.0134674 0.0134893 0.01748 0.0133956 0.0134893 0.019 0.0133956 0.0134973 0.017125 0.013323 0.0134973 0.01748 0.013323 0.0134973 0.019 0.013323 0.0135 0.01748 0.01325 0.0134973 0.01748 0.013177 0.013334 -0.017 -0.0123018 0.0132915 -0.017 -0.0123612 0.0135 -0.017 -0.01625 0.0131397 -0.017 -0.0125186 0.0129572 -0.017 
-0.0126393 0.0123906 -0.017 -0.012744 0.0122471 -0.017 -0.0127175 0.0121771 -0.017 -0.0126964 0.0121089 -0.017 -0.0126703 0.0118603 -0.017 -0.0125186 0.0115946 -0.017 -0.0113255 0.011666 -0.017 -0.0111982 0.0117085 -0.017 -0.0111388 0.0117552 -0.017 -0.0110827 0.0122471 -0.017 -0.0107825 0.0123906 -0.017 -0.010756 0.0126816 -0.017 -0.0107666 0.0129572 -0.017 -0.0108607 0.0130819 -0.017 -0.0109367 0.0131397 -0.017 -0.0109814 0.0132915 -0.017 -0.0111388 0.0132448 -0.017 -0.0110827 0.0135 -0.017 -0.00425 0.0134054 -0.017 -0.0113255 0.0134576 -0.017 -0.0114619 0.0115424 -0.017 -0.0114619 0.0115027 -0.017 -0.011677 0.0115 -0.017 -0.01175 0.011666 -0.017 -0.0123018 0.0117085 -0.017 -0.0123612 -0.0118059 -0.017 -0.0124699 -0.0119181 -0.017 -0.0125633 -0.0135 -0.017 -0.01625 -0.0121771 -0.017 -0.0126964 -0.0124635 -0.017 -0.0127493 -0.0126094 -0.017 -0.012744 -0.0126816 -0.017 -0.0127334 -0.0128911 -0.017 -0.0126703 -0.0129572 -0.017 -0.0126393 -0.0132915 -0.017 -0.0123612 -0.013372 -0.017 -0.0122394 -0.0134054 -0.017 -0.0121745 -0.0134761 -0.017 -0.0119674 -0.0115424 -0.017 -0.0120381 -0.0115239 -0.017 -0.0119674 -0.0115 -0.017 -0.01175 -0.0115107 -0.017 -0.0116044 -0.011566 -0.017 -0.0113928 -0.011666 -0.017 -0.0111982 -0.0117085 -0.017 -0.0111388 -0.0118059 -0.017 -0.0110301 -0.0135 -0.017 -0.00425 -0.0120428 -0.017 -0.0108607 -0.0121089 -0.017 -0.0108297 -0.0124635 -0.017 -0.0107507 -0.0125365 -0.017 -0.0107507 -0.0127529 -0.017 -0.0107825 -0.0129572 -0.017 -0.0108607 -0.0130209 -0.017 -0.0108964 -0.0132915 -0.017 -0.0111388 -0.013334 -0.017 -0.0111982 -0.013372 -0.017 -0.0112606 -0.013434 -0.017 -0.0113928 -0.0134576 -0.017 -0.0114619 -0.0134761 -0.017 -0.0115326 -0.0134893 -0.017 -0.0116044 0.0122471 -0.017125 0.0142175 0.0123184 -0.017125 0.0142334 0.0123906 -0.017125 0.014244 0.0124635 -0.017125 0.0142493 0.0125365 -0.017125 0.0142493 0.0130819 -0.017125 0.0140633 0.0132448 -0.017125 0.0139173 0.013334 -0.017125 0.0138018 0.0132915 -0.017125 0.0138612 0.013372 -0.017125 0.0137394 0.013434 -0.017125 0.0136072 0.0134761 -0.017125 0.0134674 0.0121089 -0.017125 0.0141703 0.0119181 -0.017125 0.0140633 -0.0118059 -0.017125 0.0139699 0.0117085 -0.017125 0.0138612 -0.0116279 -0.017125 0.0137394 -0.0115946 -0.017125 0.0136745 0.011566 -0.017125 0.0136072 0.0115239 -0.017125 0.0134674 -0.0115027 -0.017125 0.013323 -0.0115027 -0.017125 0.013177 0.0115027 -0.017125 0.013177 -0.0115946 -0.017125 0.0128255 0.011628 -0.017125 0.0127606 -0.0117085 -0.017125 0.0126388 -0.0117552 -0.017125 0.0125827 0.0119791 -0.017125 0.0123964 0.0120428 -0.017125 0.0123607 -0.0121089 -0.017125 0.0123297 -0.0122471 -0.017125 0.0122825 -0.0123906 -0.017125 0.012256 -0.0126816 -0.017125 0.0122666 -0.0127529 -0.017125 0.0122825 -0.0135 -0.017125 0.01075 -0.0129572 -0.017125 0.0123607 -0.0128911 -0.017125 0.0123297 -0.0131397 -0.017125 0.0124814 -0.0134576 -0.017125 0.0129619 -0.0134893 -0.017125 0.0131044 0.0120428 -0.017125 0.0141393 0.0119791 -0.017125 0.0141036 -0.0119181 -0.017125 0.0140633 -0.0117085 -0.017125 0.0138612 0.011666 -0.017125 0.0138018 0.011628 -0.017125 0.0137394 -0.0115424 -0.017125 0.0135381 -0.0115239 -0.017125 0.0134674 -0.011566 -0.017125 0.0128928 -0.0116279 -0.017125 0.0127606 -0.011666 -0.017125 0.0126982 0.0118603 -0.017125 0.0124814 -0.0119181 -0.017125 0.0124367 0.0123906 -0.017125 0.012256 0.0124635 -0.017125 0.0122507 0.0127529 -0.017125 0.0122825 0.0128229 -0.017125 0.0123036 0.0128911 -0.017125 0.0123297 0.0130209 -0.017125 0.0123964 0.0132448 -0.017125 0.0125827 0.013372 -0.017125 0.0127606 
0.0134576 -0.017125 0.0129619 0.0134893 -0.017125 0.0131044 0.0134973 -0.017125 0.013177 0.0135 -0.017125 0.01325 -0.0121089 -0.017125 0.0141703 -0.0121771 -0.017125 0.0141964 -0.0123184 -0.017125 0.0142334 -0.0124635 -0.017125 0.0142493 -0.0126094 -0.017125 0.014244 -0.0126816 -0.017125 0.0142334 -0.0127529 -0.017125 0.0142175 -0.0128911 -0.017125 0.0141703 -0.0131941 -0.017125 0.0139699 -0.013334 -0.017125 0.0138018 -0.0135 -0.017125 0.01775 -0.013434 -0.017125 0.0136072 -0.0134973 -0.017125 0.013323 -0.0121771 0.017125 0.0141964 -0.0123906 0.017125 0.014244 -0.0128229 0.017125 0.0141964 -0.0130209 0.017125 0.0141036 -0.0135 0.017125 0.01775 -0.0132915 0.017125 0.0138612 -0.013372 0.017125 0.0137394 -0.013334 0.017125 0.0138018 -0.0134054 0.017125 0.0136745 -0.0134973 0.017125 0.013323 0.0119791 0.017125 0.0141036 0.0118603 0.017125 0.0140186 -0.011666 0.017125 0.0138018 -0.0116279 0.017125 0.0137394 -0.0115946 0.017125 0.0136745 -0.011566 0.017125 0.0136072 0.0115107 0.017125 0.0133956 -0.0115027 0.017125 0.013323 -0.0115 0.017125 0.01325 -0.0115107 0.017125 0.0131044 -0.0115239 0.017125 0.0130326 -0.0115946 0.017125 0.0128255 -0.0116279 0.017125 0.0127606 -0.011666 0.017125 0.0126982 0.0117552 0.017125 0.0125827 -0.0118603 0.017125 0.0124814 -0.0119791 0.017125 0.0123964 0.0120428 0.017125 0.0123607 0.0122471 0.017125 0.0122825 0.0135 0.017125 0.01075 0.0125365 0.017125 0.0122507 0.0127529 0.017125 0.0122825 0.0128911 0.017125 0.0123297 0.0128229 0.017125 0.0123036 0.0129572 0.017125 0.0123607 0.0130819 0.017125 0.0124367 0.0131941 0.017125 0.0125301 0.0132915 0.017125 0.0126388 0.013334 0.017125 0.0126982 0.0134054 0.017125 0.0128255 0.013434 0.017125 0.0128928 0.0134973 0.017125 0.013177 0.0119181 0.017125 0.0140633 0.0118059 0.017125 0.0139699 -0.0118059 0.017125 0.0139699 -0.0117552 0.017125 0.0139173 0.011628 0.017125 0.0137394 0.0115 0.017125 0.01325 0.0115027 0.017125 0.013177 -0.0115027 0.017125 0.013177 -0.0115424 0.017125 0.0129619 0.011566 0.017125 0.0128928 -0.011566 0.017125 0.0128928 0.0115946 0.017125 0.0128255 0.011628 0.017125 0.0127606 0.011666 0.017125 0.0126982 -0.0119181 0.017125 0.0124367 0.0119791 0.017125 0.0123964 -0.0123184 0.017125 0.0122666 -0.0123906 0.017125 0.012256 -0.0124635 0.017125 0.0122507 -0.0125365 0.017125 0.0122507 -0.0126094 0.017125 0.012256 -0.0126816 0.017125 0.0122666 -0.0127529 0.017125 0.0122825 -0.0128229 0.017125 0.0123036 -0.0131941 0.017125 0.0125301 -0.0132448 0.017125 0.0125827 -0.0132915 0.017125 0.0126388 -0.013334 0.017125 0.0126982 -0.0134054 0.017125 0.0128255 -0.013434 0.017125 0.0128928 0.0121089 0.017125 0.0141703 0.0121771 0.017125 0.0141964 0.0122471 0.017125 0.0142175 0.0126094 0.017125 0.014244 0.0135 0.017125 0.01775 0.0129572 0.017125 0.0141393 0.0130819 0.017125 0.0140633 0.013334 0.017125 0.0138018 0.013372 0.017125 0.0137394 0.0135 -0.017 -0.01175 0.0135 -0.017125 0.01075 0.0135 -0.017125 0.01775 -0.01575 0.0205 -0.01475 0.01575 0.0205 -0.01475 0.01575 -0.0205 -0.01475 -0.01575 -0.0205 -0.01475 0.0205 0.0205 0.01625 0.0205 0.01575 0.01625 0.0205 -0.0205 -0.01625 0.0205 -0.01575 0.01625 0.0205 0.01575 -0.01475 -0.019 -0.01725 -0.01625 -0.019 -0.0065 -0.01625 0.0205 -0.0205 0.01625 -0.0205 -0.0205 -0.01625 -0.0205 -0.0205 0.01625 -0.01575 -0.0205 0.01625 0.0205 -0.01575 -0.01475 0.0205 0.0205 -0.01625 -0.0205 0.0205 -0.01625 -0.0205 0.01575 0.01625 -0.0205 0.01575 -0.01475 -0.0205 -0.01575 -0.01475 -0.0125 -0.0115 -0.01775 -0.012427 -0.0115027 -0.01775 -0.0123544 0.0115107 -0.01775 -0.0123544 -0.0115107 -0.01775 
-0.0121428 0.011566 -0.01775 -0.0117801 0.0118059 -0.01775 0.0120106 0.011628 -0.01775 0.0122119 -0.0115424 -0.01775 0.0123544 0.0115107 -0.01775 -0.0126456 -0.0115107 -0.01775 -0.0130518 -0.011666 -0.01775 -0.0132199 -0.0118059 -0.01775 -0.0132686 0.0118603 -0.01775 -0.0133133 0.0119181 -0.01775 -0.0132199 0.0118059 -0.01775 -0.0127174 0.0115239 -0.01775 -0.0127881 0.0115424 -0.01775 -0.0131112 0.0117085 -0.01775 -0.0131673 0.0117552 -0.01775 -0.0133893 -0.0120428 -0.01775 -0.0134675 -0.0122471 -0.01775 -0.0134834 -0.0123184 -0.01775 -0.013494 -0.0126094 -0.01775 -0.0134993 -0.0125365 -0.01775 -0.0134203 -0.0128911 -0.01775 -0.0133893 -0.0129572 -0.01775 -0.0133536 -0.0130209 -0.01775 -0.0132199 -0.0131941 -0.01775 -0.0131673 -0.0132448 -0.01775 -0.0127881 -0.0134576 -0.01775 -0.0126456 -0.0134893 -0.01775 -0.019 -0.019 -0.01775 -0.012427 -0.0134973 -0.01775 -0.0123544 -0.0134893 -0.01775 -0.0122119 -0.0134576 -0.01775 -0.0121428 -0.013434 -0.01775 -0.0120755 -0.0134054 -0.01775 -0.0118327 -0.0132448 -0.01775 -0.0116867 -0.0130819 -0.01775 -0.0115797 -0.0128911 -0.01775 -0.0115536 -0.0128229 -0.01775 -0.0115166 -0.0126816 -0.01775 -0.0115007 -0.0124635 -0.01775 0.011506 -0.0123906 -0.01775 0.0115166 -0.0123184 -0.01775 0.0115325 -0.0122471 -0.01775 -0.0115536 -0.0121771 -0.01775 -0.0115797 -0.0121089 -0.01775 -0.0133536 0.0119791 -0.01775 -0.013494 0.0126094 -0.01775 -0.0134464 0.0128229 -0.01775 -0.0133893 0.0129572 -0.01775 -0.0133133 0.0130819 -0.01775 -0.0132686 0.0131397 -0.01775 -0.0132199 0.0131941 -0.01775 -0.0131112 0.0132915 -0.01775 -0.0130518 0.013334 -0.01775 -0.0128572 0.013434 -0.01775 -0.0127881 0.0134576 -0.01775 -0.0126456 0.0134893 -0.01775 -0.012427 0.0134973 -0.01775 -0.0120755 0.0134054 -0.01775 -0.019 0.019 -0.01775 -0.0120106 0.013372 -0.01775 0.0120106 0.013372 -0.01775 -0.0117314 0.0131397 -0.01775 -0.0116107 0.0129572 -0.01775 -0.0115797 0.0128911 -0.01775 0.011506 0.0126094 -0.01775 0.0115007 0.0125365 -0.01775 0.0115325 0.0122471 -0.01775 -0.0116464 0.0119791 -0.01775 0.0116464 0.0119791 -0.01775 0.0117314 0.0118603 -0.01775 -0.0118888 -0.0132915 -0.01775 0.0118327 -0.0132448 -0.01775 0.0116867 -0.0130819 -0.01775 0.0116107 -0.0129572 -0.01775 -0.0116107 -0.0129572 -0.01775 0.0115797 -0.0128911 -0.01775 0.0115325 -0.0127529 -0.01775 -0.0115325 -0.0127529 -0.01775 0.0115166 -0.0126816 -0.01775 0.011506 -0.0126094 -0.01775 0.0115007 -0.0125365 -0.01775 -0.0115007 -0.0125365 -0.01775 0.0115007 -0.0124635 -0.01775 -0.0115166 -0.0123184 -0.01775 0.0115797 -0.0121089 -0.01775 0.0116107 -0.0120428 -0.01775 0.0116464 -0.0119791 -0.01775 -0.0116464 -0.0119791 -0.01775 -0.0118888 0.0117085 -0.01775 -0.0119482 0.011666 -0.01775 -0.0120106 -0.0116279 -0.01775 -0.0122119 -0.0115424 -0.01775 -0.0122826 0.0115239 -0.01775 0.012427 0.0115027 -0.01775 0.0122826 0.0115239 -0.01775 0.0122119 0.0115424 -0.01775 0.0121428 -0.011566 -0.01775 0.0121428 0.011566 -0.01775 0.0120755 -0.0115946 -0.01775 0.0118327 -0.0117552 -0.01775 0.0118888 0.0117085 -0.01775 0.0120755 -0.0134054 -0.01775 0.0121428 -0.013434 -0.01775 0.0122119 -0.0134576 -0.01775 0.0123544 -0.0134893 -0.01775 0.0122826 -0.0134761 -0.01775 0.012427 -0.0134973 -0.01775 0.0127174 -0.0134761 -0.01775 0.0131673 -0.0132448 -0.01775 0.0132199 -0.0131941 -0.01775 0.0132686 -0.0131397 -0.01775 0.019 -0.019 -0.01775 0.0133893 -0.0129572 -0.01775 0.0134203 -0.0128911 -0.01775 0.0134464 -0.0128229 -0.01775 0.0134834 -0.0126816 -0.01775 0.0134675 -0.0122471 -0.01775 0.0134203 -0.0121089 -0.01775 0.0133133 -0.0119181 -0.01775 
0.0133133 0.0119181 -0.01775 0.0131673 0.0117552 -0.01775 0.0130518 0.011666 -0.01775 0.0129245 0.0115946 -0.01775 0.0129245 -0.0115946 -0.01775 0.0128572 -0.011566 -0.01775 0.0127881 -0.0115424 -0.01775 0.0127174 -0.0115239 -0.01775 0.0131112 0.0117085 -0.01775 0.0128572 0.011566 -0.01775 0.0127174 0.0115239 -0.01775 0.0126456 0.0115107 -0.01775 0.0125 0.0135 -0.01775 0.012427 0.0134973 -0.01775 -0.0118888 0.0132915 -0.01775 0.0118888 0.0132915 -0.01775 -0.0116867 0.0130819 -0.01775 -0.0116464 0.0130209 -0.01775 0.0116107 0.0129572 -0.01775 -0.0115166 0.0126816 -0.01775 -0.011506 0.0123906 -0.01775 -0.0115536 0.0121771 -0.01775 0.0115536 0.0121771 -0.01775 -0.0115797 0.0121089 -0.01775 0.0115797 0.0121089 -0.01775 -0.0116107 0.0120428 -0.01775 -0.0116867 0.0119181 -0.01775 0.0116867 0.0119181 -0.01775 -0.0117314 0.0118603 -0.01775 0.0134464 0.0121771 -0.01775 0.0134834 0.0123184 -0.01775 0.0134993 0.0124635 -0.01775 0.0134834 0.0126816 -0.01775 0.0134464 0.0128229 -0.01775 0.0133536 0.0130209 -0.01775 0.019 0.019 -0.01775 0.0132686 0.0131397 -0.01775 0.0131112 0.0132915 -0.01775 0.0129894 0.013372 -0.01775 0.0128572 0.013434 -0.01775 -0.0125 0.0115 -0.01775 0.0115107 -0.019 0.0131044 0.0115424 -0.019 0.0129619 -0.0117085 -0.019 0.0126388 0.0119791 -0.019 0.0123964 0.0119181 -0.019 0.0124367 0.0120428 -0.019 0.0123607 0.0121771 -0.019 0.0123036 0.0123906 -0.019 0.012256 0.0124635 -0.019 0.0122507 0.0126094 -0.019 0.012256 0.0126816 -0.019 0.0122666 0.0128911 -0.019 0.0123297 0.0128229 -0.019 0.0123036 0.0130209 -0.019 0.0123964 0.0129572 -0.019 0.0123607 0.0130819 -0.019 0.0124367 0.013434 -0.019 0.0128928 0.0134576 -0.019 0.0129619 0.0135 -0.019 0.01075 0.0134893 -0.019 0.0131044 0.01725 -0.019 -0.01625 0.013372 -0.019 -0.0122394 0.0128911 -0.019 -0.0126703 0.0128229 -0.019 -0.0126964 0.0127529 -0.019 -0.0127175 0.0124635 -0.019 -0.0127493 0.0123184 -0.019 -0.0127334 0.0122471 -0.019 -0.0127175 0.0121771 -0.019 -0.0126964 0.0118603 -0.019 -0.0125186 0.0117552 -0.019 -0.0124173 0.0117085 -0.019 -0.0123612 0.0116279 -0.019 -0.0122394 0.0115424 -0.019 -0.0114619 0.011566 -0.019 -0.0113928 0.0115946 -0.019 -0.0113255 0.0116279 -0.019 -0.0112606 0.0121089 -0.019 -0.0108297 0.0121771 -0.019 -0.0108036 0.0125365 -0.019 -0.0107507 0.0126816 -0.019 -0.0107666 0.0128911 -0.019 -0.0108297 0.0130209 -0.019 -0.0108964 0.0135 -0.019 -0.00425 0.013372 -0.019 -0.0112606 0.013434 -0.019 -0.0113928 0.011566 -0.019 0.0128928 0.0115946 -0.019 0.0128255 0.0117085 -0.019 0.0126388 -0.0119181 -0.019 0.0124367 -0.0124635 -0.019 0.0122507 -0.0125365 -0.019 0.0122507 -0.0128229 -0.019 0.0123036 -0.0127529 -0.019 0.0122825 -0.0130819 -0.019 0.0124367 -0.0135 -0.019 0.01075 -0.0131941 -0.019 0.0125301 -0.013372 -0.019 0.0127606 -0.0134576 -0.019 0.0129619 -0.0134576 -0.019 0.0135381 -0.013434 -0.019 0.0136072 -0.0134054 -0.019 0.0136745 -0.01725 -0.019 0.01775 -0.0132448 -0.019 0.0139173 -0.0131397 -0.019 0.0140186 -0.0130209 -0.019 0.0141036 -0.0126816 -0.019 0.0142334 -0.0125365 -0.019 0.0142493 -0.0123184 -0.019 0.0142334 -0.0122471 -0.019 0.0142175 -0.0121771 -0.019 0.0141964 -0.0120428 -0.019 0.0141393 0.0119791 -0.019 0.0141036 -0.0119791 -0.019 0.0141036 -0.0119181 -0.019 0.0140633 0.0117085 -0.019 0.0138612 -0.0117552 -0.019 0.0139173 0.011666 -0.019 0.0138018 -0.011566 -0.019 0.0136072 -0.0115424 -0.019 0.0135381 -0.0115 -0.019 0.01325 0.0119181 -0.019 0.0140633 -0.0116279 -0.019 0.0137394 0.0115946 -0.019 0.0136745 0.011566 -0.019 0.0136072 0.0115424 -0.019 0.0135381 0.0115107 -0.019 0.0133956 0.0115027 
-0.019 0.013323 0.0115 -0.019 0.01325 -0.0115 -0.019 -0.01175 -0.0115107 -0.019 -0.0116044 -0.0115239 -0.019 -0.0115326 -0.0115027 -0.019 -0.011677 -0.0115424 -0.019 -0.0120381 -0.0117085 -0.019 -0.0123612 -0.0117552 -0.019 -0.0124173 -0.0120428 -0.019 -0.0126393 -0.0122471 -0.019 -0.0127175 -0.0124635 -0.019 -0.0127493 -0.01725 -0.019 -0.01625 -0.0126094 -0.019 -0.012744 -0.0126816 -0.019 -0.0127334 -0.0128229 -0.019 -0.0126964 -0.0129572 -0.019 -0.0126393 -0.0130209 -0.019 -0.0126036 -0.0130819 -0.019 -0.0125633 -0.0131397 -0.019 -0.0125186 -0.0132915 -0.019 -0.0123612 -0.013334 -0.019 -0.0123018 -0.013372 -0.019 -0.0122394 -0.0134054 -0.019 -0.0121745 -0.013434 -0.019 -0.0121072 -0.0134761 -0.019 -0.0119674 -0.0135 -0.019 -0.01175 -0.0134973 -0.019 -0.011677 -0.0134893 -0.019 -0.0116044 -0.0134761 -0.019 -0.0115326 -0.0132915 -0.019 -0.0111388 -0.0132448 -0.019 -0.0110827 -0.0131941 -0.019 -0.0110301 -0.0131397 -0.019 -0.0109814 -0.0130209 -0.019 -0.0108964 -0.0128911 -0.019 -0.0108297 -0.0135 -0.019 -0.00425 -0.0128229 -0.019 -0.0108036 -0.0126094 -0.019 -0.010756 -0.0124635 -0.019 -0.0107507 -0.0123184 -0.019 -0.0107666 -0.0118059 -0.019 -0.0110301 -0.0117552 -0.019 -0.0110827 -0.0065 -0.019 -0.01625 -0.011566 -0.019 -0.0113928 -0.011628 -0.019 -0.0112606 0.0134893 -0.019 0.0133956 0.0134973 -0.019 0.013177 0.0122471 -0.019 0.0142175 0.0124635 -0.019 0.0142493 0.0127529 -0.019 0.0142175 0.0130209 -0.019 0.0141036 0.01725 -0.019 0.01775 0.0131397 -0.019 0.0140186 0.0132448 -0.019 0.0139173 0.0134054 -0.019 0.0136745 0.013434 -0.019 0.0136072 0.0134761 -0.019 0.0134674 0.019 0.0134893 0.0133956 0.019 0.0135 0.01325 0.019 0.0134973 0.013177 0.019 0.013372 0.0127606 0.019 0.013334 0.0126982 0.019 0.0132448 0.0125827 0.019 0.0135 0.01075 0.019 0.0131397 0.0124814 0.019 0.0124635 0.0122507 0.019 0.0123184 0.0122666 0.019 0.0122471 0.0122825 0.019 0.0121771 0.0123036 0.019 0.0121089 0.0123297 0.019 -0.0119181 0.0124367 0.019 -0.0119791 0.0123964 0.019 -0.0120428 0.0123607 0.019 -0.0121771 0.0123036 0.019 -0.0124635 0.0122507 0.019 -0.0125365 0.0122507 0.019 -0.0129572 0.0123607 0.019 -0.0130209 0.0123964 0.019 -0.0130819 0.0124367 0.019 -0.0132915 0.0126388 0.019 -0.0135 0.01075 0.019 -0.013372 0.0127606 0.019 -0.013434 0.0128928 0.019 -0.0134054 -0.0113255 0.019 -0.013334 -0.0111982 0.019 -0.0131941 -0.0110301 0.019 -0.0129572 -0.0108607 0.019 -0.0135 -0.00425 0.019 -0.0128229 -0.0108036 0.019 -0.0126816 -0.0107666 0.019 -0.0126094 -0.010756 0.019 -0.0125365 -0.0107507 0.019 -0.0124635 -0.0107507 0.019 -0.0122471 -0.0107825 0.019 -0.0121089 -0.0108297 0.019 -0.0119181 -0.0109367 0.019 -0.0118603 -0.0109814 0.019 -0.0118059 -0.0110301 0.019 -0.0117085 -0.0111388 0.019 -0.0116279 -0.0112606 0.019 -0.0115107 -0.0116044 0.019 -0.0115027 -0.011823 0.019 -0.0115107 -0.0118956 0.019 -0.0116279 -0.0122394 0.019 -0.0115946 -0.0121745 0.019 -0.011666 -0.0123018 0.019 -0.0117085 -0.0123612 0.019 -0.0118603 -0.0125186 0.019 -0.0119181 -0.0125633 0.019 -0.0120428 -0.0126393 0.019 -0.0121089 -0.0126703 0.019 -0.0123906 -0.012744 0.019 -0.0130209 -0.0126036 0.019 -0.0130819 -0.0125633 0.019 -0.013372 -0.0122394 0.019 -0.0134054 -0.0121745 0.019 -0.0134893 -0.0118956 0.019 -0.01725 -0.01625 0.019 -0.0134893 -0.0116044 0.019 0.0130819 -0.0109367 0.019 0.0128911 -0.0108297 0.019 0.0128229 -0.0108036 0.019 0.0126094 -0.010756 0.019 0.0126816 -0.0107666 0.019 0.0125365 -0.0107507 0.019 0.0121771 -0.0108036 0.019 0.0122471 -0.0107825 0.019 0.0121089 -0.0108297 0.019 0.0119791 -0.0108964 0.019 0.0120428 
-0.0108607 0.019 0.0119181 -0.0109367 0.019 0.0118603 -0.0109814 0.019 0.0135 -0.00425 0.019 0.011628 -0.0112606 0.019 0.0115424 -0.0114619 0.019 0.0115239 -0.0115326 0.019 0.0115107 -0.0116044 0.019 0.0115027 -0.011677 0.019 0.0115 -0.01175 0.019 0.0115424 -0.0120381 0.019 0.011566 -0.0121072 0.019 0.0065 -0.01625 0.019 0.0118059 -0.0124699 0.019 0.0119181 -0.0125633 0.019 0.0119791 -0.0126036 0.019 0.0121771 -0.0126964 0.019 0.0123906 -0.012744 0.019 0.0124635 -0.0127493 0.019 0.0117552 0.0125827 0.019 0.0117085 0.0126388 0.019 -0.0115424 0.0129619 0.019 0.011566 0.0128928 0.019 0.0115239 0.0130326 0.019 0.0115027 0.013177 0.019 0.0115 0.01325 0.019 0.0117085 0.0138612 0.019 -0.0118059 0.0139699 0.019 -0.0120428 0.0141393 0.019 -0.01725 0.01775 0.019 -0.0122471 0.0142175 0.019 -0.0121771 0.0141964 0.019 -0.0123184 0.0142334 0.019 -0.0124635 0.0142493 0.019 -0.0126816 0.0142334 0.019 -0.0126094 0.014244 0.019 -0.0130209 0.0141036 0.019 -0.0131941 0.0139699 0.019 -0.0132915 0.0138612 0.019 -0.013334 0.0138018 0.019 -0.013372 0.0137394 0.019 -0.0134576 0.0135381 0.019 -0.013434 0.0136072 0.019 -0.0134893 0.0133956 0.019 -0.0134973 0.013323 0.019 -0.0134973 0.013177 0.019 -0.011566 0.0128928 0.019 0.0115107 0.0131044 0.019 -0.0115107 0.0133956 0.019 0.0115107 0.0133956 0.019 -0.0115424 0.0135381 0.019 -0.0115946 0.0136745 0.019 -0.011628 0.0137394 0.019 -0.011666 0.0138018 0.019 -0.0117552 0.0139173 0.019 0.0117552 0.0139173 0.019 -0.0118603 0.0140186 0.019 -0.0119181 0.0140633 0.019 0.0119181 0.0140633 0.019 0.0120428 0.0141393 0.019 0.0121089 0.0141703 0.019 0.0121771 0.0141964 0.019 0.0125365 0.0142493 0.019 0.0126094 0.014244 0.019 0.0128229 0.0141964 0.019 0.0131941 0.0139699 0.019 0.0132448 0.0139173 0.019 0.013372 0.0137394 0.019 0.0134054 0.0136745 0.019 0.0134576 0.0135381 0.019 0.01725 -0.01625 0.019 0.0134893 -0.0116044 0.019 0.0134893 -0.0118956 0.019 0.013434 -0.0121072 0.019 0.0134054 -0.0121745 0.019 0.0132448 -0.0124173 0.019 0.0131941 -0.0124699 0.019 0.0131397 -0.0125186 0.019 0.0130819 -0.0125633 0.019 0.0129572 -0.0126393 0.019 0.0126816 -0.0127334 -0.0134973 0.019 -0.011823 -0.0134576 0.019 -0.0120381 -0.0134054 0.019 -0.0121745 -0.013434 0.019 -0.0121072 -0.013334 0.019 -0.0123018 -0.0131397 0.019 -0.0125186 -0.0130209 0.019 -0.0126036 -0.0129572 0.019 -0.0126393 -0.0125365 0.019 -0.0127493 -0.0122471 0.019 -0.0127175 -0.0119791 0.019 -0.0126036 -0.0119181 0.019 -0.0125633 -0.0118603 0.019 -0.0125186 -0.0117552 0.019 -0.0124173 -0.0115424 0.019 -0.0120381 -0.0115027 0.019 -0.011823 -0.0115027 0.019 -0.011677 -0.011566 0.019 -0.0113928 -0.0115946 0.019 -0.0113255 -0.0065 0.019 -0.01625 -0.0118059 0.019 -0.0110301 -0.0119791 0.019 -0.0108964 -0.0120428 0.019 -0.0108607 -0.0121771 0.019 -0.0108036 -0.0122471 0.019 -0.0107825 -0.0123184 0.019 -0.0107666 -0.0123906 0.019 -0.010756 -0.0124635 0.019 -0.0107507 -0.0125365 0.019 -0.0107507 -0.0126816 0.019 -0.0107666 -0.0129572 0.019 -0.0108607 -0.0130209 0.019 -0.0108964 -0.0131397 0.019 -0.0109814 -0.013334 0.019 -0.0111982 -0.0135 0.019 -0.00425 -0.013372 0.019 -0.0112606 -0.0134054 0.019 -0.0113255 -0.0134576 0.019 -0.0114619 -0.0123906 0.019 -0.012744 -0.0134893 0.019 0.0133956 -0.013372 0.019 0.0137394 -0.013334 0.019 0.0138018 -0.0130819 0.019 0.0140633 -0.0130209 0.019 0.0141036 -0.0129572 0.019 0.0141393 -0.0126094 0.019 0.014244 -0.0125365 0.019 0.0142493 -0.0121089 0.019 0.0141703 -0.0118603 0.019 0.0140186 0.0118603 0.019 0.0140186 0.0117085 0.019 0.0138612 -0.0116279 0.019 0.0137394 0.0115946 0.019 0.0136745 
-0.0115027 0.019 0.013323 0.0115107 0.019 0.0133956 -0.0115239 0.019 0.0130326 -0.0115424 0.019 0.0129619 -0.0115946 0.019 0.0128255 0.0117085 0.019 0.0126388 0.0117552 0.019 0.0125827 -0.0118059 0.019 0.0125301 -0.0119181 0.019 0.0124367 -0.0121089 0.019 0.0123297 -0.0123906 0.019 0.012256 -0.0126094 0.019 0.012256 -0.0127529 0.019 0.0122825 -0.0128229 0.019 0.0123036 -0.0130209 0.019 0.0123964 -0.0131941 0.019 0.0125301 -0.0132915 0.019 0.0126388 -0.013434 0.019 0.0128928 -0.0134893 0.019 0.0131044 -0.0134761 0.019 0.0130326 0.013434 0.019 -0.0113928 0.0134893 0.019 -0.0116044 0.0134761 0.019 -0.0115326 0.0134893 0.019 -0.0118956 0.0134973 0.019 -0.011823 0.01725 0.019 -0.01625 0.0132915 0.019 -0.0123612 0.013334 0.019 -0.0123018 0.0132448 0.019 -0.0124173 0.0128911 0.019 -0.0126703 0.0128229 0.019 -0.0126964 0.0126816 0.019 -0.0127334 0.0126094 0.019 -0.012744 0.0124635 0.019 -0.0127493 0.0123906 0.019 -0.012744 0.0119791 0.019 -0.0126036 0.0118603 0.019 -0.0125186 0.0065 0.019 -0.01625 0.0115946 0.019 -0.0121745 0.0115107 0.019 -0.0118956 0.0115107 0.019 -0.0116044 0.0115239 0.019 -0.0115326 0.0115946 0.019 -0.0113255 0.0117085 0.019 -0.0111388 0.0135 0.019 0.01325 0.0134054 0.019 0.0136745 0.013372 0.019 0.0137394 0.0132915 0.019 0.0138612 0.0129572 0.019 0.0141393 0.0128229 0.019 0.0141964 0.0126816 0.019 0.0142334 0.01725 0.019 0.01775 0.0125365 0.019 0.0142493 0.0123184 0.019 0.0142334 0.0121771 0.019 0.0141964 0.0121089 0.019 0.0141703 -0.0115424 0.019 0.0135381 0.011628 0.019 0.0137394 -0.0118059 0.019 0.0139699 -0.0117552 0.019 0.0125827 -0.011666 0.019 0.0126982 0.0115946 0.019 0.0128255 0.0115424 0.019 0.0129619 0.0115107 0.019 0.0131044 -0.0115107 0.019 0.0131044 -0.0115027 0.019 0.013177 0.0115 0.019 0.01325 0.0118059 0.019 0.0125301 0.0119181 0.019 0.0124367 0.0121089 0.019 0.0123297 0.0123184 0.019 0.0122666 0.0123906 0.019 0.012256 0.0124635 0.019 0.0122507 0.0126094 0.019 0.012256 0.0126816 0.019 0.0122666 0.0131397 0.019 0.0124814 0.0135 0.019 0.01075 0.0132448 0.019 0.0125827 0.0134054 0.019 0.0128255 0.013434 0.019 0.0128928 0.013334 0.019 -0.0111982 0.0132915 0.019 -0.0111388 0.0132448 0.019 -0.0110827 0.0130819 0.019 -0.0109367 0.0126094 0.019 -0.010756 0.0125365 0.019 -0.0107507 0.0124635 0.019 -0.0107507 0.0135 0.019 -0.00425 0.0120428 0.019 -0.0108607 0.0117552 0.019 -0.0110827 0.01575 0.0205 0.01625 0.019 0.019 0.01775 0.019 0.01725 0.01775 0.01525 0.0135 0.01775 0.017125 0.0135 0.01775 0.01525 0.01525 0.01775 0.01525 -0.01525 0.01775 0.0135 -0.01525 0.01775 -0.01525 -0.01525 0.01775 -0.017125 0.0135 0.01775 -0.0135 -0.01525 0.01775 0.019 -0.019 0.01775 0.01575 -0.0205 0.01625 -0.019 -0.019 0.01775 -0.0205 -0.01575 0.01625 -0.01575 0.0205 0.01625 -0.0205 0.0205 0.01625 -0.019 0.019 0.01775 -0.0135 0.017125 0.01075 0.0134096 -0.01525 -0.00283459 0.0135 -0.01525 -0.001 0.0133413 -0.01525 -0.00270936 0.0132071 -0.01525 -0.00254289 0.0131549 -0.01525 -0.00249425 0.0130993 -0.01525 -0.00244946 0.0130406 -0.01525 -0.00240875 0.0127126 -0.01525 -0.00227285 0.0125 -0.01525 -0.00225 -0.0125713 -0.01525 -0.00225255 -0.0134771 -0.01525 -0.00303743 -0.0133413 -0.01525 -0.00270936 -0.0132557 -0.01525 -0.00259514 -0.0129792 -0.01525 -0.00237232 -0.0126423 -0.01525 -0.00226018 -0.0152608 -0.0107544 -0.001 -0.0153468 -0.0112559 -0.001 -0.0153815 -0.0113785 -0.001 -0.015631 -0.0119631 -0.001 -0.0161742 -0.0126659 -0.001 -0.0164661 -0.012912 -0.001 -0.0165701 -0.0129855 -0.001 -0.0166771 -0.0130546 -0.001 -0.0168994 -0.0131788 -0.001 -0.0171314 -0.0132837 -0.001 -0.0172506 
-0.0133286 -0.001 -0.0174941 -0.0134032 -0.001 -0.01525 -0.0135 -0.001 -0.017618 -0.0134327 -0.001 -0.017743 -0.0134568 -0.001 -0.0178689 -0.0134757 -0.001 -0.0179956 -0.0134892 -0.001 -0.0181227 -0.0134973 -0.001 0.01525 -0.0135 -0.001 0.01525 0.01525 -0.001 -0.01525 0.0105 -0.000999999 -0.0152527 0.0106273 -0.000999999 -0.0153468 0.0112559 -0.000999999 -0.0153815 0.0113785 -0.000999999 -0.01825 0.0135 -0.000999999 -0.0181227 0.0134973 -0.000999999 -0.0178689 0.0134757 -0.000999999 -0.0174941 0.0134032 -0.000999999 -0.0173715 0.0133685 -0.000999999 -0.0172506 0.0133286 -0.000999999 -0.0171314 0.0132837 -0.000999999 -0.0170143 0.0132337 -0.000999999 -0.0165701 0.0129855 -0.000999999 -0.0164661 0.012912 -0.000999999 -0.016268 0.012752 -0.000999999 -0.0161742 0.0126659 -0.000999999 -0.0159159 0.0123847 -0.000999999 -0.01525 0.0135 -0.000999999 -0.0135 -0.01525 -0.001 -0.01525 -0.01525 -0.001 -0.01525 -0.0135 0.01775 0.0135 0.01525 0.01775 0.01525 0.0135 -0.001 0.01525 -0.01525 -0.001 -0.0135 0.01525 -0.000999999 -0.01725 0.019 -0.01625 -0.01725 0.019 0.01775 - - - - - - - - - - 0.0365165 -0.999333 0 0.109369 -0.994001 0 0.109369 -0.994001 0 0.18164 -0.983365 0 0.18164 -0.983365 0 0.252935 -0.967483 0 0.322876 -0.946441 0 0.391111 -0.920344 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.69406 -0.719917 0 0.74478 -0.667311 0 0.791492 -0.611179 0 0.834001 -0.551763 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934019 -0.357224 0 0.9576 -0.288102 0 0.976074 -0.217441 0 0.989345 -0.145593 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989344 0.145595 0 0.976074 0.217438 0 0.9576 0.288102 0 0.934019 0.357224 0 0.905448 0.424457 0 0.872048 0.489421 0 0.833997 0.551769 0 0.791497 0.611173 0 0.744774 0.667317 0 0.694066 0.719911 0 0.639676 0.768644 0 0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391111 0.920344 0 0.322876 0.946441 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109369 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109371 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322872 0.946443 0 -0.391115 0.920342 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.69406 0.719917 0 -0.74478 0.66731 0 -0.791497 0.611173 0 -0.833997 0.551769 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934015 0.357235 0 -0.957603 0.288091 0 -0.976074 0.217438 0 -0.989344 0.145595 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989345 -0.145593 0 -0.976074 -0.217441 0 -0.957603 -0.288091 0 -0.934015 -0.357235 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.834001 -0.551763 0 -0.791492 -0.611179 0 -0.744786 -0.667303 0 -0.694053 -0.719923 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391115 -0.920342 0 -0.322872 -0.946443 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109371 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.252935 -0.967483 0 0.322876 -0.946441 0 0.391111 -0.920344 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.69406 -0.719917 0 0.74478 -0.667311 0 0.791492 -0.611179 0 0.834001 -0.551763 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934019 -0.357224 0 0.9576 -0.288102 0 0.976074 -0.217441 0 0.989345 -0.145593 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989344 0.145595 0 0.976074 0.217438 0 0.9576 0.288102 0 0.934019 0.357224 0 0.905448 0.424457 0 0.872048 0.489421 0 0.833997 0.551769 0 0.791497 0.611173 0 0.744774 0.667317 0 0.694066 0.719911 0 0.639676 0.768644 0 
0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391111 0.920344 0 0.322876 0.946441 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109369 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109371 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322872 0.946443 0 -0.391115 0.920342 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.69406 0.719917 0 -0.74478 0.66731 0 -0.791497 0.611173 0 -0.833997 0.551769 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934015 0.357235 0 -0.957603 0.288091 0 -0.976074 0.217438 0 -0.989344 0.145595 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989345 -0.145593 0 -0.976074 -0.217441 0 -0.957603 -0.288091 0 -0.934015 -0.357235 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.834001 -0.551763 0 -0.791492 -0.611179 0 -0.744786 -0.667303 0 -0.694053 -0.719923 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391115 -0.920342 0 -0.322872 -0.946443 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109371 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.18164 -0.983365 0 0.18164 -0.983365 0 0.252935 -0.967483 0 0.322872 -0.946443 0 0.391115 -0.920342 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.694053 -0.719923 0 0.744786 -0.667303 0 0.791492 -0.611179 0 0.834001 -0.551763 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934015 -0.357235 0 0.957603 -0.288091 0 0.976074 -0.217441 0 0.989345 -0.145593 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989344 0.145595 0 0.976074 0.217438 0 0.957603 0.288091 0 0.934015 0.357235 0 0.905448 0.424457 0 0.872048 0.489421 0 0.833997 0.551769 0 0.791497 0.611173 0 0.74478 0.66731 0 0.69406 0.719917 0 0.639676 0.768644 0 0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391115 0.920342 0 0.322872 0.946443 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109371 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109369 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322876 0.946441 0 -0.391111 0.920344 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.694066 0.719911 0 -0.744774 0.667317 0 -0.791497 0.611173 0 -0.833997 0.551769 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934019 0.357224 0 -0.9576 0.288102 0 -0.976074 0.217438 0 -0.989344 0.145595 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989345 -0.145593 0 -0.976074 -0.217441 0 -0.9576 -0.288102 0 -0.934019 -0.357224 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.834001 -0.551763 0 -0.791492 -0.611179 0 -0.74478 -0.667311 0 -0.69406 -0.719917 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391111 -0.920344 0 -0.322876 -0.946441 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109369 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.252935 -0.967483 0 0.322872 -0.946443 0 0.391115 -0.920342 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.694053 -0.719923 0 0.744786 -0.667303 0 0.791492 -0.611179 0 0.834001 -0.551763 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934015 -0.357235 0 0.957603 -0.288091 0 0.976074 -0.217441 0 0.989345 -0.145593 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989344 0.145595 0 0.976074 0.217438 0 0.957603 0.288091 0 0.934015 0.357235 0 0.905448 0.424457 0 0.872048 0.489421 0 0.833997 0.551769 0 0.791497 0.611173 0 0.74478 0.66731 0 0.69406 0.719917 0 0.639676 0.768644 0 
0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391115 0.920342 0 0.322872 0.946443 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109371 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109369 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322876 0.946441 0 -0.391111 0.920344 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.694066 0.719911 0 -0.744774 0.667317 0 -0.791497 0.611173 0 -0.833997 0.551769 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934019 0.357224 0 -0.9576 0.288102 0 -0.976074 0.217438 0 -0.989344 0.145595 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989345 -0.145593 0 -0.976074 -0.217441 0 -0.9576 -0.288102 0 -0.934019 -0.357224 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.834001 -0.551763 0 -0.791492 -0.611179 0 -0.74478 -0.667311 0 -0.69406 -0.719917 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391111 -0.920344 0 -0.322876 -0.946441 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109369 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.18164 -0.983365 0 0.18164 -0.983365 0 0.252935 -0.967483 0 0.322872 -0.946443 0 0.391115 -0.920342 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.69406 -0.719917 0 0.74478 -0.66731 0 0.791497 -0.611173 0 0.833997 -0.551769 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934015 -0.357235 0 0.957603 -0.288091 0 0.976074 -0.217438 0 0.989344 -0.145595 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989345 0.145593 0 0.976074 0.217441 0 0.957603 0.288091 0 0.934015 0.357235 0 0.905448 0.424457 0 0.872048 0.489421 0 0.834001 0.551763 0 0.791492 0.611179 0 0.744786 0.667303 0 0.694053 0.719923 0 0.639676 0.768644 0 0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391115 0.920342 0 0.322872 0.946443 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109371 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109369 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322876 0.946441 0 -0.391111 0.920344 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.69406 0.719917 0 -0.74478 0.667311 0 -0.791492 0.611179 0 -0.834001 0.551763 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934019 0.357224 0 -0.9576 0.288102 0 -0.976074 0.217441 0 -0.989345 0.145593 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989344 -0.145595 0 -0.976074 -0.217438 0 -0.9576 -0.288102 0 -0.934019 -0.357224 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.833997 -0.551769 0 -0.791497 -0.611173 0 -0.744774 -0.667317 0 -0.694066 -0.719911 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391111 -0.920344 0 -0.322876 -0.946441 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109369 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.252935 -0.967483 0 0.322872 -0.946443 0 0.391115 -0.920342 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.69406 -0.719917 0 0.74478 -0.66731 0 0.791497 -0.611173 0 0.833997 -0.551769 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934015 -0.357235 0 0.957603 -0.288091 0 0.976074 -0.217438 0 0.989344 -0.145595 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989345 0.145593 0 0.976074 0.217441 0 0.957603 0.288091 0 0.934015 0.357235 0 0.905448 0.424457 0 0.872048 0.489421 0 0.834001 0.551763 0 0.791492 0.611179 0 0.744786 0.667303 0 0.694053 0.719923 0 0.639676 0.768644 0 0.581862 
0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391115 0.920342 0 0.322872 0.946443 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109371 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109369 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322876 0.946441 0 -0.391111 0.920344 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.69406 0.719917 0 -0.74478 0.667311 0 -0.791492 0.611179 0 -0.834001 0.551763 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934019 0.357224 0 -0.9576 0.288102 0 -0.976074 0.217441 0 -0.989345 0.145593 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989344 -0.145595 0 -0.976074 -0.217438 0 -0.9576 -0.288102 0 -0.934019 -0.357224 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.833997 -0.551769 0 -0.791497 -0.611173 0 -0.744774 -0.667317 0 -0.694066 -0.719911 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391111 -0.920344 0 -0.322876 -0.946441 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109369 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.109369 -0.994001 0 0.109369 -0.994001 0 0.18164 -0.983365 0 0.18164 -0.983365 0 0.252935 -0.967483 0 0.322876 -0.946441 0 0.391111 -0.920344 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.694066 -0.719911 0 0.744774 -0.667317 0 0.791497 -0.611173 0 0.833997 -0.551769 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934019 -0.357224 0 0.9576 -0.288102 0 0.976074 -0.217438 0 0.989344 -0.145595 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989345 0.145593 0 0.976074 0.217441 0 0.9576 0.288102 0 0.934019 0.357224 0 0.905448 0.424457 0 0.872048 0.489421 0 0.834001 0.551763 0 0.791492 0.611179 0 0.74478 0.667311 0 0.69406 0.719917 0 0.639676 0.768644 0 0.581862 0.813288 0 0.520939 0.853594 0 0.457243 0.889342 0 0.391111 0.920344 0 0.322876 0.946441 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109369 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109371 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322872 0.946443 0 -0.391115 0.920342 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.694053 0.719923 0 -0.744786 0.667303 0 -0.791492 0.611179 0 -0.834001 0.551763 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934015 0.357235 0 -0.957603 0.288091 0 -0.976074 0.217441 0 -0.989345 0.145593 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989344 -0.145595 0 -0.976074 -0.217438 0 -0.957603 -0.288091 0 -0.934015 -0.357235 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.833997 -0.551769 0 -0.791497 -0.611173 0 -0.74478 -0.66731 0 -0.69406 -0.719917 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391115 -0.920342 0 -0.322872 -0.946443 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109371 -0.994001 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.252935 -0.967483 0 0.322876 -0.946441 0 0.391111 -0.920344 0 0.457243 -0.889342 0 0.520939 -0.853594 0 0.581862 -0.813288 0 0.639676 -0.768644 0 0.694066 -0.719911 0 0.744774 -0.667317 0 0.791497 -0.611173 0 0.833997 -0.551769 0 0.872048 -0.489421 0 0.905448 -0.424457 0 0.934019 -0.357224 0 0.9576 -0.288102 0 0.976074 -0.217438 0 0.989344 -0.145595 0 0.997332 -0.0729948 0 1 0 0 0.997332 0.0729948 0 0.989345 0.145593 0 0.976074 0.217441 0 0.9576 0.288102 0 0.934019 0.357224 0 0.905448 0.424457 0 0.872048 0.489421 0 0.834001 0.551763 0 0.791492 0.611179 0 0.74478 0.667311 0 0.69406 0.719917 0 0.639676 0.768644 0 0.581862 0.813288 0 
0.520939 0.853594 0 0.457243 0.889342 0 0.391111 0.920344 0 0.322876 0.946441 0 0.252935 0.967483 0 0.18164 0.983365 0 0.109369 0.994001 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.109371 0.994001 0 -0.18164 0.983365 0 -0.252935 0.967483 0 -0.322872 0.946443 0 -0.391115 0.920342 0 -0.457243 0.889342 0 -0.520939 0.853594 0 -0.581862 0.813288 0 -0.639676 0.768644 0 -0.694053 0.719923 0 -0.744786 0.667303 0 -0.791492 0.611179 0 -0.834001 0.551763 0 -0.872048 0.489421 0 -0.905448 0.424457 0 -0.934015 0.357235 0 -0.957603 0.288091 0 -0.976074 0.217441 0 -0.989345 0.145593 0 -0.997332 0.0729948 0 -1 0 0 -0.997332 -0.0729948 0 -0.989344 -0.145595 0 -0.976074 -0.217438 0 -0.957603 -0.288091 0 -0.934015 -0.357235 0 -0.905448 -0.424457 0 -0.872048 -0.489421 0 -0.833997 -0.551769 0 -0.791497 -0.611173 0 -0.74478 -0.66731 0 -0.69406 -0.719917 0 -0.639676 -0.768644 0 -0.581862 -0.813288 0 -0.520939 -0.853594 0 -0.457243 -0.889342 0 -0.391115 -0.920342 0 -0.322872 -0.946443 0 -0.252935 -0.967483 0 -0.18164 -0.983365 0 -0.109371 -0.994001 0 -0.0365165 -0.999333 0 0 0.85083 0.525441 0 0.823314 0.567586 0 0.85083 0.525441 0 0.823314 0.567586 0 0.793725 0.608276 0 0.793725 0.608276 0 0.762109 0.647449 0 0.72858 0.68496 0 0.762109 0.647449 0 0.762109 0.647449 0 0.793725 0.608277 0 0.793725 0.608277 0 0.823314 0.567586 0 0.823314 0.567586 0 0.85083 0.525441 0 0.762109 0.647449 0 0.728581 0.68496 0 0.693185 0.72076 0 0.72858 0.68496 0 0.728581 0.68496 0 0.693185 0.72076 0 0.656069 0.754701 0 0.693185 0.72076 0 0.693185 0.72076 0 0.656069 0.754701 0 0.617262 0.786758 0 0.656069 0.754701 0 0.656069 0.754701 0 0.617262 0.786758 0 0.576906 0.816811 0 0.617262 0.786758 0 0.617262 0.786758 0 0.576906 0.816811 0 0.53508 0.844801 0 0.576906 0.816811 0 0.576906 0.816811 0 0.53508 0.844801 0 0.491905 0.870649 0 0.53508 0.844801 0 0.53508 0.844801 0 0.491905 0.870649 0 0.447476 0.894296 0 0.491905 0.870649 0 0.491905 0.870649 0 0.447476 0.894296 0 0.401926 0.915672 0 0.447476 0.894296 0 0.447476 0.894296 0 0.401926 0.915672 0 0.35535 0.934734 0 0.401926 0.915672 0 0.401926 0.915672 0 0.35535 0.934734 0 0.307882 0.951425 0 0.35535 0.934734 0 0.35535 0.934734 0 0.307882 0.951425 0 0.259624 0.96571 0 0.307882 0.951425 0 0.307882 0.951425 0 0.259624 0.96571 0 0.210709 0.977549 0 0.259624 0.96571 0 0.259624 0.96571 0 0.210709 0.977549 0 0.16127 0.98691 0 0.210709 0.977549 0 0.210709 0.977549 0 0.161269 0.98691 0 0.16127 0.98691 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 0.161709 0.986839 0.000113632 0.212459 0.97717 0.00019721 0.262997 0.964797 0.000280519 0.312725 0.949844 0.000361859 0.361597 0.932334 0.000434048 0.409493 0.912313 0.000512117 0.456306 0.889823 0.000584135 0.501874 0.864941 0.000652503 0.546088 0.837728 0.00072235 0.588841 0.808249 0.000784741 0.629986 
0.776606 0.000848159 0.669449 0.742857 0.000908524 0.707111 0.707102 0.000965873 0.742853 0.669454 0.00102128 0.776606 0.629986 0.00107505 0.808248 0.588841 0.00112566 0.837727 0.546087 0.00117328 0.864941 0.501872 0.00121858 0.889823 0.456304 0.00126107 0.912311 0.409496 0.00130406 0.932334 0.361596 0.00134229 0.949845 0.312717 0.0013755 0.964794 0.263004 0.00140887 0.977146 0.212564 0.00143726 0.986862 0.161561 0.00146993 0.993918 0.110116 0.00149132 0.998293 0.0583947 0 0.999869 0.016214 -0.00146354 0.999671 0.0256073 0 0.161709 0.986839 0 0.161709 0.986839 0 0.212008 0.977268 -2.9903e-05 0.212459 0.97717 2.93169e-05 0.261764 0.965132 -5.18972e-05 0.262998 0.964797 5.13039e-05 0.310827 0.950467 7.22379e-05 0.359066 0.933312 9.25004e-05 0.40639 0.9137 0.000113795 0.452611 0.891708 0.000132253 0.497677 0.867363 0.000151048 0.541427 0.840748 0.000169486 0.58375 0.811934 0.000186256 0.624554 0.780981 0.000203742 0.663706 0.747994 0.000219712 0.701135 0.713029 0.000234979 0.736713 0.676205 0.000250117 0.770367 0.637601 0.000263936 0.801988 0.59734 0.000277541 0.83152 0.555495 0.000290292 0.858868 0.512198 0.000302613 0.88396 0.467562 0.000314449 0.906739 0.421692 0.000325221 0.927132 0.374734 0.000335136 0.945103 0.326774 0.000344873 0.960592 0.277961 0.000354652 0.973561 0.228429 0.000363099 0.98398 0.17828 0.000371018 0.991813 0.127699 0.000377501 0.997052 0.0767313 0.000385143 0.999672 0.0256073 -0.000392453 0.998294 0.0583947 -7.38204e-05 0.312725 0.949844 -9.52249e-05 0.361597 0.932334 -0.000114222 0.409493 0.912313 -0.000134766 0.456306 0.889823 -0.000153718 0.501874 0.864941 -0.000171712 0.546088 0.837728 -0.000190092 0.588841 0.808249 -0.000206511 0.629986 0.776606 -0.000223198 0.66945 0.742857 -0.000239086 0.707111 0.707102 -0.000254176 0.742853 0.669455 -0.000268758 0.776606 0.629986 -0.000282906 0.808249 0.588841 -0.000296224 0.837728 0.546088 -0.000308758 0.864942 0.501872 -0.000320678 0.889824 0.456304 -0.000331859 0.912312 0.409497 -0.000343172 0.932335 0.361596 -0.000353232 0.949846 0.312717 -0.000361973 0.964795 0.263005 -0.000370754 0.977147 0.212564 -0.000378225 0.986863 0.161561 -0.000386822 0.993919 0.110116 -0.00143451 0.997051 0.0767312 -0.00140987 0.991812 0.127699 -0.00137978 0.983979 0.178279 -0.00134768 0.97356 0.228429 -0.00131052 0.960591 0.277961 -0.00127352 0.945102 0.326774 -0.00123584 0.927132 0.374734 -0.0011949 0.906738 0.421692 -0.00114993 0.883959 0.467562 -0.00110311 0.858867 0.512198 -0.00105466 0.831519 0.555495 -0.00100296 0.801988 0.59734 -0.000950445 0.770366 0.637601 -0.000892919 0.736713 0.676205 -0.000834906 0.701134 0.713029 -0.000774215 0.663706 0.747993 -0.000707763 0.624554 0.780981 -0.000644051 0.58375 0.811933 -0.000573981 0.541427 0.840748 -0.000502559 0.497677 0.867363 -0.000432419 0.452611 0.891708 -0.000351503 0.40639 0.9137 -0.000274503 0.359066 0.933312 -0.000194955 0.310827 0.950467 -0.000111403 0.261764 0.965132 0 0.212008 0.977268 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -0.735963 0.677022 -0.000269019 -0.716861 0.697216 0.00016538 -0.70711 0.707103 -0.00053762 -0.697309 0.716771 0.000159832 -0.677284 0.735722 -0.00025465 -0.676457 0.736482 -0.00034365 -0.656951 0.753933 0.000189245 -0.6446 0.76452 -0.000514448 -0.636156 0.77156 9.32422e-05 -0.614928 0.788583 -0.00011789 -0.611558 0.7912 -0.000411459 -0.593438 0.80488 0.000201418 -0.577434 0.816437 -0.000458341 -0.571443 0.820642 -1.31342e-05 -0.549209 0.835685 1.42212e-05 -0.54226 0.84021 -0.000454163 -0.526642 0.850087 0.0001815 -0.506117 0.862465 -0.000357185 -0.503612 0.86393 -0.000163791 -0.480496 0.876997 0.000111471 -0.469053 0.88317 -0.000453796 -0.45699 0.889472 0.000117763 -0.433134 0.90133 -0.000181777 -0.40926 0.912418 0.00015827 -0.384967 0.92293 1.99811e-06 -0.360556 0.932737 -2.34674e-06 -0.353085 0.935591 -0.000381937 -0.336017 0.941856 0.000144532 -0.313043 0.949739 -0.000287278 -0.311135 0.950366 -0.000172998 -0.28641 0.958107 0.000105204 -0.272466 0.962165 -0.000377159 -0.261417 0.965226 6.00699e-05 -0.236301 0.97168 -7.69856e-05 -0.211345 0.977412 0.000130787 -0.186084 0.982534 -0.00010723 -0.161108 0.986937 7.42313e-05 -0.148044 0.988981 -0.000338784 -0.136022 0.990706 5.74757e-05 -0.110943 0.993827 -7.59444e-05 -0.0861475 0.996282 0.000109451 -0.0612133 0.998125 -0.000140016 
-0.0366933 0.999327 8.07895e-05 -0.0212223 0.999775 -0.000299518 -0.0121784 0.999926 0 -0.431153 0.902279 -0.000318074 -0.392465 0.919767 -0.000400721 -0.231383 0.972863 -0.000312774 -0.189881 0.981807 -0.000290701 -0.105928 0.994374 -0.000284787 -0.0636439 0.997973 -0.000247609 -0.736475 0.676465 -0.000331587 -0.758721 0.651416 0.000312456 -0.764526 0.644593 -0.000386843 -0.791201 0.611557 -0.000434993 -0.816433 0.57744 -0.000474005 -0.840209 0.542262 -0.000503014 -0.862465 0.506117 -0.000524132 -0.88317 0.469052 -0.000536238 -0.902284 0.431141 -0.000540546 -0.919767 0.392465 -0.000536246 -0.935591 0.353085 -0.000521905 -0.949737 0.313047 -0.000497955 -0.962166 0.272464 -0.000467704 -0.972863 0.231383 -0.000426516 -0.981807 0.189882 -0.000377766 -0.988981 0.148043 -0.000319734 -0.994374 0.105928 -0.000252885 -0.997973 0.0636448 -0.000177362 -0.999775 0.0212223 -9.25776e-05 -0.999795 0.020271 0 -0.784494 0.620136 0.000365146 -0.80898 0.587836 0.000407157 -0.832147 0.554555 0.000442174 -0.853935 0.52038 0.000468042 -0.874326 0.48534 0.000485149 -0.893275 0.44951 0.000492941 -0.910759 0.412938 0.000490944 -0.926749 0.375682 0.000481044 -0.941216 0.337805 0.00046403 -0.954128 0.2994 0.000435964 -0.965482 0.260468 0.000400287 -0.975243 0.221135 0.000355525 -0.983406 0.18142 0.000301896 -0.989951 0.141414 0.000239816 -0.994869 0.101171 0.000168479 -0.998152 0.0607748 8.92006e-05 -1 0 0 -1 0 0 -0.999775 -0.0212223 0 -0.999775 -0.0212223 0 -0.997973 -0.0636448 0 -0.994374 -0.105928 0 -0.988981 -0.148043 0 -0.981807 -0.189882 0 -0.972863 -0.231383 0 -0.962166 -0.272464 0 -0.949738 -0.313047 0 -0.935591 -0.353085 0 -0.919767 -0.392465 0 -0.902284 -0.431141 0 -0.88317 -0.469053 0 -0.862465 -0.506117 0 -0.84021 -0.542262 0 -0.816433 -0.57744 0 -0.791201 -0.611557 0 -0.764526 -0.644593 0 -0.736475 -0.676465 0 -0.70711 -0.707103 0 -0.676457 -0.736482 0 -0.6446 -0.76452 0 -0.611558 -0.7912 0 -0.577434 -0.816437 0 -0.54226 -0.840211 0 -0.506117 -0.862465 0 -0.469053 -0.88317 0 -0.431153 -0.902279 0 -0.392465 -0.919767 0 -0.353085 -0.935591 0 -0.313043 -0.949739 0 -0.272466 -0.962165 0 -0.231383 -0.972863 0 -0.189881 -0.981807 0 -0.148044 -0.988981 0 -0.105928 -0.994374 0 -0.0636439 -0.997973 0 -0.0212223 -0.999775 0 -0.0212223 -0.999775 0 -0.997973 -0.0636448 0 -0.994374 -0.105928 0 -0.988981 -0.148043 0 -0.981807 -0.189882 0 -0.972863 -0.231383 0 -0.962166 -0.272464 0 -0.949738 -0.313047 0 -0.935591 -0.353085 0 -0.919767 -0.392465 0 -0.902284 -0.431141 0 -0.88317 -0.469053 0 -0.862465 -0.506117 0 -0.84021 -0.542262 0 -0.816433 -0.57744 0 -0.791201 -0.611557 0 -0.764526 -0.644593 0 -0.736475 -0.676465 0 -0.70711 -0.707103 0 -0.676457 -0.736482 0 -0.6446 -0.76452 0 -0.611558 -0.7912 0 -0.577434 -0.816437 0 -0.54226 -0.840211 0 -0.506117 -0.862465 0 -0.469053 -0.88317 0 -0.431153 -0.902279 0 -0.392465 -0.919767 0 -0.353085 -0.935591 0 -0.313043 -0.949739 0 -0.272466 -0.962165 0 -0.231383 -0.972863 0 -0.189881 -0.981807 0 -0.148044 -0.988981 0 -0.105928 -0.994374 0 -0.0636439 -0.997973 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -6.6523e-08 0 -1 -6.6523e-08 0 -1 -6.6523e-08 0 -1 -6.6523e-08 -8.46657e-08 5.17401e-08 -1 -4.46595e-08 4.76654e-08 -1 -5.24121e-08 4.43487e-08 -1 -4.2646e-08 4.55164e-08 -1 -3.47508e-08 -3.47509e-08 -1 -5.17401e-08 6.80791e-08 -1 1.25798e-07 2.67033e-09 -1 1.13714e-07 7.252e-09 -1 1.03956e-07 1.10741e-08 -1 9.58822e-08 1.43529e-08 -1 8.90792e-08 1.7228e-08 -1 8.32507e-08 1.98001e-08 -1 7.81879e-08 2.21411e-08 -1 7.37362e-08 2.43045e-08 -1 6.97765e-08 2.63331e-08 -1 6.62228e-08 
2.82573e-08 -1 6.30028e-08 3.01048e-08 -1 6.00599e-08 3.18979e-08 -1 5.73502e-08 3.36546e-08 -1 5.48371e-08 3.53912e-08 -1 5.24884e-08 3.71236e-08 -1 5.02803e-08 3.88641e-08 -1 4.81877e-08 4.06284e-08 -1 4.61927e-08 4.24288e-08 -1 4.42788e-08 4.42783e-08 -1 4.24284e-08 4.61932e-08 -1 4.06288e-08 4.81873e-08 -1 3.88641e-08 5.02803e-08 -1 3.71232e-08 5.24888e-08 -1 3.53911e-08 5.48372e-08 -1 3.36546e-08 5.73502e-08 -1 3.18979e-08 6.00599e-08 -1 3.01054e-08 6.30018e-08 -1 2.82573e-08 6.62228e-08 -1 6.41154e-08 0 -1 -1.18297e-06 2.04333e-06 -1 4.81226e-08 1.45999e-07 -1 3.94725e-08 1.3939e-07 -1 3.18502e-08 1.33916e-07 -1 2.50156e-08 1.29347e-07 -1 1.8789e-08 1.25516e-07 -1 1.3028e-08 1.22297e-07 -1 7.62724e-09 1.19599e-07 -1 2.49099e-09 1.17349e-07 -1 -3.10441e-07 0 -1 -3.72529e-08 0 -1 -4.31795e-08 8.46657e-08 -1 -6.62593e-08 5.68883e-08 -1 -6.6378e-08 5.24713e-08 -1 -6.66055e-08 4.83982e-08 -1 -6.69354e-08 4.46068e-08 -1 -6.73631e-08 4.10505e-08 -1 -6.78867e-08 3.76841e-08 -1 -6.85059e-08 3.44732e-08 -1 -6.92223e-08 3.13854e-08 -1 -7.00391e-08 2.83922e-08 -1 -7.09612e-08 2.54682e-08 -1 -7.19946e-08 2.25915e-08 -1 -7.31495e-08 1.97343e-08 -1 -7.4435e-08 1.6878e-08 -1 -7.58662e-08 1.39959e-08 -1 -7.74587e-08 1.10649e-08 -1 -7.92336e-08 8.05748e-09 -1 -8.12151e-08 4.94497e-09 -1 -8.34354e-08 1.69166e-09 -1 -4.23328e-08 7.31211e-08 -1 0 0 -1 0 0 -1 0 0 -1 1 0 0 1 -2.20432e-07 -4.40862e-08 1 0 0 1 0 0 1 1.99569e-07 -6.6523e-08 1 0 0 1 -1.65569e-07 -1.24176e-07 1 -8.18745e-08 -6.14059e-08 1 0 -6.20882e-08 1 0 -6.20882e-08 1 -1.85437e-07 -4.96706e-08 0 0.999699 -0.0245344 7.17082e-05 0.999505 -0.0314731 0 0.997076 -0.0764153 -0.00193336 0.997299 -0.0734232 0 0.998653 -0.0518866 0 0.999625 -0.0273848 -0.000455375 0.995836 -0.091157 -5.98203e-07 0.994359 -0.106067 5.71309e-07 0.988862 -0.148833 -0.000510028 0.986526 -0.163606 0 0.986523 -0.163625 0 0.984883 -0.173219 -0.00122473 0.975756 -0.218857 0.00148998 0.985233 -0.171212 -0.000234931 0.999786 -0.0206867 0.000128576 0.99797 -0.0636785 0.000168843 0.981409 -0.191926 -0.000326744 0.972257 -0.233914 0.000204068 0.960733 -0.277475 2.7829e-06 0.94756 -0.319579 -3.47403e-06 0.932281 -0.361735 -0.000695423 0.927251 -0.374439 0.000290651 0.914801 -0.403904 -0.000603637 0.897549 -0.440914 0 0.862961 -0.50527 0 0.850897 -0.525332 0 0.837757 -0.546043 0 0.823786 -0.566901 0.000728158 0.770358 -0.637611 0.00183144 0.732454 -0.680814 -4.74468e-07 0.689559 -0.72423 -4.51752e-07 0.724232 -0.689556 -0.000395984 0.971943 -0.235216 0 0.952142 -0.305655 3.85661e-06 0.948987 -0.315315 -0.000642895 0.933077 -0.359677 0.000740283 0.949453 -0.31391 -0.000618859 0.952108 -0.305762 -4.87086e-06 0.927291 -0.374342 0 0.918542 -0.395323 0 0.908565 -0.417743 0 0.897549 -0.440914 0 0.897549 -0.440914 -0.000360376 0.895958 -0.44414 0.000240387 0.874173 -0.485615 0.000298046 0.850747 -0.525575 -0.000460449 0.826038 -0.563615 -0.000896548 0.823786 -0.5669 0.000513012 0.79771 -0.603041 -0.00146428 0.780196 -0.625534 0 0.732456 -0.680815 0 0.680813 -0.732457 -0.000734171 0.653592 -0.756847 0.00150702 0.680812 -0.732457 -0.00100086 0.862961 -0.50527 0 0.823786 -0.566901 0 0.862961 -0.50527 0.00029778 0.768474 -0.639881 -0.000376846 0.737704 -0.675124 0.0010478 0.702761 -0.711425 -0.000356358 0.680813 -0.732457 0 0.625536 -0.780195 -6.01288e-05 0.615262 -0.788323 0.00140692 0.625535 -0.780195 -0.00175959 0.732455 -0.680814 0 0.69686 -0.717208 0 0.732456 -0.680815 0 0.670184 -0.742195 -0.000788557 0.631888 -0.775059 0 0.63286 -0.774266 -0.000309825 0.625536 -0.780195 0 0.604159 
-0.796864 0.00489105 0.586167 -0.810176 0 0.58401 -0.811746 -0.000413266 0.566904 -0.823784 0 0.505263 -0.862965 -9.159e-05 0.492956 -0.870054 0.00112044 0.505263 -0.862965 -0.000569672 0.535407 -0.844594 0.00113047 0.566903 -0.823784 0 0.625536 -0.780195 0 0.566904 -0.823784 0 0.566904 -0.823784 0.000637203 0.667752 -0.744384 0.000121303 0.550167 -0.835055 -0.000173809 0.507231 -0.86181 0.000177003 0.461625 -0.887075 0.000240494 0.414379 -0.910104 -3.19622e-06 0.365626 -0.930762 2.77966e-06 0.314641 -0.949211 -0.000431771 0.305664 -0.952139 0.000298049 0.261427 -0.965223 -0.000593388 0.235221 -0.971942 0 0.163643 -0.98652 -0.00030774 0.122708 -0.992443 0.000568772 0.163643 -0.986519 0.000141839 0.170829 -0.985301 -0.000117958 0.219228 -0.975674 -0.000370352 0.267033 -0.963687 0.000127701 0.313564 -0.949567 -0.000110448 0.36 -0.932952 0.000918446 0.374274 -0.927318 -0.000454096 0.405637 -0.914034 0.000876245 0.440919 -0.897546 0 0.505263 -0.862965 -0.00026884 0.505263 -0.862965 -0.000482272 0.440919 -0.897547 0 0.374344 -0.92729 4.51856e-06 0.305665 -0.952139 0 0.235221 -0.971942 0.000774112 0.235221 -0.971942 -0.000388319 0.374344 -0.92729 0.00034821 0.206588 -0.978428 -0.000513109 0.163643 -0.986519 0 0.0911993 -0.995833 -0.000122824 0.073687 -0.997281 0.000667431 0.0911993 -0.995832 0.000160683 0.150067 -0.988676 -0.000205439 0.0917164 -0.995785 0.000200335 0.0306462 -0.99953 -6.62446e-08 0.0273942 -0.999625 0.000155217 0.0243839 -0.999703 -0.000241765 0.0911993 -0.995833 0 0.0273972 -0.999625 0 0.0911993 -0.995833 0.000700004 0.305573 -0.952169 0 0.305573 -0.952169 9.85161e-05 0.449528 -0.893266 6.00367e-05 0.575771 -0.817611 -0.0009831 0.757631 -0.652682 0 0.787989 -0.615689 -0.000107625 0.788389 -0.615177 0.00010783 0.817549 -0.575859 0.000439052 0.818425 -0.574613 -0.00136887 0.84524 -0.534386 -0.000298818 0.870167 -0.492756 0.000317803 0.893161 -0.449736 0 0.89272 -0.450612 0.000347768 0.862961 -0.50527 0 0.869522 -0.493894 -0.0019841 0.914525 -0.404524 -0.00293033 0.963987 -0.265933 0 0.966473 -0.256769 0.00038341 0.971943 -0.235216 0 0.975197 -0.221339 -0.00405291 0.992533 -0.121911 -5.28331e-07 0.99362 -0.112784 0.000394335 0.995834 -0.0911805 -1.10214e-06 0.995834 -0.0911805 8.84159e-07 0.986526 -0.163606 0 0.995837 -0.091157 1.34671e-07 0.990558 -0.137096 0.000319178 0.986523 -0.163625 0 0.971943 -0.235216 0 0.971943 -0.235216 0 0.959879 -0.280415 0.000325169 0.952142 -0.305655 5.30537e-06 0.952108 -0.305762 -2.99633e-06 0.932376 -0.361489 0.000366072 0.927291 -0.374342 0 0.927252 -0.374439 0.000323329 0.897549 -0.440914 0 0.823786 -0.566901 0 0.780197 -0.625534 0 0.732456 -0.680815 0 0.780197 -0.625534 0 0.780197 -0.625534 0 0.732456 -0.680815 0 0.680813 -0.732457 0 0.440919 -0.897547 0 0.440919 -0.897547 -3.4957e-06 0.374275 -0.927318 0 0.235221 -0.971942 0 0.163643 -0.98652 0.0245389 8.46402e-08 -0.999699 0.0300059 9.94695e-05 -0.99955 0.0911993 0.000530423 -0.995833 0.163643 0.000447193 -0.98652 0.219202 -9.10529e-05 -0.97568 0.170859 0.00011039 -0.985295 0.0356948 0.00031641 -0.999363 0.0911993 -0.000326035 -0.995833 0.106891 0.000293184 -0.994271 0.17755 0.000268216 -0.984112 0.247315 0.000241741 -0.968935 0.315795 0.000213054 -0.948827 0.382688 0.000182244 -0.923878 0.447615 0.000149748 -0.894226 0.510272 0.00011531 -0.860013 0.570322 7.91137e-05 -0.821421 0.627465 3.99383e-05 -0.778645 0.681426 0 -0.731887 0.731887 0 -0.681426 0.732456 -4.1487e-05 -0.680815 0.778645 4.0016e-05 -0.627464 0.780197 -8.00897e-05 -0.625534 0.788369 -5.43048e-05 -0.615203 0.780196 0.00111626 
-0.625534 0.757443 -0.000540425 -0.652901 0.732455 0.00103091 -0.680814 0.724232 2.65199e-07 -0.689556 0.689558 2.80227e-07 -0.72423 0.680813 0.000919733 -0.732457 0.653432 -0.000455157 -0.756985 0.625536 0.000888759 -0.780195 0.625536 -8.00936e-05 -0.780195 0.163643 -0.000302303 -0.98652 0.235221 -0.000276435 -0.971942 0.26695 -0.000281067 -0.96371 0.235221 0.000593587 -0.971942 0.305537 -0.00024813 -0.95218 0.313595 9.32013e-05 -0.949557 0.305537 0.000528489 -0.95218 0.374236 -0.000218484 -0.927334 0.405532 -0.000328152 -0.914081 0.374236 0.0006732 -0.927333 0.359973 -7.9712e-05 -0.932963 0.440919 -0.000186895 -0.897547 0.449551 6.96591e-05 -0.893255 0.440919 0.000629175 -0.897547 0.505263 -0.000153275 -0.862965 0.535276 -0.000385688 -0.844677 0.505263 0.000770102 -0.862965 0.492935 -6.38199e-05 -0.870066 0.566904 -0.000117628 -0.823784 0.575785 3.96043e-05 -0.817601 0.566904 0.000757606 -0.823784 0.680813 -4.15861e-05 -0.732457 0.821422 7.91176e-05 -0.57032 0.823784 -0.000117545 -0.566904 0.845032 -0.000638908 -0.534716 0.823783 0.00119161 -0.566903 0.817566 5.54902e-05 -0.575834 0.860011 0.000115408 -0.510276 0.862961 -0.000153032 -0.50527 0.870125 -0.000125771 -0.49283 0.862961 0.00135145 -0.505269 0.894228 0.000150052 -0.447613 0.897549 -0.000186717 -0.440914 0.914323 -0.000743176 -0.404984 0.897548 0.00136214 -0.440914 0.893199 0.00013372 -0.449662 0.923875 0.000182326 -0.382694 0.927334 -0.000218829 -0.374234 0.933021 -0.000209544 -0.359822 0.927333 0.00161065 -0.374233 0.948829 0.000212724 -0.315791 0.952179 -0.000248065 -0.305542 0.963842 -0.000837715 -0.266472 0.952178 0.00151864 -0.305541 0.949502 0.000242116 -0.313761 0.968936 0.000241903 -0.24731 0.967792 0.000432495 -0.251749 0.975426 -0.00013496 -0.220326 0.985267 0.000369675 -0.171024 0.975713 -0.000304474 -0.219054 0.984112 0.00026837 -0.177551 0.986519 -0.000302001 -0.163647 0.992493 -0.000896637 -0.122296 0.986518 0.0016111 -0.163647 0.994271 0.000293527 -0.106891 0.994059 0.000370485 -0.108839 0.997155 -0.000142195 -0.0753781 0.999699 0.000497767 -0.0245466 0.997292 -0.000394669 -0.0735362 0.999363 0.000316892 -0.0356951 0.999625 0 -0.0273848 0.615248 -3.86193e-05 -0.788333 0.122643 -0.000242428 -0.992451 0.0736624 -9.80416e-05 -0.997283 0 1 0 0 1 0 -4.08008e-07 1 5.32184e-08 0 1 7.92615e-08 -1.85437e-07 1 4.96705e-08 3.07575e-15 1 7.92615e-08 0 1 0 0 1 0 0 1 0 -1.86264e-07 1 1.86264e-07 9.21089e-08 1 7.16402e-08 0 1 0 1 0 -7.16402e-08 1 -1.11387e-07 -8.66347e-08 1 0 0 1 7.0958e-08 -1.24176e-07 1 0 0 1 1.85437e-07 -4.96705e-08 1 0 0 1 0 0 1 1.13549e-07 -6.11419e-08 1 0 -6.20882e-08 1 0 -6.20882e-08 -0.999625 0 -0.0273848 -0.998007 0 -0.0630976 -0.994271 0.000266492 -0.106891 -0.985269 0 -0.171013 -0.97854 0 -0.206056 -0.968936 0.000219623 -0.24731 -0.950389 0 -0.311064 -0.938689 0 -0.344766 -0.923875 0.000165533 -0.382694 -0.894228 0.000136231 -0.447613 -0.860011 0.000104779 -0.510276 -0.821424 7.18382e-05 -0.570317 -0.778645 3.63303e-05 -0.627464 -0.731887 0 -0.681426 -0.681426 0 -0.731887 -0.680813 -3.77558e-05 -0.732457 -0.627465 3.62598e-05 -0.778645 -0.625536 -7.27171e-05 -0.780195 -0.570322 -0.000682352 -0.821421 -0.625536 0.000690803 -0.780195 -0.627465 -0.000344466 -0.778645 -0.680813 0.000358678 -0.732457 -0.681426 0 -0.731887 -0.731887 0 -0.681426 -0.732456 0.000357823 -0.680815 -0.778645 -0.000345136 -0.627464 -0.780196 0.00069077 -0.625534 -0.780197 -7.27124e-05 -0.625534 -0.995042 0 -0.0994555 -0.990856 0 -0.134921 -0.994268 -0.00253165 -0.10689 -0.984112 0.000243652 -0.177551 -0.970359 0 -0.241668 
-0.961104 0 -0.276186 -0.968934 -0.0020864 -0.247309 -0.948829 0.000193131 -0.315791 -0.925457 0 -0.378853 -0.897548 0.00161044 -0.440914 -0.862961 0.00131988 -0.505269 -0.823786 0.00101371 -0.5669 -0.821424 -0.000682459 -0.570317 -0.897549 -0.000169521 -0.440914 -0.862961 -0.000138936 -0.50527 -0.823786 -0.000106706 -0.566901 -0.732456 -3.76659e-05 -0.680815 -0.570322 7.18269e-05 -0.821421 -0.566904 -0.000106793 -0.823784 -0.510272 -0.000994547 -0.860013 -0.566903 0.00101453 -0.823784 -0.510272 0.00010469 -0.860013 -0.505263 -0.000139159 -0.862965 -0.447615 -0.00129157 -0.894225 -0.505263 0.00132199 -0.862965 -0.447615 0.000135956 -0.894226 -0.440919 -0.000169681 -0.897547 -0.382687 -0.00157184 -0.923877 -0.440919 0.00161196 -0.897546 -0.382688 0.000165458 -0.923878 -0.378848 0 -0.925459 -0.344815 0 -0.938671 -0.311061 0 -0.95039 -0.247315 0.000219476 -0.968935 -0.20604 0 -0.978544 -0.171016 0 -0.985268 -0.106891 0.000266181 -0.994271 -0.0630557 0 -0.99801 -0.0273972 0 -0.999625 -0.0356947 -0.00272901 -0.999359 -0.315795 0.000193431 -0.948827 -0.276173 0 -0.961108 -0.241678 0 -0.970357 -0.247315 -0.002085 -0.968933 -0.17755 0.000243512 -0.984112 -0.134888 0 -0.990861 -0.0994645 0 -0.995041 -0.106891 -0.00252869 -0.994268 -0.0356948 0.000287267 -0.999363 -0.17755 -0.00231334 -0.984109 -0.315795 -0.00183757 -0.948826 -0.86001 -0.000995391 -0.510276 -0.894227 -0.00129419 -0.447612 -0.923874 -0.00157255 -0.382694 -0.948827 -0.00183473 -0.315791 -0.984109 -0.00231467 -0.17755 -0.999359 -0.00273317 -0.035695 -0.999363 0.000287705 -0.0356951 2.90929e-08 -0.0273974 0.999625 -0.000312163 -0.091184 0.995834 -0.000289451 -0.163647 0.986519 -0.000263792 -0.235244 0.971936 -0.000238973 -0.305539 0.952179 -0.000209245 -0.374219 0.92734 -0.000178864 -0.44093 0.897542 -0.000147825 -0.50525 0.862973 -0.000112593 -0.566922 0.823771 -7.72002e-05 -0.625527 0.780203 -3.93072e-05 -0.680799 0.73247 3.30532e-08 -0.731894 0.681419 3.55022e-08 -0.681405 0.731907 -0.000843953 -0.106878 0.994272 0.000868464 -0.163647 0.986519 -0.000774334 -0.177575 0.984107 0.000791543 -0.235244 0.971936 -0.000691944 -0.247288 0.968942 -0.000611874 -0.315821 0.948819 -0.000524821 -0.382676 0.923883 -0.000427465 -0.447601 0.894233 -0.00033206 -0.510281 0.860008 -0.000225772 -0.570328 0.821417 -0.000116331 -0.627472 0.778639 2.13012e-08 -0.681405 0.731907 1.86631e-08 -0.731894 0.681419 0.000120656 -0.732469 0.6808 -0.000113742 -0.778651 0.627458 0.000229135 -0.780191 0.625541 7.62093e-05 -0.821421 0.570322 -7.63404e-05 -0.780191 0.625541 3.79505e-05 -0.778651 0.627458 -4.01791e-05 -0.732469 0.6808 0.000717084 -0.305539 0.952179 0.000627897 -0.374219 0.92734 0.000536748 -0.44093 0.897541 0.000443623 -0.50525 0.862973 0.000337922 -0.566922 0.823771 0.000231737 -0.625527 0.780203 0.000118049 -0.6808 0.73247 -0.000228528 -0.821421 0.570322 0.000338452 -0.823789 0.566896 0.000110415 -0.860017 0.510265 -0.000112786 -0.823789 0.566896 -0.000331155 -0.860017 0.510265 0.000438117 -0.862958 0.505275 0.000144367 -0.894225 0.447618 -0.00014601 -0.862958 0.505275 -0.00043302 -0.894225 0.447618 0.000535198 -0.897546 0.44092 0.000175338 -0.923875 0.382695 -0.000178374 -0.897546 0.44092 -0.000525946 -0.923875 0.382695 0.00062896 -0.927339 0.374222 0.000203858 -0.948829 0.315791 -0.000209632 -0.927339 0.374222 -0.000611519 -0.948829 0.315791 0.000713147 -0.952178 0.305542 0.000231778 -0.968934 0.247319 -0.000237699 -0.952178 0.305542 -0.000695288 -0.968934 0.247319 0.00079596 -0.971944 0.235213 0.000256553 -0.984111 0.177553 -0.000265307 -0.971944 
0.235213 -0.000769626 -0.984111 0.177552 0.000869884 -0.986518 0.163652 0.000280771 -0.994271 0.106889 -0.000289953 -0.986518 0.163652 -0.000842293 -0.994271 0.106889 0.000937698 -0.995832 0.0912007 0.000303086 -0.999363 0.0356956 -0.000312561 -0.995833 0.0912007 -0.000909249 -0.999362 0.0356955 7.59896e-10 -0.999625 0.0274017 3.88222e-05 -0.627472 0.778639 7.53054e-05 -0.570328 0.821417 0.000110737 -0.510281 0.860008 0.000142541 -0.447601 0.894234 0.000174995 -0.382676 0.923883 0.000204014 -0.315821 0.948819 0.000230705 -0.247288 0.968942 0.000258165 -0.177576 0.984107 0.000281357 -0.106878 0.994272 0.00030332 -0.0356969 0.999363 0.000936604 -0.0911839 0.995834 -0.00090984 -0.0356969 0.999362 2.90929e-08 -0.0273982 0.999625 -0.000468388 -0.091184 0.995834 -0.000434374 -0.163646 0.986519 -0.000395935 -0.235244 0.971936 -0.000358496 -0.30554 0.952179 -0.000314294 -0.374219 0.92734 -0.000268345 -0.44093 0.897542 -0.000221737 -0.50525 0.862973 -0.000168881 -0.566917 0.823775 -0.000115867 -0.625527 0.780203 -5.8362e-05 -0.680806 0.732464 0 -0.731894 0.681419 0 -0.681404 0.731907 -0.00042182 -0.106878 0.994272 0.000434431 -0.163646 0.986519 -0.000387002 -0.177575 0.984107 0.000395991 -0.235244 0.971936 -0.000345935 -0.247291 0.968941 -0.00030563 -0.315817 0.94882 -0.000262343 -0.38268 0.923881 -0.000213756 -0.447601 0.894233 -0.000166032 -0.510281 0.860008 -0.000112863 -0.570322 0.821421 -5.87357e-05 -0.627479 0.778634 4.26026e-08 -0.681404 0.731907 4.04369e-08 -0.731894 0.681419 5.973e-05 -0.732463 0.680807 -5.7516e-05 -0.778645 0.627465 0.000115268 -0.780197 0.625534 0.00011356 -0.821427 0.570313 -0.000115232 -0.780197 0.625534 5.75535e-05 -0.778645 0.627465 -5.96904e-05 -0.732463 0.680807 0.000358552 -0.30554 0.952179 0.000314348 -0.374219 0.92734 0.000268398 -0.44093 0.897542 0.000221787 -0.50525 0.862973 0.000168929 -0.566917 0.823775 0.000115913 -0.625527 0.780203 5.84046e-05 -0.680806 0.732464 -0.000113527 -0.821427 0.570313 0.000168544 -0.823784 0.566904 0.000166324 -0.860012 0.510274 -0.000168511 -0.823784 0.566904 -0.000166292 -0.860011 0.510275 0.000219814 -0.862964 0.505266 0.000215709 -0.894225 0.447619 -0.000219755 -0.862964 0.505266 -0.000215656 -0.894225 0.447619 0.000268508 -0.897547 0.440919 0.000262193 -0.92388 0.382683 -0.000268457 -0.897547 0.440919 -0.000262149 -0.92388 0.382683 0.000313784 -0.927335 0.374233 0.000306515 -0.948825 0.315803 -0.00031374 -0.927335 0.374233 -0.000306478 -0.948825 0.315803 0.00035735 -0.952182 0.305531 0.0003469 -0.968937 0.247307 -0.000357315 -0.952182 0.305531 -0.000346871 -0.968937 0.247307 0.000397235 -0.971941 0.235225 0.000385755 -0.984111 0.177553 -0.000397208 -0.971941 0.235225 -0.000385734 -0.984111 0.177553 0.000434036 -0.986518 0.163651 0.000422076 -0.994271 0.106889 -0.000434017 -0.986518 0.163651 -0.000422063 -0.994271 0.106889 0.000468646 -0.995834 0.0911881 0.000454627 -0.999363 0.0356956 -0.000468635 -0.995834 0.0911881 -0.000454623 -0.999363 0.0356956 1.58312e-09 -0.999625 0.0274018 5.8781e-05 -0.627479 0.778634 0.000112911 -0.570322 0.821421 0.000166082 -0.510281 0.860008 0.000213808 -0.447601 0.894233 0.000262397 -0.38268 0.923881 0.000305685 -0.315817 0.94882 0.000345992 -0.247291 0.968941 0.000387059 -0.177575 0.984107 0.000421878 -0.106878 0.994272 0.000454896 -0.0356964 0.999363 -0.000454837 -0.0356964 0.999363 0.000468446 -0.091184 0.995834 0 -1 0 0 -1 0 0 -0.999625 -0.0274019 -0.000468625 -0.995834 -0.0911875 -0.000433937 -0.986518 -0.163653 -0.000397193 -0.971941 -0.235224 -0.000357291 -0.952182 -0.305532 -0.00031394 
-0.927335 -0.374231 -0.000268251 -0.897547 -0.440919 -0.000219825 -0.862963 -0.505267 -0.000168722 -0.823782 -0.566906 -0.000115272 -0.780197 -0.625534 -5.98813e-05 -0.732466 -0.680804 0 -0.681404 -0.731908 0 -0.731895 -0.681418 -0.000422135 -0.994271 -0.10689 0.000433937 -0.986518 -0.163653 -0.000385759 -0.984111 -0.177552 0.000397193 -0.971941 -0.235224 -0.000346938 -0.968937 -0.247306 -0.000306351 -0.948825 -0.315801 -0.000262341 -0.923878 -0.382687 -0.000215681 -0.894226 -0.447615 -0.000166145 -0.860012 -0.510274 -0.00011351 -0.821424 -0.570317 -5.73717e-05 -0.778645 -0.627464 0 -0.731895 -0.681418 0 -0.681404 -0.731908 5.8611e-05 -0.680803 -0.732467 -5.85133e-05 -0.627477 -0.778635 0.000115826 -0.625528 -0.780202 0.000112943 -0.570326 -0.821418 -0.000115828 -0.625528 -0.780202 5.85158e-05 -0.627477 -0.778635 -5.86111e-05 -0.680803 -0.732467 0.000357291 -0.952182 -0.305532 0.000313941 -0.927335 -0.374231 0.000268251 -0.897547 -0.440919 0.000219825 -0.862963 -0.505267 0.000168722 -0.823782 -0.566907 0.000115272 -0.780197 -0.625534 5.98813e-05 -0.732466 -0.680804 -0.000112943 -0.570326 -0.821418 0.0001693 -0.566915 -0.823776 0.000165614 -0.510284 -0.860006 -0.000169302 -0.566915 -0.823776 -0.000165614 -0.510284 -0.860006 0.000222258 -0.505252 -0.862972 0.000213298 -0.4476 -0.894234 -0.000222257 -0.505252 -0.862972 -0.000213298 -0.4476 -0.894234 0.000268391 -0.440935 -0.897539 0.000262441 -0.382676 -0.923883 -0.000268393 -0.440935 -0.897539 -0.000262441 -0.382676 -0.923883 0.000313823 -0.37422 -0.92734 0.000306228 -0.315813 -0.948821 -0.000313823 -0.37422 -0.92734 -0.000306228 -0.315813 -0.948821 0.000358178 -0.305532 -0.952182 0.000346107 -0.2473 -0.968939 -0.000358178 -0.305532 -0.952182 -0.000346107 -0.2473 -0.968939 0.00039597 -0.235251 -0.971935 0.000387116 -0.177575 -0.984107 -0.000395971 -0.235251 -0.971935 -0.000387116 -0.177575 -0.984107 0.00043491 -0.163636 -0.986521 0.000421183 -0.106878 -0.994272 -0.00043491 -0.163636 -0.986521 -0.000421183 -0.106878 -0.994272 0.000468814 -0.0911887 -0.995834 0.000454574 -0.0356948 -0.999363 -0.000468815 -0.0911887 -0.995834 -0.000454574 -0.0356948 -0.999363 0 -0.0274019 -0.999625 5.73718e-05 -0.778645 -0.627464 0.00011351 -0.821424 -0.570317 0.000166145 -0.860012 -0.510274 0.000215681 -0.894226 -0.447615 0.000262341 -0.923878 -0.382687 0.000306351 -0.948825 -0.315801 0.000346938 -0.968937 -0.247306 0.000385759 -0.984111 -0.177552 0.000422135 -0.994271 -0.10689 0.000454625 -0.999363 -0.0356957 -0.000454625 -0.999363 -0.0356957 0.000468625 -0.995834 -0.0911875 0 -0.999625 -0.0274014 -0.000468855 -0.995832 -0.0912013 -0.000434977 -0.986518 -0.16365 -0.000397974 -0.971943 -0.235215 -0.0003565 -0.952179 -0.30554 -0.000314385 -0.927338 -0.374225 -0.000267826 -0.897547 -0.440919 -0.000219094 -0.862957 -0.505277 -0.000169401 -0.823792 -0.566892 -0.000114578 -0.78019 -0.625542 -5.99284e-05 -0.732466 -0.680803 0 -0.681404 -0.731908 0 -0.731895 -0.681418 -0.00042111 -0.994271 -0.10689 0.000434977 -0.986518 -0.16365 -0.000384842 -0.984111 -0.177552 0.000397974 -0.971943 -0.235215 -0.000347656 -0.968933 -0.247321 -0.000305885 -0.948829 -0.315789 -0.000262786 -0.923875 -0.382693 -0.000216475 -0.894224 -0.447621 -0.0001654 -0.860017 -0.510265 -0.000114264 -0.821423 -0.57032 -5.72342e-05 -0.778647 -0.627463 0 -0.731895 -0.681418 0 -0.681404 -0.731908 5.85639e-05 -0.680803 -0.732466 -5.86508e-05 -0.627479 -0.778634 0.000115826 -0.625528 -0.780202 0.000112954 -0.570323 -0.82142 -0.000115828 -0.625528 -0.780202 5.86484e-05 -0.627479 -0.778634 -5.85639e-05 
-0.680803 -0.732466 0.0003565 -0.952179 -0.30554 0.000314385 -0.927338 -0.374225 0.000267826 -0.897547 -0.440919 0.000219094 -0.862957 -0.505277 0.000169401 -0.823792 -0.566892 0.000114578 -0.78019 -0.625542 5.99283e-05 -0.732466 -0.680803 -0.000112954 -0.570323 -0.82142 0.000168573 -0.566921 -0.823772 0.000166415 -0.510284 -0.860006 -0.000168571 -0.566921 -0.823772 -0.000166415 -0.510284 -0.860006 0.000222184 -0.505243 -0.862977 0.000213298 -0.4476 -0.894234 -0.000222184 -0.505243 -0.862977 -0.000213298 -0.4476 -0.894234 0.000268391 -0.440935 -0.897539 0.000262482 -0.382671 -0.923885 -0.000268393 -0.440935 -0.897539 -0.000262482 -0.382671 -0.923885 0.000313475 -0.37422 -0.92734 0.000306358 -0.315829 -0.948816 -0.000313474 -0.37422 -0.92734 -0.000306358 -0.315829 -0.948816 0.000358319 -0.305544 -0.952178 0.000346291 -0.247285 -0.968943 -0.000358319 -0.305544 -0.952178 -0.000346291 -0.247285 -0.968943 0.000395587 -0.235239 -0.971938 0.000387282 -0.177575 -0.984107 -0.000395587 -0.235239 -0.971938 -0.000387282 -0.177575 -0.984107 0.000434027 -0.163648 -0.986519 0.000422209 -0.106878 -0.994272 -0.000434027 -0.163648 -0.986519 -0.000422209 -0.106878 -0.994272 0.000468519 -0.091176 -0.995835 0.000454599 -0.0356953 -0.999363 -0.000468519 -0.091176 -0.995835 -0.000454599 -0.0356953 -0.999363 0 -0.0274019 -0.999625 5.72342e-05 -0.778647 -0.627463 0.000114264 -0.821423 -0.57032 0.0001654 -0.860017 -0.510265 0.000216475 -0.894224 -0.447621 0.000262786 -0.923875 -0.382693 0.000305885 -0.948829 -0.315789 0.000347656 -0.968933 -0.247321 0.000384842 -0.984111 -0.177552 0.00042111 -0.994271 -0.10689 0.000454633 -0.999363 -0.0356953 -0.000454633 -0.999363 -0.0356953 0.000468855 -0.995832 -0.0912013 0 -1 -3.7253e-06 0 -1 -3.7253e-06 -3.58604e-07 -1 -3.54963e-07 2.47858e-07 -1 -1.03274e-07 0 -1 0 0 -1 0 7.45057e-06 -1 0 0 -0.505216 0.862993 0 -0.535081 0.844801 -0.00224079 -0.587784 0.809015 0 -0.622964 0.78225 0 -0.649929 0.759995 -0.00280006 -0.69464 0.719352 0 -0.728052 0.685522 0 -0.751503 0.65973 -0.00314001 -0.788009 0.615655 0 -0.818211 0.574918 0 -0.837777 0.546012 -0.00324683 -0.866021 0.499997 0 -0.891755 0.452518 0 -0.906978 0.421177 -0.00313364 -0.92718 0.374603 0 -0.947091 0.320966 0 -0.95772 0.287703 -0.00279806 -0.970293 0.241919 0 -0.983143 0.182837 0 -0.988929 0.148389 -0.00223476 -0.994519 0.104529 0 -0.99916 0.0409831 0 -0.999981 0.00611972 0.000224318 -0.999391 0.0349055 0 -0.565529 0.824728 0 -0.594018 0.804452 0.000268895 -0.587786 0.809017 -0.00255026 -0.642792 0.766036 0 -0.677213 0.735787 0 -0.702488 0.711696 0.000336007 -0.694643 0.719355 -0.00299204 -0.74315 0.669118 0 -0.775085 0.631856 0 -0.796689 0.604389 0.000376803 -0.788013 0.615658 -0.00322084 -0.829025 0.559202 0 -0.857173 0.515028 0 -0.874614 0.484819 0.00038962 -0.866026 0.499999 -0.00321498 -0.89879 0.438369 0 -0.921775 0.387726 0 -0.934734 0.355349 0.000376038 -0.927185 0.374605 -0.00299725 -0.951052 0.309017 0 -0.967588 0.252534 0 -0.975809 0.218624 0.000335767 -0.970296 0.241919 -0.00254803 -0.984805 0.173643 0 -0.993687 0.112189 0 -0.996994 0.0774722 0.000268171 -0.994522 0.10453 -0.00186932 -0.999389 0.0349055 0.000305764 -0.984809 0.173644 0.000359671 -0.951056 0.309019 0.000385799 -0.898794 0.438371 0.000386501 -0.82903 0.559205 0.000359044 -0.743153 0.669121 0.000306031 -0.642794 0.766039 0.000224943 -0.529919 0.848049 -0.00187454 -0.529918 0.848047 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.00132653 -0.529931 0.84804 0.000474366 -0.566917 0.823775 -0.000417989 -0.587768 0.809029 0.000448702 -0.625526 0.780203 -0.000386853 -0.642792 0.76604 0.000413071 -0.680806 0.732464 0.00104538 -0.694665 0.719333 -0.00123921 -0.680806 0.732463 0.00116055 -0.642792 0.76604 -0.0013461 -0.625526 0.780203 0.00125396 -0.587768 0.809029 -0.0014231 -0.566917 0.823774 0 -0.518334 0.855179 -0.000442177 -0.529932 0.84804 -0.000348459 -0.694665 0.719333 0.000369256 -0.732463 0.680807 0.000902712 -0.743141 0.669134 -0.00110777 -0.732463 0.680806 -0.000300904 -0.743141 0.669135 0.000318275 -0.78019 0.625542 0.000738881 -0.788005 0.615668 -0.000954824 -0.78019 0.625542 -0.000246294 -0.788005 0.615668 0.000258806 -0.82379 0.566895 0.000548214 -0.829045 0.559181 -0.000776417 -0.82379 0.566895 -0.000182738 -0.829045 0.559181 0.000191498 -0.862958 0.505275 0.000336663 -0.866025 0.500001 -0.000574494 -0.862958 0.505275 -0.000112221 -0.866025 0.500001 0.000115636 -0.897552 0.440909 9.61055e-05 -0.898794 0.438371 -0.000346909 -0.897552 0.440909 -3.20352e-05 -0.898794 0.438371 3.26972e-05 -0.927185 0.374603 -5.30151e-05 -0.951056 0.30902 -0.000130269 -0.970295 0.241925 -0.000199014 -0.984808 0.173644 -0.00026064 -0.994521 0.10454 -0.000312819 -0.999391 0.0348922 7.29138e-09 -0.999625 0.0274017 0.000938494 -0.999391 0.0348922 -0.000997683 -0.995833 0.0911884 0.000332594 -0.995834 0.0911884 5.42783e-05 -0.927334 0.374234 0.000159045 -0.951056 0.30902 -0.000162835 -0.927334 0.374234 -9.80939e-05 -0.927185 0.374603 0.000135642 -0.952178 0.305543 0.000390807 -0.970295 0.241925 -0.000406927 -0.952178 0.305543 0.000209537 -0.971944 0.235212 0.000597041 -0.984808 0.173644 -0.000628613 -0.971944 0.235212 0.000274315 -0.986518 0.163651 0.000781938 -0.99452 0.10454 -0.000822944 -0.986518 0.163651 0 -0.505231 -0.862984 0.00101486 -0.541574 -0.840653 0.00115792 -0.601486 -0.798883 0.00127822 -0.658206 -0.752837 0.00137908 -0.711403 -0.702783 0.00144967 -0.760796 -0.648989 0.0014958 -0.806143 -0.59172 0.00152285 -0.847187 -0.531293 0.00152383 -0.883711 -0.468031 0.00149963 -0.915512 -0.402287 0.0014502 -0.942434 -0.334388 0.00137746 -0.964333 -0.26469 0.00128115 -0.98108 -0.1936 0.00116007 -0.992596 -0.121458 0.00101651 -0.998813 -0.0486924 0 -0.999981 -0.00611937 -0.000801183 -0.99939 -0.0349053 0.000319889 -0.58779 -0.809014 -0.000385975 -0.601486 -0.798883 0.000364694 -0.642792 -0.76604 -0.000426074 -0.658207 -0.752837 0.000400264 -0.694643 -0.719355 0.000427467 -0.74315 -0.669125 0.00044838 -0.788017 -0.615654 0.000460374 -0.829029 -0.559206 0.000463701 -0.866025 -0.500001 0.000459327 -0.898795 -0.438369 0.000447652 -0.927184 -0.374607 0.000428051 
-0.951056 -0.309018 0.000399694 -0.970296 -0.24192 0.000363912 -0.984809 -0.173644 0.000319247 -0.994522 -0.104529 0.000267061 -0.999391 -0.0349053 -0.000338836 -0.998814 -0.0486925 -0.000459694 -0.711403 -0.702784 -0.000483223 -0.760797 -0.64899 -0.000498603 -0.806143 -0.59172 -0.000507618 -0.847188 -0.531293 -0.000507947 -0.883712 -0.468032 -0.000499877 -0.915513 -0.402288 -0.000483402 -0.942435 -0.334388 -0.000459155 -0.964333 -0.26469 -0.00042705 -0.981081 -0.1936 -0.00038669 -0.992597 -0.121458 -0.00095774 -0.994521 -0.104529 -0.00109173 -0.984808 -0.173644 -0.00119908 -0.970295 -0.24192 -0.00128415 -0.951056 -0.309017 -0.00134295 -0.927183 -0.374607 -0.00137798 -0.898794 -0.438369 -0.0013911 -0.866024 -0.5 -0.00138112 -0.829028 -0.559206 -0.00134514 -0.788016 -0.615653 -0.0012824 -0.74315 -0.669124 -0.00120079 -0.694643 -0.719354 -0.00109408 -0.642792 -0.76604 -0.000959666 -0.58779 -0.809013 -0.000802996 -0.529921 -0.848047 -0.000338287 -0.541574 -0.840653 0.000267666 -0.529921 -0.848047 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 -5.29819e-07 -4.96706e-08 1 -8.27607e-07 -2.88946e-08 1 -4.9668e-07 -5.22088e-08 1 -3.54026e-07 -6.24228e-08 1 -2.73892e-07 -6.82899e-08 1 -2.22162e-07 -7.21853e-08 1 -1.85693e-07 -7.50245e-08 1 -1.58356e-07 -7.72353e-08 1 -1.36908e-07 -7.90442e-08 1 -1.19473e-07 -8.05829e-08 1 -1.0487e-07 -8.19351e-08 1 0 -9.21444e-08 1 -9.60808e-08 -9.94931e-08 1 -1.11741e-07 -1.00613e-07 1 -3.6016e-07 -1.03274e-07 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 -1.48102e-07 -9.98995e-08 1 -1.2734e-07 -9.94867e-08 1 -1.10027e-07 -9.90674e-08 1 -9.52519e-08 -9.86405e-08 1 -8.24048e-08 -9.8205e-08 1 -7.1027e-08 -9.7759e-08 1 -6.08009e-08 -9.73013e-08 1 -6.08014e-08 -9.73013e-08 1 -7.11152e-08 -9.78826e-08 1 -8.2737e-08 -9.86009e-08 1 0 0 1 0 0 -0.000442353 -0.529925 -0.848045 -0.00142358 -0.566923 -0.823769 0.00125381 -0.587776 -0.809022 -0.00134583 -0.625528 -0.780201 0.00116056 -0.642792 -0.76604 -0.00123921 -0.680806 -0.732463 -0.000348618 -0.694664 -0.719335 0.000412811 -0.680806 -0.732464 -0.000387124 -0.642792 -0.76604 0.000448333 -0.625528 -0.780202 -0.000418124 -0.587781 -0.80902 0.000474529 -0.566924 -0.82377 0 -0.518322 -0.855186 0.00132706 -0.529924 -0.848044 0.00104509 -0.694663 -0.719334 -0.00110813 -0.732463 -0.680806 -0.000301023 -0.743141 -0.669135 0.000369135 -0.732463 -0.680807 0.000902356 -0.743141 -0.669134 -0.000955157 -0.78019 -0.625542 -0.000246403 -0.788005 -0.615668 0.000318164 -0.78019 -0.625542 0.000738554 -0.788005 -0.615668 -0.000776719 -0.82379 -0.566895 -0.000182837 -0.829045 -0.559181 0.000258705 -0.82379 -0.566895 0.000547916 -0.829045 -0.559181 -0.000574734 -0.862957 -0.505277 -0.0001124 -0.866025 -0.500001 0.000191399 -0.862957 -0.505277 0.000336663 -0.866025 -0.500001 -0.000346909 -0.897552 -0.440909 -3.22678e-05 -0.898795 -0.438369 0.00011548 -0.897552 -0.440909 9.63379e-05 -0.898795 -0.43837 -9.78886e-05 -0.927185 -0.374605 0.000159045 -0.951056 -0.30902 0.000390678 -0.970295 -0.241925 0.000597053 -0.984808 -0.173644 0.000781994 -0.99452 -0.10454 0.000938495 -0.999391 -0.0348922 -7.29135e-09 -0.999625 -0.0274016 -0.000312844 -0.999391 -0.0348922 0.000332526 -0.995834 -0.0911881 -0.000997673 -0.995833 -0.0911881 -0.000162835 -0.927334 -0.374234 -5.31247e-05 -0.951056 -0.30902 5.41454e-05 -0.927334 -0.374234 3.24969e-05 -0.927185 -0.374605 -0.000407079 -0.952179 -0.305542 -0.000130312 -0.970295 -0.241925 
0.000135584 -0.952179 -0.305542 -0.000628627 -0.971944 -0.235213 -0.000199079 -0.984809 -0.173644 0.000209459 -0.971944 -0.235213 -0.000822856 -0.986518 -0.163651 -0.000260702 -0.994521 -0.10454 0.000274228 -0.986518 -0.163651 0 -1 0 0 -1 0 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0162428 0.999868 0 -0.0162429 0.999868 0 -0.0162429 0.999868 -0.000254849 -0.0248079 0.999692 0.000281676 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0.000133008 -0.524696 0.85129 -0.000409543 -0.501867 0.864945 0.000292038 -0.493126 0.869958 -0.000310547 -0.456317 0.889817 0 -0.409489 0.912315 0 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 -0.000241126 -0.0737851 0.997274 0.00026573 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0.000138869 -0.565831 0.824521 -0.0005092 -0.546086 0.837729 -0.000419642 -0.545009 0.83843 -0.000218591 -0.122641 0.992451 0.000237527 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0.000129583 -0.605591 0.795776 -0.000606039 -0.588841 0.808249 -0.000327847 -0.585607 0.810595 -0.000184031 -0.17114 0.985247 0.000198329 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 9.83033e-05 -0.643868 0.765136 -0.000701456 -0.629992 0.776602 -0.000232642 -0.624724 0.780845 -0.000134041 -0.219209 0.975678 0.000146459 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 4.15528e-05 -0.680624 0.732633 -0.0007934 -0.669452 0.742855 -0.000139132 -0.662345 0.749199 -7.09058e-05 -0.266791 0.963755 7.44434e-05 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 -4.99833e-05 -0.715719 0.698389 -0.000878296 -0.707113 0.7071 -4.50523e-05 -0.69835 0.715756 0 -0.313691 0.949525 0 -0.359892 0.932994 -0.000105203 -0.405311 0.914179 -0.000198372 -0.44978 0.893139 -0.000292038 -0.493126 0.869958 -0.000133009 -0.524695 0.85129 0.000419642 -0.545009 0.83843 0.0005092 -0.546086 0.837729 -0.000138869 -0.565831 0.824521 0.000327847 -0.585607 0.810595 -0.000129582 -0.605591 0.795776 0.000232642 -0.624724 0.780845 0.000701456 -0.629992 0.776602 -9.83033e-05 -0.643868 0.765136 0.000139131 -0.662345 0.749199 -4.1554e-05 -0.680624 0.732633 4.50523e-05 -0.69835 0.715756 0.000878296 -0.707113 0.7071 4.99833e-05 -0.715719 0.698389 -4.5234e-05 -0.732671 0.680584 0.000186407 -0.749044 0.66252 -0.000129803 -0.765225 0.643764 0.00100973 -0.776599 0.629994 0.000376452 -0.780592 0.625041 -0.000207736 -0.795911 0.605414 0.000639381 -0.810243 0.586094 -0.000271618 -0.824671 0.565613 0.00105028 -0.837725 0.546092 0.00099623 
-0.83793 0.545777 -0.000314662 -0.851439 0.524453 0.000893469 -0.863752 0.503916 0.00128336 -0.864944 0.501866 -0.000326136 -0.876141 0.482055 0.000702414 -0.887496 0.460815 0 -0.89733 0.441361 0.000397768 -0.908854 0.417114 -0.000205767 -0.919148 0.393912 0.000267586 -0.928471 0.371406 0 -0.927905 0.372817 0.000273014 -0.932409 0.361405 0 -0.93734 0.348417 0 -0.93734 0.348417 0 -0.945606 0.325315 0.000268753 -0.949909 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999868 0.0162285 -0.00010852 -0.999925 0.012277 0.000718674 -0.999323 0.0367729 -0.000248172 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.912308 0.409504 0 -0.912308 0.409504 0.000112646 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 -0.000186407 -0.749044 0.66252 -0.000951462 -0.742853 0.669454 4.5233e-05 -0.732671 0.680584 0.000210962 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 -0.000376452 -0.780592 0.625041 -0.00100973 -0.776599 0.629994 0.000129803 -0.765225 0.643764 0.000310548 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 -0.000639383 -0.810243 0.586094 -0.00104538 -0.808253 0.588835 0.000207736 -0.795911 0.605414 0.000409544 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 -0.00099623 -0.83793 0.545777 -0.00105028 -0.837725 0.546092 0.000271618 -0.824671 0.565613 0.000606039 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.885576 0.464494 -0.000702416 -0.887496 0.460815 0.000326135 -0.876141 0.482055 -0.000516244 -0.870338 0.492455 0 -0.864945 0.501867 0.000793399 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.918474 0.395481 -0.000267588 -0.928471 0.371406 0.000205766 -0.919148 0.393912 0.000951462 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.953675 0.30084 0.000200882 -0.960446 0.278465 -0.00024741 -0.953284 0.302076 0.00104538 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.979149 0.203145 0.000612013 -0.983126 0.182926 -0.00122448 -0.978261 0.207373 -0.000296666 -0.898727 0.438508 0.000491551 -0.909079 0.416623 0 -0.918474 0.395481 0 -0.953675 0.30084 -0.000200886 -0.960446 0.278465 0 -0.960767 0.277356 0.000270603 -0.964795 0.263002 0 -0.967644 0.25232 0.000655973 -0.966934 0.255026 -0.000426386 -0.972964 0.230959 0.000260681 -0.977147 0.212563 0 -0.986863 0.161561 
0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999868 0.0162285 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0.000247412 -0.953284 0.302076 0 -0.979149 0.203145 -0.000612013 -0.983126 0.182926 0 -0.983839 0.179056 0.00024378 -0.986863 0.161561 0.000163147 -0.987298 0.158878 0 -0.989965 0.141314 -0.000741998 -0.990915 0.134488 0 -0.991624 0.129161 0.000217774 -0.993905 0.110239 0.000221441 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999594 0.0284909 0 -0.999925 0.0122757 1.94771e-05 -0.999796 0.0202108 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0.00122448 -0.978261 0.207373 0 -0.995835 0.0911704 -0.000787166 -0.996318 0.0857288 0 -0.996883 0.0788973 0.000159734 -0.998117 0.0613403 0.000248172 -0.998294 0.0583811 0 -0.999594 0.0284909 0 -0.998294 0.0583811 0 -0.999169 0.0407501 -0.000718674 -0.999323 0.0367729 0 -0.999594 0.0284909 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.999594 0.0284909 0 -0.999169 0.0407501 -0.000159734 -0.998117 0.0613403 0 -0.996883 0.0788973 0.000787166 -0.996318 0.0857288 0 -0.995835 0.0911704 -0.000221441 -0.993918 0.110125 -0.000217774 -0.993905 0.110239 0 -0.991624 0.129161 0.000741998 -0.990915 0.134488 0 -0.989965 0.141314 -0.000163146 -0.987298 0.158878 -0.00024378 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.912308 0.409504 0 -0.889823 0.456306 0 -0.864945 0.501867 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.983839 0.179056 0.000426386 -0.972964 0.230959 -0.000655977 -0.966934 0.255026 0 -0.967644 0.25232 -0.000260681 -0.977147 0.212563 0 -0.973549 0.228478 0 -0.945606 0.325315 0 -0.93734 0.348417 0 -0.93734 0.348417 -0.000268752 -0.949909 0.312525 0 -0.945606 0.325315 -0.000491552 -0.909079 0.416623 0.000296664 -0.898727 0.438508 -0.000527228 -0.894823 0.446421 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.861001 0.508603 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 -0.000893469 -0.863752 0.503916 0.000314662 -0.851439 0.524453 0.000198373 -0.44978 0.893139 -0.000210962 -0.409489 0.912315 0 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0.000105203 -0.405311 0.914179 -0.000112646 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.359892 0.932994 0 -0.313691 0.949525 7.09053e-05 -0.266791 0.963755 0.000134041 -0.219209 0.975678 0.000184031 -0.17114 0.985247 0.000218591 -0.122641 0.992451 0.000241126 -0.0737851 0.997274 0.000254849 -0.0248079 0.999692 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.312537 0.949905 -7.44434e-05 -0.312537 0.949905 -0.000146459 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.212573 0.977145 -0.00019833 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.161561 0.986863 -0.000237527 -0.161561 0.986863 0 -0.110116 0.993919 0 
-0.110116 0.993919 -0.00026573 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 -0.000281677 -0.0583855 0.998294 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 
-0.912308 0.409504 0 -0.932409 0.361405 -0.000273014 -0.932409 0.361405 0 -0.927905 0.372817 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.7766 0.629994 0 -0.7766 0.629994 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 -0.000270603 -0.964795 0.263002 0 -0.960767 0.277356 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.889823 0.456306 0.000342653 -0.886515 0.4627 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 -0.000257376 -0.912308 0.409504 0 -0.907894 0.419199 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.945606 0.325315 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.973549 0.228478 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 -0.999925 0 0.0122756 -0.999925 0 0.0122756 -0.999323 -9.19591e-05 0.0367865 -0.998118 0 0.0613211 -0.996878 0 0.0789619 -0.99584 0 0.0911202 -0.993906 0 0.110234 -0.993906 0 0.110234 -0.991626 0 0.129144 -0.990902 -0.000102665 0.134583 -0.989959 0 0.141356 -0.990932 0.00184117 0.134351 -0.999596 0 0.0284321 -0.999169 0 0.0407641 -0.999324 0.00207743 0.0367134 -0.996313 -0.000102056 0.0857874 -0.987304 0 0.158843 -0.987304 0 0.158843 -0.983841 0 0.179043 -0.981568 0 0.191114 -0.978316 0 0.207116 -0.978316 0 0.207116 -0.973549 0 0.228478 -0.972944 -6.82555e-05 0.231039 -0.97066 0 0.240457 -0.972986 0.000850346 0.230863 -0.983109 -9.05891e-05 0.18302 -0.966977 0 0.254864 -0.966977 0 0.254864 -0.960764 0 0.277366 -0.953268 -0.000447729 0.302125 -0.960456 0.000363344 0.278431 -0.960431 -3.51498e-05 0.278519 -0.953305 4.32827e-05 0.302009 -0.945606 0 0.325315 -0.937343 0 0.348409 -0.945606 0 0.325315 -0.945606 0 0.325315 -0.937343 0 0.348409 -0.927899 0 0.372832 -0.919162 0.000317044 0.39388 -0.928447 -0.000416348 0.371466 -0.928496 5.67846e-05 0.371342 -0.919119 -4.32664e-05 0.393979 -0.9079 0 0.419186 
-0.898754 0.000434492 0.438453 -0.909044 -0.000716262 0.4167 -0.909149 0.00011375 0.416472 -0.898684 -6.9062e-05 0.438598 -0.887641 0 0.460536 -0.879819 0 0.47531 -0.873915 0 0.486078 -0.863963 0 0.503556 -0.863963 0 0.503556 -0.854711 0 0.519104 -0.851377 -8.73063e-05 0.524555 -0.84823 0 0.529629 -0.851469 0.000421338 0.524406 -0.897325 0 0.441371 -0.876097 -8.29207e-05 0.482136 -0.838235 0 0.545308 -0.838235 0 0.545308 -0.827391 0 0.561627 -0.810166 -0.000824916 0.586201 -0.8247 0.000348958 0.56557 -0.824614 -8.10661e-05 0.565695 -0.813837 0.000126218 0.581093 -0.810449 0 0.585809 -0.795859 -6.6904e-05 0.605482 -0.780692 0.000121204 0.624916 -0.782951 0 0.622084 -0.76655 0 0.642185 -0.765194 -4.52089e-05 0.6438 -0.749103 6.39389e-05 0.662454 -0.733131 0 0.680087 -0.715704 -6.03341e-05 0.698404 -0.732674 5.34947e-05 0.680579 -0.797981 0 0.602683 -0.780555 -0.000470635 0.625086 -0.79593 0.000259678 0.605389 -0.732657 -1.64773e-05 0.680598 -0.715725 1.85836e-05 0.698383 -0.697873 0 0.716221 -0.680632 4.87601e-05 0.732625 -0.698346 -5.28972e-05 0.71576 -0.698364 1.75965e-05 0.715743 -0.680615 -1.62219e-05 0.732642 -0.6624 5.77381e-05 0.74915 -0.644481 -0.00114541 0.76462 -0.629992 0.000295585 0.776602 -0.624695 -0.000265363 0.780869 -0.629992 -0.000807646 0.776602 -0.643877 0.000112897 0.765129 -0.662342 -0.000161851 0.749202 -0.669454 -0.000925235 0.742853 -0.669454 0.000317801 0.742853 -0.651511 0 0.758639 -0.642185 0 0.76655 -0.624795 0.000100866 0.780788 -0.612477 0 0.790488 -0.60602 -0.000799945 0.795449 -0.602666 0 0.797993 -0.588841 0.000267355 0.808249 -0.585712 0.000148634 0.81052 -0.571773 0 0.820412 -0.566151 -0.000608349 0.824302 -0.561612 0 0.827401 -0.546081 0.000234196 0.837732 -0.545164 0.000199067 0.838329 -0.529612 0 0.84824 -0.501867 0 0.864945 -0.45632 0 0.889816 -0.409486 0 0.912316 -0.361412 0 0.932406 -0.312537 0 0.949905 -0.262979 0 0.964802 -0.212573 0 0.977145 -0.161561 0 0.986863 -0.110117 0 0.993919 -0.0583855 0 0.998294 -0.024965 -0.000411806 0.999688 -0.0162429 0 0.999868 -0.0162429 0 0.999868 -0.0248233 0.000270079 0.999692 -0.0738002 0.000256353 0.997273 -0.122654 0.000233106 0.99245 -0.171153 0.000196943 0.985245 -0.219221 0.000144106 0.975675 -0.266796 7.67635e-05 0.963753 -0.313688 0 0.949526 -0.359892 0 0.932994 -0.361411 -0.000123251 0.932406 -0.405322 0.000114651 0.914174 -0.409486 -0.000231532 0.912316 -0.409486 0 0.912316 -0.524941 -0.000460647 0.851138 -0.501867 0.0013637 0.864944 -0.493553 -0.000856412 0.869715 -0.450028 -0.00051352 0.893015 -0.405431 -0.000245217 0.914126 -0.359892 0 0.932994 -0.313688 0 0.949526 -0.312537 0.000149681 0.949905 -0.266857 -0.000142804 0.963736 -0.262979 0.000279344 0.964802 -0.219326 -0.000254269 0.975652 -0.171284 -0.000332232 0.985222 -0.122797 -0.000378364 0.992432 -0.0739455 -0.000402434 0.997262 -0.0583855 0.000459618 0.998294 -0.45632 0.000891072 0.889815 -0.409486 0.000537705 0.912316 -0.361412 0.000262246 0.932406 -0.212573 0.000361012 0.977145 -0.161561 0.00041486 0.986863 -0.110117 0.000447518 0.993919 -0.0583855 -0.000298758 0.998294 -0.0583855 0 0.998294 -0.110117 -0.000282678 0.993919 -0.110117 0 0.993919 -0.161561 -0.000253633 0.986863 -0.161561 0 0.986863 -0.212573 -0.000212598 0.977145 -0.212573 0 0.977145 -0.262979 -0.000157528 0.964802 -0.262979 0 0.964802 -0.312537 -8.02409e-05 0.949905 -0.312537 0 0.949905 -0.449792 0.00021846 0.893133 -0.45632 -0.000343955 0.889816 -0.45632 0 0.889816 -0.493149 0.00032318 0.869945 -0.501867 -0.000457241 0.864945 -0.501867 0 0.864945 -0.524715 0.000148405 0.851278 
-0.544974 -0.000469024 0.838453 -0.565844 0.000157307 0.824512 -0.585567 -0.000369102 0.810624 -0.588841 -0.000689365 0.808249 -0.605607 0.000147819 0.795764 -0.546081 -0.000572678 0.837732 -0.71623 0 0.697864 -0.750539 0 0.660826 -0.749032 -0.000226015 0.662534 -0.765242 0.000159773 0.643743 -0.876183 0.000453989 0.481979 -0.887641 0 0.460536 -0.918474 0 0.395481 -0.937343 0 0.348408 -0.953677 0 0.300832 -0.983151 0.00136215 0.182791 -0.996326 0.00211351 0.0856153 -0.998118 0 0.0613211 -0.361412 0 0.932406 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0.000254852 0.0248079 0.999692 -0.00028167 0.0583855 0.998294 0 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 -0.000132896 0.5247 0.851287 0.000409838 0.501863 0.864947 -0.000291783 0.493122 0.86996 0.000310718 0.456317 0.889817 0 0.409491 0.912314 0 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0.000241153 0.0737856 0.997274 -0.000265692 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 -0.000138869 0.565831 0.824521 0.0005092 0.546086 0.837729 0.000419642 0.545009 0.83843 0.000218544 0.12264 0.992451 -0.000237598 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 -0.000129791 0.605596 0.795772 0.000606225 0.588837 0.808252 0.000327637 0.585598 0.810601 0.000183987 0.17114 0.985247 -0.000198389 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 -9.83033e-05 0.643868 0.765136 0.000701456 0.629992 0.776602 0.000232252 0.62472 0.780849 0.000134031 0.219211 0.975678 -0.000146444 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 -4.15528e-05 0.680624 0.732633 0.0007934 0.669452 0.742855 0.000139132 0.662345 0.749199 7.1133e-05 0.266792 0.963754 -7.41751e-05 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 4.99833e-05 0.715719 0.698389 0.000878296 0.707113 0.7071 4.50523e-05 0.69835 0.715756 0 0.313688 0.949526 0 0.359892 0.932994 0.000105087 0.405314 0.914177 0.000198352 0.449778 0.89314 0.000291783 0.493122 0.86996 0.000132896 0.5247 0.851287 -0.000419642 0.545009 0.83843 -0.0005092 0.546086 0.837729 0.000138869 0.565831 0.824521 -0.000327637 0.585598 0.810601 0.000129791 0.605596 0.795772 -0.000232252 0.62472 0.780849 -0.000701456 0.629992 0.776602 9.83033e-05 0.643868 0.765136 -0.000139131 0.662345 0.749199 4.1554e-05 0.680624 0.732633 -4.50523e-05 0.69835 0.715756 -0.000878296 0.707113 0.7071 -4.99833e-05 0.715719 0.698389 4.5234e-05 0.732671 0.680584 -0.000185789 0.749049 0.662514 0.000130249 0.76522 0.643769 -0.00100973 0.776599 0.629994 -0.000376452 0.780592 0.625041 0.000207736 
0.795911 0.605414 -0.000639381 0.810243 0.586094 0.000271618 0.824671 0.565613 -0.00105028 0.837725 0.546092 -0.00099623 0.83793 0.545777 0.000314662 0.851439 0.524453 -0.000893469 0.863752 0.503916 -0.00128336 0.864944 0.501866 0.000326745 0.876145 0.482047 -0.000701093 0.887492 0.460822 0 0.89733 0.441361 -0.000397768 0.908854 0.417114 0.000205767 0.919148 0.393912 -0.000267755 0.928474 0.371398 0 0.927908 0.37281 -0.000272841 0.932409 0.361405 0 0.937337 0.348425 0 0.937337 0.348425 0 0.945606 0.325315 -0.000268752 0.949909 0.312525 0 0.964797 0.262998 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.998294 0.0583855 0 0.999868 0.0162285 0.00010852 0.999925 0.012277 -0.000719494 0.999323 0.0367824 0.000248039 0.998294 0.0583855 0 0.993918 0.110121 0 0.986862 0.161565 0 0.977147 0.212563 0 0.964797 0.262997 0 0.94991 0.312525 0 0.932409 0.361405 0 0.912308 0.409504 0 0.912308 0.409504 -0.000112758 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0.00018579 0.749049 0.662514 0.000951462 0.742853 0.669454 -4.5233e-05 0.732671 0.680584 -0.000210945 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0.000376452 0.780592 0.625041 0.00100973 0.776599 0.629994 -0.000130249 0.76522 0.643769 -0.000310719 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0.000639383 0.810243 0.586094 0.00104538 0.808253 0.588835 -0.000207736 0.795911 0.605414 -0.000409839 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0.00099623 0.83793 0.545777 0.00105028 0.837725 0.546092 -0.000271618 0.824671 0.565613 -0.000606225 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.885576 0.464494 0.000701094 0.887492 0.460822 -0.000326745 0.876145 0.482047 0.000516244 0.870338 0.492455 0 0.864945 0.501867 -0.000793399 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.918474 0.395481 0.000267758 0.928474 0.371398 -0.000205766 0.919148 0.393912 -0.000951462 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.953675 0.30084 -0.000200882 0.960446 0.278465 0.00024741 0.953284 0.302076 -0.00104538 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262998 0 0.977147 0.212563 0 0.97915 0.203138 -0.000611315 0.983125 0.182935 0.00122379 0.978263 0.207364 0.000296666 0.898727 0.438508 -0.000491551 0.909079 0.416623 0 0.918474 0.395481 0 0.953675 0.30084 0.000200886 0.960446 0.278465 0 0.960767 0.277356 -0.000270682 0.964796 0.262997 0 0.967646 0.252313 -0.000657861 0.966934 0.255026 0.000424493 0.972963 0.230959 -0.000260869 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 
0 0.998294 0.0583855 0 0.999868 0.0162285 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 -0.000247412 0.953284 0.302076 0 0.97915 0.203138 0.000611317 0.983125 0.182935 0 0.983836 0.17907 -0.000243912 0.986862 0.161565 -0.000163146 0.987298 0.158878 0 0.989965 0.141314 0.000741998 0.990915 0.134488 0 0.991624 0.129161 -0.000217774 0.993905 0.110239 -0.000221585 0.993918 0.110121 0 0.998294 0.0583855 0 0.999594 0.0284909 0 0.999925 0.0122757 -1.94771e-05 0.999796 0.0202108 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.993918 0.110121 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 -0.00122379 0.978263 0.207364 0 0.995837 0.0911537 0.000784712 0.996318 0.085729 0 0.996881 0.0789188 -0.000160016 0.998118 0.0613309 -0.000248039 0.998294 0.0583855 0 0.999594 0.0284909 0 0.998294 0.0583855 0 0.999169 0.0407641 0.000719494 0.999323 0.0367824 0 0.999594 0.0284909 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.993918 0.110121 0 0.993918 0.110121 0 0.999594 0.0284909 0 0.999169 0.0407641 0.000160016 0.998118 0.0613309 0 0.996881 0.0789189 -0.000784712 0.996318 0.085729 0 0.995837 0.0911537 0.000221585 0.993918 0.110121 0.000217774 0.993905 0.110239 0 0.991624 0.129161 -0.000741998 0.990915 0.134488 0 0.989965 0.141314 0.000163147 0.987298 0.158878 0.000243912 0.986862 0.161565 0 0.977147 0.212563 0 0.964797 0.262998 0 0.94991 0.312525 0 0.932409 0.361405 0 0.912308 0.409504 0 0.889823 0.456306 0 0.864945 0.501867 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262997 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.986862 0.161565 0 0.986862 0.161565 0 0.983836 0.17907 -0.000424493 0.972963 0.230959 0.000657861 0.966934 0.255026 0 0.967646 0.252313 0.000260869 0.977147 0.212563 0 0.973547 0.228489 0 0.945606 0.325315 0 0.937337 0.348425 0 0.937337 0.348425 0.000268753 0.949909 0.312525 0 0.945606 0.325315 0.000491552 0.909079 0.416623 -0.000296664 0.898727 0.438508 0.000527228 0.894823 0.446421 0 0.889823 0.456306 0 0.889823 0.456306 0 0.861001 0.508603 0 0.837725 0.546092 0 0.837725 0.546092 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0.000893469 0.863752 0.503916 -0.000314662 0.851439 0.524453 -0.000198351 0.449778 0.89314 0.000210945 0.409491 0.912314 0 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 -0.000105087 0.405314 0.914177 0.000112758 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.359892 0.932994 0 0.313688 0.949526 -7.11336e-05 0.266792 0.963754 -0.000134031 0.219211 0.975678 -0.000183987 0.17114 0.985247 -0.000218544 0.12264 0.992451 -0.000241153 0.0737856 0.997274 -0.000254852 0.0248079 0.999692 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.262979 0.964802 0 0.312539 0.949905 7.41751e-05 0.312539 0.949905 0.000146444 0.262979 0.964802 0 0.212574 0.977145 0 0.212574 0.977145 0.000198388 0.212574 0.977145 0 0.16156 0.986863 0 0.16156 0.986863 0.000237598 0.16156 0.986863 0 0.110117 0.993919 0 0.110117 0.993919 0.000265692 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0.00028167 0.0583855 0.998294 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.0583855 
0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.212574 0.977145 0 0.16156 0.986863 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.7766 0.629995 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0.000272841 0.932409 0.361405 0 0.927908 0.37281 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.7766 0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.7766 
0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262998 0.000270682 0.964796 0.262998 0 0.960767 0.277356 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.889823 0.456306 0 0.889823 0.456306 -0.000342653 0.886515 0.4627 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0.000257376 0.912308 0.409504 0 0.907894 0.419199 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.932409 0.361405 0 0.932409 0.361405 0 0.94991 0.312525 0 0.94991 0.312525 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.945606 0.325315 0 0.94991 0.312525 0 0.94991 0.312525 0 0.964797 0.262997 0 0.964797 0.262997 0 0.94991 0.312525 0 0.94991 0.312525 0 0.94991 0.312525 0 0.94991 0.312525 0 0.964797 0.262998 0 0.964797 0.262998 0 0.977147 0.212563 0 0.977147 0.212563 0 0.964797 0.262998 0 0.964797 0.262998 0 0.964797 0.262998 0 0.973546 0.228489 0 0.964797 0.262998 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 0 0.977147 0.212563 0 0.977147 0.212563 0 0.977147 0.212563 0 0.977147 0.212563 0 0.986862 0.161565 0 0.986862 0.161565 0 0.977147 0.212563 0 0.977147 0.212563 0 0.986862 0.161565 0 0.986862 0.161565 0 0.986862 0.161565 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 0 0.993918 0.110121 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 0.0248583 0.000305077 0.999691 0.0583855 0 0.998294 0.0583855 0.000338215 0.998294 0.0738353 -0.000291641 0.99727 0.110117 0.000322243 0.993919 0.122688 -0.000267314 0.992445 0.161561 0.000291379 0.986863 0.212573 0 0.977145 0.219244 0.00016843 0.97567 0.212573 -0.000246357 0.977145 0.171183 0.000227874 0.985239 0.161561 -0.00029138 0.986863 0.110117 0 0.993919 0.161561 0 0.986863 0.161561 0 0.986863 0.0248583 -0.000305077 0.999691 0.0162429 0 0.999868 0.0162429 0 0.999868 0.171183 -0.000227874 0.985239 0.212573 0.000246357 0.977145 0.262979 0 0.964802 0.266809 9.07525e-05 0.963749 0.262979 -0.000184321 0.964802 0.219244 -0.00016843 0.97567 0.262979 0.000184321 0.964802 0.312537 0 0.949905 0.313688 0 0.949526 0.312537 -9.49193e-05 0.949905 0.266809 -9.07524e-05 0.963749 0.312537 9.49192e-05 0.949905 0.361412 0 0.932406 0.405343 0.000139391 0.914165 0.361412 -0.000149699 0.932406 0.359893 0 0.932994 0.313688 0 0.949526 0.359892 0 0.932994 0.405343 -0.000139391 0.914165 0.449834 -0.000270214 0.893112 0.493214 -0.000407821 0.869908 0.524746 -0.000191724 0.851259 0.544878 0.000605991 0.838515 0.546081 0.000754401 0.837732 0.565879 -0.000208945 0.824488 0.585487 0.000490314 0.810682 0.605642 -0.000202975 0.795737 0.624635 0.000364413 0.780917 0.629991 0.00113549 0.776601 0.643905 -0.000161391 0.765105 0.662303 0.000231392 0.749236 0.680645 -7.32473e-05 0.732613 0.698332 7.94608e-05 0.715774 0.700784 0.000504569 0.713373 0.71623 0 0.697864 0.715687 9.64467e-05 0.698422 0.732689 -8.55107e-05 0.680563 0.750539 0 0.660826 0.76529 0.000276594 0.643686 0.748959 -0.000391349 0.662616 
0.361412 0.000149699 0.932406 0.409486 0 0.912316 0.449834 0.000270214 0.893112 0.409486 -0.000285771 0.912316 0.409486 0.000285772 0.912316 0.45632 0 0.889816 0.493214 0.000407821 0.869908 0.45632 -0.000432464 0.889816 0.45632 0.000432466 0.889816 0.501867 0 0.864945 0.524747 0.000191723 0.851259 0.501867 -0.000587388 0.864945 0.501867 0.000587387 0.864945 0.546081 0 0.837732 0.565879 0.000208946 0.824488 0.546081 -0.000754402 0.837732 0.544878 -0.000605992 0.838515 0.588841 0.000935464 0.808248 0.629992 0 0.776602 0.643905 0.000161391 0.765105 0.629991 -0.00113549 0.776601 0.624635 -0.000364413 0.780917 0.605642 0.000202976 0.795737 0.585487 -0.000490315 0.810682 0.588841 -0.000935464 0.808248 0.588841 0 0.808249 0.546081 0 0.837732 0.669454 0.00135776 0.742853 0.697873 0 0.716221 0.698332 -7.94645e-05 0.715774 0.680645 7.32469e-05 0.732613 0.676108 -0.00050939 0.736803 0.669454 0 0.742853 0.750539 0 0.660826 0.76655 0 0.642185 0.782951 0 0.622084 0.797981 0 0.602683 0.810449 0 0.585809 0.810449 0 0.585809 0.820434 0 0.571741 0.824837 -0.000776788 0.56537 0.827391 0 0.561627 0.824837 0.000776788 0.56537 0.820434 0 0.571741 0.827391 0 0.561627 0.748959 0.000391351 0.662616 0.76529 -0.000276594 0.643686 0.780385 0.000905929 0.625299 0.796018 -0.000499591 0.605273 0.838235 0 0.545308 0.838235 0 0.545309 0.84823 0 0.529629 0.854711 0 0.519104 0.851672 0.00116283 0.524074 0.84823 0 0.529629 0.851672 -0.00116283 0.524074 0.854711 0 0.519104 0.863963 0 0.503556 0.873915 0 0.486078 0.879819 0 0.47531 0.876491 0.00178615 0.481415 0.873915 0 0.486078 0.876491 -0.00178614 0.481415 0.879819 0 0.47531 0.887641 0 0.460536 0.897325 0 0.441371 0.902701 0 0.430269 0.899371 0.00367857 0.437171 0.897325 0 0.441371 0.899371 -0.00367857 0.437171 0.908089 0.00611156 0.418733 0.909168 0 0.416429 0.907898 -8.01186e-05 0.419192 0.919113 0 0.393995 0.919113 0 0.393995 0.928504 0 0.371322 0.928504 0 0.371322 0.937343 0 0.348409 0.945606 0 0.325315 0.953309 0 0.301997 0.960428 0 0.278528 0.960428 0 0.278528 0.966977 0 0.254864 0.966977 0 0.254864 0.972941 0 0.231054 0.978316 0 0.207116 0.972941 0 0.231054 0.937343 0 0.348408 0.945606 0 0.325315 0.953309 0 0.301997 0.978316 0 0.207116 0.983106 0 0.183036 0.987304 0 0.158843 0.983106 0 0.183036 0.987304 0 0.158843 0.9909 0 0.134597 0.993906 0 0.110234 0.9909 0 0.134597 0.993906 0 0.110234 0.996313 0 0.0857961 0.998118 0 0.0613211 0.996313 0 0.0857961 0.998118 0 0.0613211 0.999323 0 0.0367899 0.999925 0 0.0122756 0.999323 0 0.0367899 0.999925 0 0.0122756 0.887641 0 0.460536 0.863963 0 0.503556 0.797981 0 0.602683 0.796018 0.000499591 0.605273 0.780385 -0.000905929 0.625299 0.782951 0 0.622084 0.76655 0 0.642185 0.732689 8.55107e-05 0.680563 0.715687 -9.64482e-05 0.698422 0.713382 -0.000504565 0.700775 0.660826 0 0.750539 0.629992 0 0.776602 0.662303 -0.000231395 0.749236 0.122688 0.000267314 0.992445 0.110117 -0.000322243 0.993919 0.0583855 0 0.998294 0.110117 0 0.993919 0.0738353 0.000291641 0.99727 0.0583855 -0.000338215 0.998294 0.212573 0 0.977145 0.262979 0 0.964802 0.312537 0 0.949905 0.361412 0 0.932406 0.409486 0 0.912316 0.45632 0 0.889816 0.501867 0 0.864945 0.588841 0 0.808249 0.733131 0 0.680087 0.733131 0 0.680087 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 
0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 1 0 0 1 0 1 0 0 1 0 0 0 0 1 0 0 1 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.66731 0.74478 0 -0.611173 0.791497 0 -0.551787 0.833985 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 0 1 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.217429 0.976076 0 0.288102 0.9576 0 0.357246 0.93401 0 0.424457 0.905448 0 0.489392 0.872064 0 0.551781 0.833989 0 0.611179 0.791492 0 0.667303 0.744786 0 0.719917 0.69406 0 0.768644 0.639676 0 0.813288 0.581862 0 0.853594 0.520939 0 0.889339 0.457248 0 0.920346 0.391106 0 0.946443 0.322872 0 0.967483 0.252938 0 0.983365 0.181638 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.719923 -0.694053 0 0.667296 -0.744792 0 0.611187 -0.791486 0 0.551772 -0.833995 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.217441 -0.976074 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0 -1 0 -0.0729821 
-0.997333 0 -0.145608 -0.989342 0 -0.217438 -0.976074 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.551778 -0.833991 0 -0.611181 -0.791491 0 -0.667303 -0.744787 0 -0.719917 -0.69406 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.719917 0.69406 0 0.719917 0.69406 0 
0.719917 0.69406 0 0.667303 0.744786 0 0.667303 0.744786 0 0.667303 0.744786 0 0.611179 0.791492 0 0.611179 0.791492 0 0.611179 0.791492 0 0.551781 0.833989 0 0.551781 0.833989 0 0.551781 0.833989 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.66731 0.74478 0 -0.611173 0.791497 0 -0.551787 0.833985 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 0 1 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.217429 0.976076 0 0.288102 0.9576 0 0.357246 0.93401 0 0.424457 0.905448 0 0.489392 0.872064 0 0.551781 0.833989 0 0.611179 0.791492 0 0.667303 0.744786 0 0.719917 0.69406 0 0.768644 0.639676 0 0.813288 0.581862 0 0.853594 0.520939 0 0.889339 0.457248 0 0.920346 0.391106 0 0.946443 0.322872 0 0.967483 0.252938 0 0.983365 0.181638 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.719923 -0.694053 0 0.667296 -0.744792 0 0.611187 -0.791486 0 0.551772 -0.833995 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.217441 -0.976074 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0 -1 0 -0.0729821 -0.997333 0 -0.145608 
-0.989342 0 -0.217438 -0.976074 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.551778 -0.833991 0 -0.611181 -0.791491 0 -0.667303 -0.744787 0 -0.719917 -0.69406 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.719917 0.69406 0 0.719917 0.69406 0 0.719917 0.69406 0 
0.667303 0.744786 0 0.667303 0.744786 0 0.667303 0.744786 0 0.611179 0.791492 0 0.611179 0.791492 0 0.611179 0.791492 0 0.551781 0.833989 0 0.551781 0.833989 0 0.551781 0.833989 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719917 -0.69406 0 -0.667311 -0.74478 0 -0.611171 -0.791498 0 -0.551763 -0.834001 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217429 -0.976076 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217426 -0.976077 0 0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551769 -0.833997 0 0.611165 -0.791503 0 0.667317 -0.744774 0 0.719911 -0.694066 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.667317 0.744774 0 0.611165 0.791503 0 0.551769 0.833997 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 
0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551763 0.834001 0 -0.611171 0.791498 0 -0.667311 0.74478 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.66731 0.74478 0 -0.667311 0.74478 0 -0.66731 0.74478 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551769 0.833997 0 0.551769 0.833997 0 0.551769 0.833997 0 0.611165 0.791503 0 0.611165 0.791503 0 0.611165 0.791503 0 0.667317 0.744774 0 0.667317 0.744774 0 0.667317 0.744774 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.611165 -0.791503 
0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.66731 -0.74478 0 -0.667311 -0.74478 0 -0.66731 -0.74478 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.667317 0.744774 0 0.611165 0.791503 0 0.551769 0.833997 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551763 0.834001 0 -0.611171 0.791498 0 -0.667311 0.74478 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719917 -0.69406 0 -0.667311 -0.74478 0 -0.611171 -0.791498 0 -0.551763 -0.834001 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217429 -0.976076 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217426 -0.976077 0 
0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551769 -0.833997 0 0.611165 -0.791503 0 0.667317 -0.744774 0 0.719911 -0.694066 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.667311 -0.74478 0 -0.667311 -0.74478 0 -0.667311 -0.74478 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667311 
0.74478 0 -0.667311 0.74478 0 -0.667311 0.74478 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551769 0.833997 0 0.551769 0.833997 0 0.551769 0.833997 0 0.611165 0.791503 0 0.611165 0.791503 0 0.611165 0.791503 0 0.667317 0.744774 0 0.667317 0.744774 0 0.667317 0.744774 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719911 -0.694066 0 -0.667317 -0.744774 0 -0.611165 -0.791503 0 -0.551769 -0.833997 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217426 -0.976077 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.217429 -0.976076 0 0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551763 -0.834001 0 0.611171 -0.791498 0 0.667311 -0.74478 0 0.719917 -0.69406 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719917 0.69406 0 0.667311 0.74478 0 0.611171 0.791498 0 0.551763 0.834001 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217429 0.976076 0 0.145606 0.989343 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.288102 
0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551769 0.833997 0 -0.611165 0.791503 0 -0.667317 0.744774 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.667317 0.744774 0 -0.667317 0.744774 0 -0.667317 0.744774 0 -0.611165 0.791503 0 -0.611165 0.791503 0 -0.611165 0.791503 0 -0.551769 0.833997 0 -0.551769 0.833997 0 -0.551769 0.833997 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551763 0.834001 0 0.551763 0.834001 0 0.551763 0.834001 0 0.611171 0.791498 0 0.611171 0.791498 0 0.611171 0.791498 0 0.667311 0.74478 0 0.667311 0.74478 0 0.667311 0.74478 0 0.719917 0.69406 0 0.719917 0.69406 0 0.719917 0.69406 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667311 -0.74478 0 0.667311 -0.74478 0 0.667311 -0.74478 0 0.611171 -0.791498 0 0.611171 -0.791498 0 0.611171 -0.791498 0 0.551763 -0.834001 0 0.551763 
-0.834001 0 0.551763 -0.834001 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217429 -0.976076 0 0.217429 -0.976076 0 0.217429 -0.976076 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.217426 -0.976077 0 -0.217426 -0.976077 0 -0.217426 -0.976077 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551769 -0.833997 0 -0.551769 -0.833997 0 -0.551769 -0.833997 0 -0.611165 -0.791503 0 -0.611165 -0.791503 0 -0.611165 -0.791503 0 -0.667317 -0.744774 0 -0.667317 -0.744774 0 -0.667317 -0.744774 0 -0.719911 -0.694066 0 -0.719911 -0.694066 0 -0.719911 -0.694066 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.719923 -0.694053 0 -0.667296 -0.744792 0 -0.611187 -0.791486 0 -0.551772 -0.833995 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.217441 -0.976074 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217438 -0.976074 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.551778 -0.833991 0 0.611181 -0.791491 0 0.667303 -0.744787 0 0.719917 -0.69406 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920346 0.391106 0 0.889339 0.457248 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.66731 0.74478 0 0.611173 0.791497 0 0.551787 0.833985 0 0.489392 0.872064 0 0.424457 0.905448 0 0.357246 0.93401 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 
-0.357246 0.93401 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.551781 0.833989 0 -0.611179 0.791492 0 -0.667303 0.744786 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.551787 0.833985 0 0.551787 0.833985 0 0.551787 0.833985 0 0.611173 0.791497 0 0.611173 0.791497 0 0.611173 0.791497 0 0.66731 0.74478 0 0.66731 0.74478 0 0.66731 0.74478 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.551778 
-0.833991 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920346 0.391106 0 0.889339 0.457248 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.66731 0.74478 0 0.611173 0.791497 0 0.551787 0.833985 0 0.489392 0.872064 0 0.424457 0.905448 0 0.357246 0.93401 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.551781 0.833989 0 -0.611179 0.791492 0 -0.667303 0.744786 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.719923 -0.694053 0 -0.667296 -0.744792 0 -0.611187 -0.791486 0 -0.551772 -0.833995 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.217441 -0.976074 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217438 -0.976074 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.424457 
-0.905448 0 0.489401 -0.872059 0 0.551778 -0.833991 0 0.611181 -0.791491 0 0.667303 -0.744787 0 0.719917 -0.69406 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.667303 0.744786 0 
-0.611179 0.791492 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.551787 0.833985 0 0.551787 0.833985 0 0.551787 0.833985 0 0.611173 0.791497 0 0.611173 0.791497 0 0.611173 0.791497 0 0.66731 0.74478 0 0.66731 0.74478 0 0.66731 0.74478 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 -4.5155e-08 -1 -6.77326e-08 -1.85437e-07 -1 -4.96705e-08 -1.0435e-07 -1 -6.26099e-08 0 -1 0 0 -1 -6.20882e-08 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 
0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1.85437e-07 1 4.96705e-08 0 1 0 0 1 0 0 1 6.20882e-08 0 1 6.20882e-08 0 1 0 2.03056e-07 1 4.6859e-08 3.31137e-07 1 1.24176e-07 -1.54505e-07 1 8.58362e-08 -4.09372e-08 1 7.16402e-08 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -0.863779 0.503871 0 -0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0.999333 0 0.0365165 0.994001 0 0.109371 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920344 0 -0.391111 0.889342 0 -0.457243 0.853594 0 -0.520939 0.813288 0 -0.581862 0.768644 0 -0.639676 0.719911 0 -0.694066 0.667317 0 -0.744774 0.611165 0 -0.791503 0.551769 0 -0.833997 0.489421 0 -0.872048 0.424457 0 -0.905448 0.357235 0 -0.934015 0.288102 0 -0.9576 0.217426 0 -0.976077 0.145608 0 -0.989342 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.217429 0 -0.976076 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.551763 0 -0.834001 -0.611171 0 -0.791498 -0.667311 0 -0.74478 -0.719917 0 -0.69406 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 
0.0365165 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.667311 0 0.74478 -0.611171 0 0.791498 -0.551763 0 0.834001 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145608 0 0.989342 0.217426 0 0.976077 0.288102 0 0.9576 0.357235 0 0.934015 0.424457 0 0.905448 0.489421 0 0.872048 0.551769 0 0.833997 0.611165 0 0.791503 0.667317 0 0.744774 0.719911 0 0.694066 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889342 0 0.457243 0.920344 0 0.391111 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.667317 0 0.744774 0.667317 0 0.744774 0.667317 0 0.744774 0.611165 0 0.791503 0.611165 0 0.791503 0.611165 0 0.791503 0.551769 0 0.833997 0.551769 0 0.833997 0.551769 0 0.833997 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920344 0 
-0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.217429 0 -0.976076 -0.217429 0 -0.976076 -0.217429 0 -0.976076 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.217426 0 -0.976077 0.217426 0 -0.976077 0.217426 0 -0.976077 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.551769 0 -0.833997 0.551769 0 -0.833997 0.551769 0 -0.833997 0.611165 0 -0.791503 0.611165 0 -0.791503 0.611165 0 -0.791503 0.667317 0 -0.744774 0.667317 0 -0.744774 0.667317 0 -0.744774 0.719911 0 -0.694066 0.719911 0 -0.694066 0.719911 0 -0.694066 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0.863779 0 0.503871 0.863779 0 0.503871 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.667317 0 0.744774 
-0.611165 0 0.791503 -0.551769 0 0.833997 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145606 0 0.989343 0.217429 0 0.976076 0.288102 0 0.9576 0.357235 0 0.934015 0.424457 0 0.905448 0.489421 0 0.872048 0.551763 0 0.834001 0.611171 0 0.791498 0.667311 0 0.74478 0.719917 0 0.69406 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889342 0 0.457243 0.920344 0 0.391111 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920344 0 -0.391111 0.889342 0 -0.457243 0.853594 0 -0.520939 0.813288 0 -0.581862 0.768644 0 -0.639676 0.719917 0 -0.69406 0.667311 0 -0.74478 0.611171 0 -0.791498 0.551763 0 -0.834001 0.489421 0 -0.872048 0.424457 0 -0.905448 0.357235 0 -0.934015 0.288102 0 -0.9576 0.217429 0 -0.976076 0.145606 0 -0.989343 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.217426 0 -0.976077 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.551769 0 -0.833997 -0.611165 0 -0.791503 -0.667317 0 -0.744774 -0.719911 0 -0.694066 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 0.217429 0 -0.976076 0.217429 0 -0.976076 0.217429 0 -0.976076 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.551763 0 -0.834001 0.551763 0 -0.834001 0.551763 0 -0.834001 0.611171 0 -0.791498 0.611171 0 -0.791498 0.611171 0 -0.791498 0.667311 0 -0.74478 0.667311 0 -0.74478 0.667311 0 -0.74478 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.889342 0 -0.457243 
0.889342 0 -0.457243 0.889342 0 -0.457243 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.667311 0 0.74478 0.667311 0 0.74478 0.667311 0 0.74478 0.611171 0 0.791498 0.611171 0 0.791498 0.611171 0 0.791498 0.551763 0 0.834001 0.551763 0 0.834001 0.551763 0 0.834001 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217429 0 0.976076 0.217429 0 0.976076 0.217429 0 0.976076 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -0.863779 0 0.503871 -0.863779 0 0.503871 -1 0 0 -1 2.20432e-07 4.40862e-08 -1 0 6.20882e-08 -1 8.18745e-08 6.14059e-08 -1 1.65568e-07 1.24176e-07 -1 -1.99569e-07 6.6523e-08 -1 0 0 -1 1.85437e-07 4.96705e-08 -1 0 0 -1 0 0 -1 0 6.20882e-08 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.719923 0 -0.694053 -0.667296 0 -0.744792 -0.611187 0 -0.791486 -0.551772 0 -0.833995 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.217441 0 -0.976074 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.217438 0 -0.976074 0.288091 0 -0.957603 0.357246 0 -0.93401 0.424457 0 -0.905448 0.489401 0 -0.872059 0.551778 0 -0.833991 0.611181 0 -0.791491 0.667303 0 -0.744787 0.719917 0 -0.69406 0.768639 0 -0.639683 0.813288 0 -0.581862 0.853597 0 -0.520933 0.889337 0 -0.457253 0.920346 0 -0.391106 0.946443 0 -0.322872 0.967483 0 -0.252938 0.983365 0 -0.181638 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920346 0 0.391106 0.889339 0 0.457248 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719911 0 0.694066 0.66731 0 0.74478 0.611173 0 0.791497 0.551787 0 0.833985 0.489392 0 0.872064 0.424457 0 0.905448 0.357246 0 0.93401 0.288102 0 0.9576 0.217426 0 0.976077 0.145608 0 0.989342 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.551781 0 0.833989 -0.611179 0 0.791492 -0.667303 0 0.744786 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 
0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.551787 0 0.833985 0.551787 0 0.833985 0.551787 0 0.833985 0.611173 0 0.791497 0.611173 0 0.791497 0.611173 0 0.791497 0.66731 0 0.74478 0.66731 0 0.74478 0.66731 0 0.74478 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.667303 0 -0.744787 0.667303 0 -0.744787 0.667303 0 -0.744787 0.611181 0 -0.791491 0.611181 0 -0.791491 0.611181 0 -0.791491 0.551778 0 -0.833991 0.551778 0 -0.833991 0.551778 0 -0.833991 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.217438 0 -0.976074 0.217438 0 -0.976074 0.217438 0 -0.976074 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.946443 
0 -0.322872 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.667303 0 0.744786 -0.611179 0 0.791492 -0.551781 0 0.833989 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145608 0 0.989342 0.217426 0 0.976077 0.288102 0 0.9576 0.357246 0 0.93401 0.424457 0 0.905448 0.489392 0 0.872064 0.551787 0 0.833985 0.611173 0 0.791497 0.66731 0 0.74478 0.719911 0 0.694066 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889339 0 0.457248 0.920346 0 0.391106 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920346 0 -0.391106 0.889337 0 -0.457253 0.853597 0 -0.520933 0.813288 0 -0.581862 0.768639 0 -0.639683 0.719917 0 -0.69406 0.667303 0 -0.744787 0.611181 0 -0.791491 0.551778 0 -0.833991 0.489401 0 -0.872059 0.424457 0 -0.905448 0.357246 0 -0.93401 0.288091 0 -0.957603 0.217438 0 -0.976074 0.145608 0 -0.989342 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.217441 0 -0.976074 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.551772 0 -0.833995 -0.611187 0 -0.791486 -0.667296 0 -0.744792 -0.719923 0 -0.694053 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145608 0 
-0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.217438 0 -0.976074 0.217438 0 -0.976074 0.217438 0 -0.976074 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.551778 0 -0.833991 0.551778 0 -0.833991 0.551778 0 -0.833991 0.611181 0 -0.791491 0.611181 0 -0.791491 0.611181 0 -0.791491 0.667303 0 -0.744787 0.667303 0 -0.744787 0.667303 0 -0.744787 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.66731 0 0.74478 0.66731 0 0.74478 0.66731 0 0.74478 0.611173 0 0.791497 0.611173 0 0.791497 0.611173 0 0.791497 0.551787 0 0.833985 0.551787 0 0.833985 0.551787 0 0.833985 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.946443 0 0.322872 
-0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.719911 0 -0.694066 -0.667317 0 -0.744774 -0.611165 0 -0.791503 -0.551769 0 -0.833997 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.217426 0 -0.976077 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.217429 0 -0.976076 0.288102 0 -0.9576 0.357235 0 -0.934015 0.424457 0 -0.905448 0.489421 0 -0.872048 0.551763 0 -0.834001 0.611171 0 -0.791498 0.667311 0 -0.74478 0.719917 0 -0.69406 0.768644 0 -0.639676 0.813288 0 -0.581862 0.853594 0 -0.520939 0.889342 0 -0.457243 0.920344 0 -0.391111 0.946443 0 -0.322872 0.967483 0 -0.252938 0.983365 0 -0.181638 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920344 0 0.391111 0.889342 0 0.457243 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719917 0 0.69406 0.667311 0 0.74478 0.611171 0 0.791498 0.551763 0 0.834001 0.489421 0 0.872048 0.424457 0 0.905448 0.357235 0 0.934015 0.288102 0 0.9576 0.217429 0 0.976076 0.145606 0 0.989343 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.551769 0 0.833997 -0.611165 0 0.791503 -0.667317 0 0.744774 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.217429 0 0.976076 0.217429 0 0.976076 
0.217429 0 0.976076 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.551763 0 0.834001 0.551763 0 0.834001 0.551763 0 0.834001 0.611171 0 0.791498 0.611171 0 0.791498 0.611171 0 0.791498 0.667311 0 0.74478 0.667311 0 0.74478 0.667311 0 0.74478 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.667311 0 -0.74478 0.667311 0 -0.74478 0.667311 0 -0.74478 0.611171 0 -0.791498 0.611171 0 -0.791498 0.611171 0 -0.791498 0.551763 0 -0.834001 0.551763 0 -0.834001 0.551763 0 -0.834001 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.217429 0 -0.976076 0.217429 0 -0.976076 0.217429 0 -0.976076 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 
-0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920344 0 0.391111 0.889342 0 0.457243 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719911 0 0.694066 0.667317 0 0.744774 0.611165 0 0.791503 0.551769 0 0.833997 0.489421 0 0.872048 0.424457 0 0.905448 0.357235 0 0.934015 0.288102 0 0.9576 0.217426 0 0.976077 0.145608 0 0.989342 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.551763 0 0.834001 -0.611171 0 0.791498 -0.667311 0 0.74478 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.719917 0 -0.69406 -0.667311 0 -0.74478 -0.611171 0 -0.791498 -0.551763 0 -0.834001 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.217429 0 -0.976076 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.217426 0 -0.976077 0.288102 0 -0.9576 0.357235 0 -0.934015 0.424457 0 -0.905448 0.489421 0 -0.872048 0.551769 0 -0.833997 0.611165 0 -0.791503 0.667317 0 -0.744774 0.719911 0 -0.694066 0.768644 0 -0.639676 0.813288 0 -0.581862 0.853594 0 -0.520939 0.889342 0 -0.457243 0.920344 0 -0.391111 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.719911 0 -0.694066 0.719911 0 -0.694066 0.719911 0 -0.694066 0.667317 0 -0.744774 0.667317 0 -0.744774 0.667317 0 -0.744774 0.611165 0 -0.791503 0.611165 0 -0.791503 0.611165 0 -0.791503 0.551769 0 -0.833997 0.551769 0 -0.833997 0.551769 0 -0.833997 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.217426 0 -0.976077 0.217426 0 -0.976077 0.217426 0 -0.976077 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.217429 0 -0.976076 -0.217429 0 
-0.976076 -0.217429 0 -0.976076 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.551769 0 0.833997 0.551769 0 0.833997 0.551769 0 0.833997 0.611165 0 0.791503 0.611165 0 0.791503 0.611165 0 0.791503 0.667317 0 0.744774 0.667317 0 0.744774 0.667317 0 0.744774 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 
0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.66731 0 0.74478 -0.611173 0 0.791497 -0.551787 0 0.833985 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145606 0 0.989343 0.217429 0 0.976076 0.288102 0 0.9576 0.357246 0 0.93401 0.424457 0 0.905448 0.489392 0 0.872064 0.551781 0 0.833989 0.611179 0 0.791492 0.667303 0 0.744786 0.719917 0 0.69406 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889339 0 0.457248 0.920346 0 0.391106 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920346 0 -0.391106 0.889337 0 -0.457253 0.853597 0 -0.520933 0.813288 0 -0.581862 0.768639 0 -0.639683 0.719923 0 -0.694053 0.667296 0 -0.744792 0.611187 0 -0.791486 0.551772 0 -0.833995 0.489401 0 -0.872059 0.424457 0 -0.905448 0.357246 0 -0.93401 0.288091 0 -0.957603 0.217441 0 -0.976074 0.145606 0 -0.989343 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.217438 0 -0.976074 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.551778 0 -0.833991 -0.611181 0 -0.791491 -0.667303 0 -0.744787 -0.719917 0 -0.69406 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 
[mesh geometry data omitted: this span is the raw contents of a mesh asset file added by this diff — long arrays of per-vertex unit normal vectors (x y z triples) followed by triangle/vertex index lists — reproduced here with its surrounding markup stripped; the numeric payload is not meaningful as prose and is left out of this excerpt.]
1651 809 1651 809 1652 3822 1652 3817 1652 814 1653 833 1653 6555 1653 6555 1654 833 1654 6495 1654 832 1655 6495 1655 4606 1655 5771 1656 832 1656 4606 1656 808 1657 4264 1657 833 1657 808 1658 4258 1658 4264 1658 4264 1659 834 1659 833 1659 833 1660 834 1660 6495 1660 6495 1661 834 1661 6368 1661 835 1662 6495 1662 6368 1662 6555 1663 6495 1663 832 1663 977 1664 836 1664 974 1664 974 1665 836 1665 916 1665 838 1666 916 1666 837 1666 974 1667 838 1667 837 1667 974 1668 916 1668 838 1668 916 1669 836 1669 839 1669 841 1670 839 1670 840 1670 729 1671 841 1671 840 1671 729 1672 728 1672 841 1672 841 1673 728 1673 842 1673 915 1674 842 1674 918 1674 969 1675 918 1675 844 1675 843 1676 844 1676 966 1676 843 1677 969 1677 844 1677 836 1678 845 1678 839 1678 839 1679 845 1679 840 1679 728 1680 846 1680 842 1680 842 1681 846 1681 847 1681 855 1682 847 1682 857 1682 849 1683 857 1683 848 1683 727 1684 849 1684 848 1684 727 1685 860 1685 849 1685 727 1686 726 1686 860 1686 860 1687 726 1687 861 1687 859 1688 861 1688 909 1688 908 1689 909 1689 850 1689 954 1690 850 1690 909 1690 851 1691 909 1691 865 1691 907 1692 865 1692 906 1692 853 1693 906 1693 854 1693 852 1694 854 1694 863 1694 852 1695 853 1695 854 1695 842 1696 847 1696 855 1696 919 1697 855 1697 920 1697 914 1698 920 1698 856 1698 963 1699 856 1699 911 1699 963 1700 914 1700 856 1700 855 1701 857 1701 849 1701 920 1702 849 1702 858 1702 911 1703 858 1703 912 1703 959 1704 912 1704 858 1704 859 1705 858 1705 860 1705 861 1706 859 1706 860 1706 726 1707 725 1707 861 1707 861 1708 725 1708 864 1708 862 1709 864 1709 723 1709 722 1710 862 1710 723 1710 722 1711 922 1711 862 1711 722 1712 721 1712 922 1712 922 1713 721 1713 871 1713 921 1714 871 1714 867 1714 854 1715 867 1715 870 1715 863 1716 870 1716 869 1716 863 1717 854 1717 870 1717 861 1718 864 1718 862 1718 909 1719 862 1719 865 1719 909 1720 861 1720 862 1720 721 1721 866 1721 871 1721 871 1722 866 1722 720 1722 872 1723 720 1723 719 1723 867 1724 719 1724 923 1724 870 1725 923 1725 868 1725 869 1726 868 1726 949 1726 869 1727 870 1727 868 1727 871 1728 720 1728 872 1728 867 1729 872 1729 719 1729 867 1730 871 1730 872 1730 923 1731 719 1731 873 1731 718 1732 873 1732 881 1732 718 1733 923 1733 873 1733 718 1734 874 1734 923 1734 718 1735 875 1735 874 1735 718 1736 876 1736 875 1736 875 1737 876 1737 874 1737 874 1738 876 1738 877 1738 880 1739 877 1739 878 1739 946 1740 878 1740 879 1740 946 1741 880 1741 878 1741 946 1742 948 1742 880 1742 880 1743 948 1743 868 1743 874 1744 868 1744 923 1744 874 1745 880 1745 868 1745 874 1746 877 1746 880 1746 719 1747 881 1747 873 1747 876 1748 716 1748 877 1748 877 1749 716 1749 891 1749 892 1750 891 1750 715 1750 882 1751 715 1751 714 1751 893 1752 714 1752 883 1752 713 1753 893 1753 883 1753 713 1754 894 1754 893 1754 713 1755 884 1755 894 1755 894 1756 884 1756 885 1756 896 1757 885 1757 924 1757 886 1758 924 1758 899 1758 886 1759 896 1759 924 1759 886 1760 928 1760 896 1760 896 1761 928 1761 887 1761 895 1762 887 1762 904 1762 888 1763 904 1763 943 1763 889 1764 888 1764 943 1764 889 1765 890 1765 888 1765 889 1766 905 1766 890 1766 890 1767 905 1767 878 1767 892 1768 878 1768 877 1768 891 1769 892 1769 877 1769 892 1770 715 1770 882 1770 890 1771 882 1771 893 1771 888 1772 893 1772 894 1772 895 1773 894 1773 896 1773 887 1774 895 1774 896 1774 882 1775 714 1775 893 1775 884 1776 897 1776 885 1776 885 1777 897 1777 898 1777 924 1778 898 1778 903 1778 899 1779 903 1779 925 1779 899 1780 924 1780 903 1780 897 1781 710 1781 898 1781 898 1782 
710 1782 901 1782 900 1783 901 1783 709 1783 903 1784 709 1784 902 1784 925 1785 903 1785 902 1785 898 1786 901 1786 900 1786 903 1787 900 1787 709 1787 903 1788 898 1788 900 1788 895 1789 904 1789 888 1789 894 1790 895 1790 888 1790 905 1791 879 1791 878 1791 948 1792 949 1792 868 1792 853 1793 907 1793 906 1793 865 1794 907 1794 851 1794 851 1795 907 1795 935 1795 954 1796 851 1796 935 1796 954 1797 909 1797 851 1797 954 1798 908 1798 850 1798 960 1799 910 1799 908 1799 960 1800 959 1800 910 1800 910 1801 959 1801 859 1801 908 1802 859 1802 909 1802 908 1803 910 1803 859 1803 959 1804 911 1804 912 1804 966 1805 913 1805 914 1805 966 1806 919 1806 913 1806 966 1807 918 1807 919 1807 966 1808 844 1808 918 1808 837 1809 917 1809 969 1809 837 1810 915 1810 917 1810 837 1811 916 1811 915 1811 915 1812 916 1812 841 1812 842 1813 915 1813 841 1813 841 1814 916 1814 839 1814 969 1815 917 1815 915 1815 918 1816 969 1816 915 1816 919 1817 918 1817 842 1817 855 1818 919 1818 842 1818 914 1819 913 1819 919 1819 920 1820 914 1820 919 1820 849 1821 920 1821 855 1821 911 1822 856 1822 920 1822 858 1823 911 1823 920 1823 860 1824 858 1824 849 1824 959 1825 858 1825 859 1825 862 1826 922 1826 865 1826 865 1827 922 1827 921 1827 906 1828 921 1828 854 1828 906 1829 865 1829 921 1829 871 1830 921 1830 922 1830 854 1831 921 1831 867 1831 923 1832 870 1832 867 1832 890 1833 878 1833 892 1833 882 1834 890 1834 892 1834 888 1835 890 1835 893 1835 885 1836 896 1836 894 1836 898 1837 924 1837 885 1837 902 1838 926 1838 925 1838 925 1839 926 1839 975 1839 899 1840 975 1840 927 1840 886 1841 927 1841 939 1841 928 1842 939 1842 887 1842 928 1843 886 1843 939 1843 926 1844 6505 1844 975 1844 975 1845 6505 1845 927 1845 927 1846 6505 1846 6510 1846 939 1847 6510 1847 929 1847 942 1848 929 1848 940 1848 941 1849 940 1849 930 1849 944 1850 930 1850 931 1850 945 1851 931 1851 6509 1851 947 1852 6509 1852 932 1852 937 1853 932 1853 938 1853 950 1854 938 1854 951 1854 936 1855 951 1855 933 1855 6508 1856 936 1856 933 1856 6508 1857 934 1857 936 1857 6508 1858 952 1858 934 1858 934 1859 952 1859 955 1859 907 1860 955 1860 935 1860 907 1861 934 1861 955 1861 907 1862 853 1862 934 1862 934 1863 853 1863 936 1863 936 1864 853 1864 852 1864 863 1865 936 1865 852 1865 863 1866 950 1866 936 1866 863 1867 869 1867 950 1867 950 1868 869 1868 937 1868 938 1869 950 1869 937 1869 927 1870 6510 1870 939 1870 939 1871 929 1871 942 1871 887 1872 942 1872 904 1872 887 1873 939 1873 942 1873 942 1874 940 1874 941 1874 904 1875 941 1875 943 1875 904 1876 942 1876 941 1876 941 1877 930 1877 944 1877 889 1878 944 1878 905 1878 889 1879 941 1879 944 1879 889 1880 943 1880 941 1880 944 1881 931 1881 945 1881 905 1882 945 1882 879 1882 905 1883 944 1883 945 1883 945 1884 6509 1884 947 1884 946 1885 947 1885 948 1885 946 1886 945 1886 947 1886 946 1887 879 1887 945 1887 947 1888 932 1888 937 1888 948 1889 937 1889 949 1889 948 1890 947 1890 937 1890 950 1891 951 1891 936 1891 952 1892 6507 1892 955 1892 955 1893 6507 1893 953 1893 954 1894 953 1894 908 1894 954 1895 955 1895 953 1895 954 1896 935 1896 955 1896 6507 1897 957 1897 953 1897 953 1898 957 1898 956 1898 908 1899 956 1899 960 1899 908 1900 953 1900 956 1900 957 1901 961 1901 956 1901 956 1902 961 1902 958 1902 959 1903 958 1903 911 1903 959 1904 956 1904 958 1904 959 1905 960 1905 956 1905 961 1906 962 1906 958 1906 958 1907 962 1907 967 1907 911 1908 967 1908 963 1908 911 1909 958 1909 967 1909 962 1910 964 1910 967 1910 967 1911 964 1911 965 1911 914 1912 965 1912 966 1912 914 1913 
967 1913 965 1913 914 1914 963 1914 967 1914 964 1915 6506 1915 965 1915 965 1916 6506 1916 966 1916 966 1917 6506 1917 968 1917 843 1918 968 1918 969 1918 843 1919 966 1919 968 1919 6506 1920 970 1920 968 1920 968 1921 970 1921 971 1921 969 1922 971 1922 837 1922 969 1923 968 1923 971 1923 970 1924 972 1924 971 1924 971 1925 972 1925 837 1925 837 1926 972 1926 973 1926 974 1927 973 1927 977 1927 974 1928 837 1928 973 1928 972 1929 978 1929 973 1929 973 1930 978 1930 977 1930 869 1931 949 1931 937 1931 886 1932 899 1932 927 1932 899 1933 925 1933 975 1933 976 1934 836 1934 494 1934 976 1935 730 1935 836 1935 976 1936 617 1936 730 1936 730 1937 617 1937 3901 1937 6523 1938 3901 1938 6551 1938 6523 1939 730 1939 3901 1939 3901 1940 3533 1940 6551 1940 6551 1941 3533 1941 3870 1941 836 1942 977 1942 494 1942 494 1943 977 1943 439 1943 439 1944 977 1944 979 1944 469 1945 439 1945 979 1945 977 1946 978 1946 979 1946 979 1947 978 1947 5076 1947 5655 1948 979 1948 5076 1948 5076 1949 978 1949 5674 1949 5674 1950 978 1950 6549 1950 5723 1951 6549 1951 6487 1951 4699 1952 6487 1952 5764 1952 4699 1953 5723 1953 6487 1953 5674 1954 6549 1954 5723 1954 6159 1955 5723 1955 6091 1955 6159 1956 5674 1956 5723 1956 3945 1957 1025 1957 1026 1957 980 1958 1026 1958 981 1958 982 1959 981 1959 995 1959 983 1960 995 1960 1024 1960 997 1961 1024 1961 996 1961 984 1962 996 1962 998 1962 3946 1963 998 1963 1023 1963 985 1964 1023 1964 999 1964 986 1965 999 1965 6496 1965 987 1966 6496 1966 1002 1966 1001 1967 1002 1967 6498 1967 994 1968 6498 1968 988 1968 992 1969 988 1969 1003 1969 989 1970 1003 1970 6499 1970 6500 1971 989 1971 6499 1971 6500 1972 990 1972 989 1972 6500 1973 6501 1973 990 1973 990 1974 6501 1974 1004 1974 3952 1975 1004 1975 3953 1975 3952 1976 990 1976 1004 1976 3952 1977 3951 1977 990 1977 990 1978 3951 1978 989 1978 989 1979 3951 1979 991 1979 993 1980 989 1980 991 1980 993 1981 992 1981 989 1981 993 1982 3949 1982 992 1982 992 1983 3949 1983 994 1983 988 1984 992 1984 994 1984 980 1985 981 1985 982 1985 983 1986 982 1986 995 1986 983 1987 980 1987 982 1987 995 1988 996 1988 1024 1988 997 1989 996 1989 984 1989 3946 1990 984 1990 998 1990 3946 1991 997 1991 984 1991 998 1992 999 1992 1023 1992 985 1993 999 1993 986 1993 3947 1994 986 1994 987 1994 1000 1995 987 1995 1001 1995 3948 1996 1001 1996 994 1996 3949 1997 3948 1997 994 1997 986 1998 6496 1998 987 1998 987 1999 1002 1999 1001 1999 1001 2000 6498 2000 994 2000 992 2001 1003 2001 989 2001 6501 2002 6502 2002 1004 2002 1004 2003 6502 2003 1005 2003 3953 2004 1005 2004 3954 2004 3953 2005 1004 2005 1005 2005 6502 2006 1007 2006 1005 2006 1005 2007 1007 2007 1006 2007 3954 2008 1006 2008 3955 2008 3954 2009 1005 2009 1006 2009 1007 2010 1010 2010 1006 2010 1006 2011 1010 2011 1009 2011 3955 2012 1009 2012 1008 2012 3955 2013 1006 2013 1009 2013 1010 2014 1011 2014 1009 2014 1009 2015 1011 2015 1008 2015 1008 2016 1011 2016 1022 2016 3956 2017 1022 2017 1012 2017 1013 2018 1012 2018 6503 2018 1019 2019 6503 2019 1021 2019 1020 2020 1021 2020 1014 2020 1015 2021 1014 2021 1017 2021 1016 2022 1017 2022 1018 2022 824 2023 1018 2023 6504 2023 824 2024 1016 2024 1018 2024 1011 2025 1012 2025 1022 2025 3956 2026 1012 2026 1013 2026 1019 2027 1013 2027 6503 2027 1019 2028 3956 2028 1013 2028 6503 2029 1014 2029 1021 2029 1020 2030 1014 2030 1015 2030 1016 2031 1015 2031 1017 2031 1016 2032 1020 2032 1015 2032 1017 2033 6504 2033 1018 2033 1020 2034 1019 2034 1021 2034 3956 2035 1008 2035 1022 2035 3948 2036 1000 2036 1001 2036 1000 2037 3947 
2037 987 2037 3947 2038 985 2038 986 2038 985 2039 3946 2039 1023 2039 997 2040 983 2040 1024 2040 980 2041 3945 2041 1026 2041 1025 2042 981 2042 1026 2042 1112 2043 634 2043 1027 2043 1261 2044 1027 2044 1028 2044 1258 2045 1028 2045 1029 2045 1070 2046 1029 2046 1069 2046 1030 2047 1069 2047 1036 2047 1256 2048 1036 2048 1068 2048 1253 2049 1068 2049 1045 2049 1265 2050 1045 2050 1067 2050 1031 2051 1067 2051 1032 2051 1249 2052 1032 2052 1033 2052 1282 2053 1033 2053 1038 2053 1283 2054 1038 2054 1284 2054 1283 2055 1282 2055 1038 2055 1034 2056 1028 2056 1071 2056 1034 2057 1029 2057 1028 2057 1034 2058 1035 2058 1029 2058 1029 2059 1035 2059 1069 2059 1069 2060 1035 2060 1043 2060 1036 2061 1043 2061 637 2061 1068 2062 637 2062 1044 2062 1045 2063 1044 2063 1046 2063 1067 2064 1046 2064 1037 2064 1032 2065 1037 2065 636 2065 1033 2066 636 2066 1039 2066 1038 2067 1039 2067 635 2067 1040 2068 1038 2068 635 2068 1040 2069 1042 2069 1038 2069 1040 2070 633 2070 1042 2070 1042 2071 633 2071 1041 2071 1285 2072 1041 2072 1286 2072 1285 2073 1042 2073 1041 2073 1285 2074 1284 2074 1042 2074 1042 2075 1284 2075 1038 2075 1069 2076 1043 2076 1036 2076 1036 2077 637 2077 1068 2077 1068 2078 1044 2078 1045 2078 1045 2079 1046 2079 1067 2079 1067 2080 1037 2080 1032 2080 1032 2081 636 2081 1033 2081 1033 2082 1039 2082 1038 2082 633 2083 1047 2083 1041 2083 1041 2084 1047 2084 1048 2084 1286 2085 1048 2085 1287 2085 1286 2086 1041 2086 1048 2086 1047 2087 1051 2087 1048 2087 1048 2088 1051 2088 1050 2088 1287 2089 1050 2089 1049 2089 1287 2090 1048 2090 1050 2090 1051 2091 1052 2091 1050 2091 1050 2092 1052 2092 1054 2092 1049 2093 1054 2093 1055 2093 1049 2094 1050 2094 1054 2094 1052 2095 1053 2095 1054 2095 1054 2096 1053 2096 1056 2096 1055 2097 1056 2097 1058 2097 1055 2098 1054 2098 1056 2098 1053 2099 1057 2099 1056 2099 1056 2100 1057 2100 1059 2100 1058 2101 1059 2101 1288 2101 1058 2102 1056 2102 1059 2102 1057 2103 632 2103 1059 2103 1059 2104 632 2104 1061 2104 1288 2105 1061 2105 1060 2105 1288 2106 1059 2106 1061 2106 632 2107 1062 2107 1061 2107 1061 2108 1062 2108 1064 2108 1060 2109 1064 2109 1289 2109 1060 2110 1061 2110 1064 2110 1062 2111 1065 2111 1064 2111 1064 2112 1065 2112 1063 2112 1289 2113 1063 2113 807 2113 1289 2114 1064 2114 1063 2114 1065 2115 1066 2115 1063 2115 1063 2116 1066 2116 807 2116 1282 2117 1249 2117 1033 2117 1249 2118 1031 2118 1032 2118 1031 2119 1265 2119 1067 2119 1265 2120 1253 2120 1045 2120 1253 2121 1256 2121 1068 2121 1256 2122 1030 2122 1036 2122 1030 2123 1070 2123 1069 2123 1070 2124 1258 2124 1029 2124 1258 2125 1261 2125 1028 2125 1261 2126 1112 2126 1027 2126 1071 2127 1028 2127 1027 2127 634 2128 1071 2128 1027 2128 634 2129 1112 2129 1113 2129 638 2130 1113 2130 1111 2130 1109 2131 1111 2131 1110 2131 1072 2132 1110 2132 1083 2132 621 2133 1083 2133 1108 2133 622 2134 1108 2134 1107 2134 1106 2135 1107 2135 1105 2135 1104 2136 1105 2136 1085 2136 623 2137 1085 2137 1073 2137 624 2138 1073 2138 1074 2138 1076 2139 1074 2139 1082 2139 1075 2140 1082 2140 626 2140 1075 2141 1076 2141 1082 2141 1263 2142 1111 2142 1264 2142 1263 2143 1110 2143 1111 2143 1263 2144 1262 2144 1110 2144 1110 2145 1262 2145 1083 2145 1083 2146 1262 2146 1259 2146 1108 2147 1259 2147 1260 2147 1107 2148 1260 2148 1084 2148 1105 2149 1084 2149 1257 2149 1085 2150 1257 2150 1077 2150 1073 2151 1077 2151 1266 2151 1074 2152 1266 2152 1086 2152 1082 2153 1086 2153 1078 2153 1255 2154 1082 2154 1078 2154 1255 2155 1081 2155 1082 2155 1255 2156 1087 2156 1081 2156 
1081 2157 1087 2157 1088 2157 1079 2158 1088 2158 1080 2158 1079 2159 1081 2159 1088 2159 1079 2160 626 2160 1081 2160 1081 2161 626 2161 1082 2161 1083 2162 1259 2162 1108 2162 1108 2163 1260 2163 1107 2163 1107 2164 1084 2164 1105 2164 1105 2165 1257 2165 1085 2165 1085 2166 1077 2166 1073 2166 1073 2167 1266 2167 1074 2167 1074 2168 1086 2168 1082 2168 1087 2169 1254 2169 1088 2169 1088 2170 1254 2170 1091 2170 1080 2171 1091 2171 1090 2171 1080 2172 1088 2172 1091 2172 1254 2173 1092 2173 1091 2173 1091 2174 1092 2174 1089 2174 1090 2175 1089 2175 627 2175 1090 2176 1091 2176 1089 2176 1092 2177 1093 2177 1089 2177 1089 2178 1093 2178 1094 2178 627 2179 1094 2179 629 2179 627 2180 1089 2180 1094 2180 1093 2181 1095 2181 1094 2181 1094 2182 1095 2182 1096 2182 629 2183 1096 2183 628 2183 629 2184 1094 2184 1096 2184 1095 2185 1252 2185 1096 2185 1096 2186 1252 2186 1097 2186 628 2187 1097 2187 630 2187 628 2188 1096 2188 1097 2188 1252 2189 1098 2189 1097 2189 1097 2190 1098 2190 1099 2190 630 2191 1099 2191 1101 2191 630 2192 1097 2192 1099 2192 1098 2193 1251 2193 1099 2193 1099 2194 1251 2194 1100 2194 1101 2195 1100 2195 631 2195 1101 2196 1099 2196 1100 2196 1251 2197 1250 2197 1100 2197 1100 2198 1250 2198 1102 2198 631 2199 1102 2199 1103 2199 631 2200 1100 2200 1102 2200 1250 2201 1269 2201 1102 2201 1102 2202 1269 2202 1103 2202 1076 2203 624 2203 1074 2203 624 2204 623 2204 1073 2204 623 2205 1104 2205 1085 2205 1104 2206 1106 2206 1105 2206 1106 2207 622 2207 1107 2207 622 2208 621 2208 1108 2208 621 2209 1072 2209 1083 2209 1072 2210 1109 2210 1110 2210 1109 2211 638 2211 1111 2211 638 2212 634 2212 1113 2212 1112 2213 1264 2213 1113 2213 1113 2214 1264 2214 1111 2214 1269 2215 1277 2215 1103 2215 1103 2216 1277 2216 639 2216 639 2217 1277 2217 1164 2217 1162 2218 1164 2218 1120 2218 1161 2219 1120 2219 1119 2219 1114 2220 1119 2220 1115 2220 1160 2221 1115 2221 1159 2221 640 2222 1159 2222 1116 2222 1158 2223 1116 2223 1132 2223 641 2224 1132 2224 1157 2224 1156 2225 1157 2225 1123 2225 1155 2226 1123 2226 1117 2226 1154 2227 1117 2227 1135 2227 1118 2228 1135 2228 1130 2228 1118 2229 1154 2229 1135 2229 1281 2230 1120 2230 1163 2230 1281 2231 1119 2231 1120 2231 1281 2232 1121 2232 1119 2232 1119 2233 1121 2233 1115 2233 1115 2234 1121 2234 1280 2234 1159 2235 1280 2235 1131 2235 1116 2236 1131 2236 1279 2236 1132 2237 1279 2237 1122 2237 1157 2238 1122 2238 1278 2238 1123 2239 1278 2239 1133 2239 1117 2240 1133 2240 1134 2240 1135 2241 1134 2241 1124 2241 1125 2242 1135 2242 1124 2242 1125 2243 1129 2243 1135 2243 1125 2244 1126 2244 1129 2244 1129 2245 1126 2245 1127 2245 1128 2246 1127 2246 655 2246 1128 2247 1129 2247 1127 2247 1128 2248 1130 2248 1129 2248 1129 2249 1130 2249 1135 2249 1115 2250 1280 2250 1159 2250 1159 2251 1131 2251 1116 2251 1116 2252 1279 2252 1132 2252 1132 2253 1122 2253 1157 2253 1157 2254 1278 2254 1123 2254 1123 2255 1133 2255 1117 2255 1117 2256 1134 2256 1135 2256 1126 2257 1136 2257 1127 2257 1127 2258 1136 2258 1137 2258 655 2259 1137 2259 1140 2259 655 2260 1127 2260 1137 2260 1136 2261 1138 2261 1137 2261 1137 2262 1138 2262 1139 2262 1140 2263 1139 2263 656 2263 1140 2264 1137 2264 1139 2264 1138 2265 1142 2265 1139 2265 1139 2266 1142 2266 1143 2266 656 2267 1143 2267 1141 2267 656 2268 1139 2268 1143 2268 1142 2269 1297 2269 1143 2269 1143 2270 1297 2270 1144 2270 1141 2271 1144 2271 658 2271 1141 2272 1143 2272 1144 2272 1297 2273 1296 2273 1144 2273 1144 2274 1296 2274 1145 2274 658 2275 1145 2275 1146 2275 658 2276 1144 2276 1145 
2276 1296 2277 1147 2277 1145 2277 1145 2278 1147 2278 1149 2278 1146 2279 1149 2279 659 2279 1146 2280 1145 2280 1149 2280 1147 2281 1148 2281 1149 2281 1149 2282 1148 2282 1150 2282 659 2283 1150 2283 1151 2283 659 2284 1149 2284 1150 2284 1148 2285 1299 2285 1150 2285 1150 2286 1299 2286 1152 2286 1151 2287 1152 2287 663 2287 1151 2288 1150 2288 1152 2288 1299 2289 1153 2289 1152 2289 1152 2290 1153 2290 663 2290 1154 2291 1155 2291 1117 2291 1155 2292 1156 2292 1123 2292 1156 2293 641 2293 1157 2293 641 2294 1158 2294 1132 2294 1158 2295 640 2295 1116 2295 640 2296 1160 2296 1159 2296 1160 2297 1114 2297 1115 2297 1114 2298 1161 2298 1119 2298 1161 2299 1162 2299 1120 2299 1162 2300 639 2300 1164 2300 1277 2301 1163 2301 1164 2301 1164 2302 1163 2302 1120 2302 1212 2303 1165 2303 1209 2303 1207 2304 1209 2304 1210 2304 1315 2305 1210 2305 1176 2305 1166 2306 1176 2306 1167 2306 1206 2307 1167 2307 1179 2307 1317 2308 1179 2308 1187 2308 1168 2309 1187 2309 1205 2309 1169 2310 1205 2310 1171 2310 1170 2311 1171 2311 1172 2311 1307 2312 1172 2312 1173 2312 1204 2313 1173 2313 1174 2313 1175 2314 1174 2314 1186 2314 1175 2315 1204 2315 1174 2315 669 2316 1210 2316 1208 2316 669 2317 1176 2317 1210 2317 669 2318 1177 2318 1176 2318 1176 2319 1177 2319 1167 2319 1167 2320 1177 2320 1178 2320 1179 2321 1178 2321 1180 2321 1187 2322 1180 2322 1181 2322 1205 2323 1181 2323 668 2323 1171 2324 668 2324 667 2324 1172 2325 667 2325 1188 2325 1173 2326 1188 2326 1182 2326 1174 2327 1182 2327 1183 2327 666 2328 1174 2328 1183 2328 666 2329 1184 2329 1174 2329 666 2330 665 2330 1184 2330 1184 2331 665 2331 1185 2331 1305 2332 1185 2332 1310 2332 1305 2333 1184 2333 1185 2333 1305 2334 1186 2334 1184 2334 1184 2335 1186 2335 1174 2335 1167 2336 1178 2336 1179 2336 1179 2337 1180 2337 1187 2337 1187 2338 1181 2338 1205 2338 1205 2339 668 2339 1171 2339 1171 2340 667 2340 1172 2340 1172 2341 1188 2341 1173 2341 1173 2342 1182 2342 1174 2342 665 2343 664 2343 1185 2343 1185 2344 664 2344 1190 2344 1310 2345 1190 2345 1309 2345 1310 2346 1185 2346 1190 2346 664 2347 1189 2347 1190 2347 1190 2348 1189 2348 1191 2348 1309 2349 1191 2349 1193 2349 1309 2350 1190 2350 1191 2350 1189 2351 1192 2351 1191 2351 1191 2352 1192 2352 1194 2352 1193 2353 1194 2353 1195 2353 1193 2354 1191 2354 1194 2354 1192 2355 662 2355 1194 2355 1194 2356 662 2356 1196 2356 1195 2357 1196 2357 1308 2357 1195 2358 1194 2358 1196 2358 662 2359 1198 2359 1196 2359 1196 2360 1198 2360 1197 2360 1308 2361 1197 2361 1303 2361 1308 2362 1196 2362 1197 2362 1198 2363 661 2363 1197 2363 1197 2364 661 2364 1199 2364 1303 2365 1199 2365 1200 2365 1303 2366 1197 2366 1199 2366 661 2367 660 2367 1199 2367 1199 2368 660 2368 1202 2368 1200 2369 1202 2369 1301 2369 1200 2370 1199 2370 1202 2370 660 2371 1201 2371 1202 2371 1202 2372 1201 2372 1203 2372 1301 2373 1203 2373 1153 2373 1301 2374 1202 2374 1203 2374 1201 2375 663 2375 1203 2375 1203 2376 663 2376 1153 2376 1204 2377 1307 2377 1173 2377 1307 2378 1170 2378 1172 2378 1170 2379 1169 2379 1171 2379 1169 2380 1168 2380 1205 2380 1168 2381 1317 2381 1187 2381 1317 2382 1206 2382 1179 2382 1206 2383 1166 2383 1167 2383 1166 2384 1315 2384 1176 2384 1315 2385 1207 2385 1210 2385 1207 2386 1212 2386 1209 2386 1165 2387 1208 2387 1209 2387 1209 2388 1208 2388 1210 2388 657 2389 1165 2389 3920 2389 3920 2390 1165 2390 1212 2390 1211 2391 1212 2391 1393 2391 1401 2392 1211 2392 1393 2392 1401 2393 6486 2393 1211 2393 1212 2394 1316 2394 1393 2394 1211 2395 3920 2395 1212 2395 1247 2396 1327 2396 
1213 2396 1246 2397 1213 2397 1248 2397 1214 2398 1248 2398 1223 2398 1381 2399 1223 2399 1245 2399 1244 2400 1245 2400 1224 2400 1225 2401 1224 2401 1226 2401 1382 2402 1226 2402 1243 2402 1383 2403 1243 2403 1227 2403 1215 2404 1227 2404 1294 2404 1242 2405 1294 2405 1228 2405 1241 2406 1228 2406 1216 2406 1229 2407 1216 2407 1293 2407 1230 2408 1293 2408 1240 2408 1232 2409 1240 2409 1231 2409 1233 2410 1231 2410 1292 2410 1217 2411 1292 2411 1218 2411 1235 2412 1218 2412 1291 2412 1219 2413 1291 2413 1234 2413 1239 2414 1234 2414 1220 2414 1238 2415 1220 2415 1236 2415 1237 2416 1236 2416 1290 2416 1221 2417 1290 2417 1222 2417 806 2418 1222 2418 1268 2418 806 2419 1221 2419 1222 2419 1246 2420 1248 2420 1214 2420 1381 2421 1214 2421 1223 2421 1381 2422 1246 2422 1214 2422 1223 2423 1224 2423 1245 2423 1244 2424 1224 2424 1225 2424 1382 2425 1225 2425 1226 2425 1382 2426 1244 2426 1225 2426 1226 2427 1227 2427 1243 2427 1383 2428 1227 2428 1215 2428 1242 2429 1215 2429 1294 2429 1242 2430 1383 2430 1215 2430 1294 2431 1216 2431 1228 2431 1241 2432 1216 2432 1229 2432 1230 2433 1229 2433 1293 2433 1230 2434 1241 2434 1229 2434 1293 2435 1231 2435 1240 2435 1232 2436 1231 2436 1233 2436 1217 2437 1233 2437 1292 2437 1217 2438 1232 2438 1233 2438 1292 2439 1291 2439 1218 2439 1235 2440 1291 2440 1219 2440 1239 2441 1219 2441 1234 2441 1239 2442 1235 2442 1219 2442 1234 2443 1236 2443 1220 2443 1238 2444 1236 2444 1237 2444 1221 2445 1237 2445 1290 2445 1221 2446 1238 2446 1237 2446 1290 2447 1268 2447 1222 2447 1238 2448 1239 2448 1220 2448 1235 2449 1217 2449 1218 2449 1232 2450 1230 2450 1240 2450 1241 2451 1242 2451 1228 2451 1383 2452 1382 2452 1243 2452 1244 2453 1381 2453 1245 2453 1246 2454 1247 2454 1213 2454 1327 2455 1248 2455 1213 2455 1250 2456 1249 2456 1269 2456 1250 2457 1031 2457 1249 2457 1250 2458 1251 2458 1031 2458 1031 2459 1251 2459 1098 2459 1265 2460 1098 2460 1252 2460 1095 2461 1265 2461 1252 2461 1095 2462 1093 2462 1265 2462 1265 2463 1093 2463 1253 2463 1253 2464 1093 2464 1092 2464 1254 2465 1253 2465 1092 2465 1254 2466 1256 2466 1253 2466 1254 2467 1087 2467 1256 2467 1256 2468 1087 2468 1255 2468 1078 2469 1256 2469 1255 2469 1078 2470 1030 2470 1256 2470 1078 2471 1086 2471 1030 2471 1030 2472 1086 2472 1266 2472 1070 2473 1266 2473 1077 2473 1257 2474 1070 2474 1077 2474 1257 2475 1258 2475 1070 2475 1257 2476 1084 2476 1258 2476 1258 2477 1084 2477 1260 2477 1259 2478 1258 2478 1260 2478 1259 2479 1261 2479 1258 2479 1259 2480 1262 2480 1261 2480 1261 2481 1262 2481 1263 2481 1264 2482 1261 2482 1263 2482 1264 2483 1112 2483 1261 2483 1031 2484 1098 2484 1265 2484 1030 2485 1266 2485 1070 2485 1269 2486 1249 2486 1268 2486 1326 2487 1268 2487 1267 2487 1326 2488 1269 2488 1268 2488 1326 2489 1325 2489 1269 2489 1269 2490 1325 2490 1324 2490 1270 2491 1269 2491 1324 2491 1270 2492 1271 2492 1269 2492 1269 2493 1271 2493 1335 2493 1272 2494 1269 2494 1335 2494 1272 2495 1273 2495 1269 2495 1269 2496 1273 2496 1346 2496 1274 2497 1269 2497 1346 2497 1274 2498 1347 2498 1269 2498 1269 2499 1347 2499 1275 2499 1276 2500 1269 2500 1275 2500 1276 2501 1341 2501 1269 2501 1269 2502 1341 2502 1277 2502 1277 2503 1341 2503 1124 2503 1134 2504 1277 2504 1124 2504 1134 2505 1133 2505 1277 2505 1277 2506 1133 2506 1278 2506 1122 2507 1277 2507 1278 2507 1122 2508 1279 2508 1277 2508 1277 2509 1279 2509 1131 2509 1280 2510 1277 2510 1131 2510 1280 2511 1121 2511 1277 2511 1277 2512 1121 2512 1281 2512 1163 2513 1277 2513 1281 2513 1249 2514 1282 2514 1268 2514 1268 
2515 1282 2515 1283 2515 1284 2516 1268 2516 1283 2516 1284 2517 1285 2517 1268 2517 1268 2518 1285 2518 1286 2518 1287 2519 1268 2519 1286 2519 1287 2520 1049 2520 1268 2520 1268 2521 1049 2521 1055 2521 1058 2522 1268 2522 1055 2522 1058 2523 1288 2523 1268 2523 1268 2524 1288 2524 1060 2524 1289 2525 1268 2525 1060 2525 1289 2526 807 2526 1268 2526 1290 2527 1236 2527 1268 2527 1268 2528 1236 2528 1234 2528 1291 2529 1268 2529 1234 2529 1291 2530 1292 2530 1268 2530 1268 2531 1292 2531 1231 2531 1293 2532 1268 2532 1231 2532 1293 2533 1216 2533 1268 2533 1268 2534 1216 2534 1294 2534 1227 2535 1268 2535 1294 2535 1227 2536 1226 2536 1268 2536 1268 2537 1226 2537 1224 2537 1223 2538 1268 2538 1224 2538 1223 2539 1248 2539 1268 2539 1268 2540 1248 2540 1327 2540 1267 2541 1268 2541 1327 2541 1341 2542 1295 2542 1124 2542 1124 2543 1295 2543 1125 2543 1125 2544 1295 2544 1126 2544 1126 2545 1295 2545 1136 2545 1136 2546 1295 2546 1138 2546 1138 2547 1295 2547 1142 2547 1142 2548 1295 2548 1429 2548 1297 2549 1429 2549 1296 2549 1297 2550 1142 2550 1429 2550 1429 2551 1436 2551 1296 2551 1296 2552 1436 2552 1147 2552 1147 2553 1436 2553 1148 2553 1148 2554 1436 2554 1434 2554 1298 2555 1148 2555 1434 2555 1298 2556 1299 2556 1148 2556 1298 2557 1300 2557 1299 2557 1299 2558 1300 2558 1153 2558 1153 2559 1300 2559 1427 2559 1426 2560 1153 2560 1427 2560 1426 2561 1301 2561 1153 2561 1426 2562 1422 2562 1301 2562 1301 2563 1422 2563 1200 2563 1200 2564 1422 2564 1302 2564 1418 2565 1200 2565 1302 2565 1418 2566 1303 2566 1200 2566 1418 2567 1417 2567 1303 2567 1303 2568 1417 2568 1308 2568 1308 2569 1417 2569 1413 2569 1195 2570 1413 2570 1407 2570 1406 2571 1195 2571 1407 2571 1406 2572 1193 2572 1195 2572 1406 2573 1379 2573 1193 2573 1193 2574 1379 2574 1309 2574 1309 2575 1379 2575 1304 2575 1310 2576 1304 2576 1350 2576 1305 2577 1350 2577 1351 2577 1186 2578 1351 2578 1311 2578 1175 2579 1311 2579 1353 2579 1354 2580 1175 2580 1353 2580 1354 2581 1204 2581 1175 2581 1354 2582 1306 2582 1204 2582 1204 2583 1306 2583 1307 2583 1307 2584 1306 2584 1356 2584 1377 2585 1307 2585 1356 2585 1377 2586 1170 2586 1307 2586 1377 2587 1376 2587 1170 2587 1170 2588 1376 2588 1316 2588 1169 2589 1316 2589 1168 2589 1169 2590 1170 2590 1316 2590 1308 2591 1413 2591 1195 2591 1309 2592 1304 2592 1310 2592 1310 2593 1350 2593 1305 2593 1305 2594 1351 2594 1186 2594 1186 2595 1311 2595 1175 2595 1376 2596 1312 2596 1316 2596 1316 2597 1312 2597 1313 2597 1314 2598 1316 2598 1313 2598 1314 2599 1375 2599 1316 2599 1212 2600 1207 2600 1316 2600 1316 2601 1207 2601 1315 2601 1166 2602 1316 2602 1315 2602 1166 2603 1206 2603 1316 2603 1316 2604 1206 2604 1317 2604 1168 2605 1316 2605 1317 2605 1267 2606 1327 2606 1318 2606 1320 2607 1318 2607 1319 2607 1321 2608 1320 2608 1319 2608 1321 2609 1322 2609 1320 2609 1321 2610 1323 2610 1322 2610 1322 2611 1323 2611 1328 2611 1325 2612 1328 2612 1324 2612 1325 2613 1322 2613 1328 2613 1325 2614 1326 2614 1322 2614 1322 2615 1326 2615 1320 2615 1320 2616 1326 2616 1267 2616 1318 2617 1320 2617 1267 2617 1327 2618 1247 2618 1318 2618 1318 2619 1247 2619 1319 2619 1323 2620 1405 2620 1328 2620 1328 2621 1405 2621 1329 2621 1324 2622 1329 2622 1270 2622 1324 2623 1328 2623 1329 2623 1405 2624 1404 2624 1329 2624 1329 2625 1404 2625 1330 2625 1270 2626 1330 2626 1271 2626 1270 2627 1329 2627 1330 2627 1404 2628 1331 2628 1330 2628 1330 2629 1331 2629 1333 2629 1271 2630 1333 2630 1335 2630 1271 2631 1330 2631 1333 2631 1331 2632 1332 2632 1333 2632 1333 2633 1332 2633 
1334 2633 1335 2634 1334 2634 1272 2634 1335 2635 1333 2635 1334 2635 1332 2636 1336 2636 1334 2636 1334 2637 1336 2637 1345 2637 1272 2638 1345 2638 1273 2638 1272 2639 1334 2639 1345 2639 1336 2640 1385 2640 1345 2640 1345 2641 1385 2641 1344 2641 1337 2642 1344 2642 1386 2642 1338 2643 1386 2643 1339 2643 1348 2644 1339 2644 1340 2644 1343 2645 1340 2645 1387 2645 1342 2646 1387 2646 1388 2646 1341 2647 1342 2647 1388 2647 1341 2648 1276 2648 1342 2648 1342 2649 1276 2649 1343 2649 1387 2650 1342 2650 1343 2650 1345 2651 1344 2651 1337 2651 1346 2652 1337 2652 1274 2652 1346 2653 1345 2653 1337 2653 1346 2654 1273 2654 1345 2654 1337 2655 1386 2655 1338 2655 1274 2656 1338 2656 1347 2656 1274 2657 1337 2657 1338 2657 1338 2658 1339 2658 1348 2658 1347 2659 1348 2659 1275 2659 1347 2660 1338 2660 1348 2660 1348 2661 1340 2661 1343 2661 1275 2662 1343 2662 1276 2662 1275 2663 1348 2663 1343 2663 1379 2664 1349 2664 1380 2664 1304 2665 1380 2665 1378 2665 1350 2666 1378 2666 1363 2666 1351 2667 1363 2667 1352 2667 1311 2668 1352 2668 1372 2668 1353 2669 1372 2669 1364 2669 1354 2670 1364 2670 1366 2670 1306 2671 1366 2671 1355 2671 1356 2672 1355 2672 1367 2672 1377 2673 1367 2673 1368 2673 1376 2674 1368 2674 1357 2674 1312 2675 1357 2675 1374 2675 1313 2676 1374 2676 1358 2676 1314 2677 1358 2677 1359 2677 1375 2678 1359 2678 1360 2678 1316 2679 1360 2679 1393 2679 1316 2680 1375 2680 1360 2680 1361 2681 1378 2681 1362 2681 1361 2682 1363 2682 1378 2682 1361 2683 1400 2683 1363 2683 1363 2684 1400 2684 1352 2684 1352 2685 1400 2685 1371 2685 1372 2686 1371 2686 1365 2686 1364 2687 1365 2687 1373 2687 1366 2688 1373 2688 1399 2688 1355 2689 1399 2689 1394 2689 1367 2690 1394 2690 1395 2690 1368 2691 1395 2691 1369 2691 1357 2692 1369 2692 1396 2692 1374 2693 1396 2693 1370 2693 1358 2694 1370 2694 1398 2694 1359 2695 1398 2695 1397 2695 1360 2696 1397 2696 1393 2696 1360 2697 1359 2697 1397 2697 1352 2698 1371 2698 1372 2698 1372 2699 1365 2699 1364 2699 1364 2700 1373 2700 1366 2700 1366 2701 1399 2701 1355 2701 1355 2702 1394 2702 1367 2702 1367 2703 1395 2703 1368 2703 1368 2704 1369 2704 1357 2704 1357 2705 1396 2705 1374 2705 1374 2706 1370 2706 1358 2706 1358 2707 1398 2707 1359 2707 1375 2708 1314 2708 1359 2708 1314 2709 1313 2709 1358 2709 1313 2710 1312 2710 1374 2710 1312 2711 1376 2711 1357 2711 1376 2712 1377 2712 1368 2712 1377 2713 1356 2713 1367 2713 1356 2714 1306 2714 1355 2714 1306 2715 1354 2715 1366 2715 1354 2716 1353 2716 1364 2716 1353 2717 1311 2717 1372 2717 1311 2718 1351 2718 1352 2718 1351 2719 1350 2719 1363 2719 1350 2720 1304 2720 1378 2720 1304 2721 1379 2721 1380 2721 1362 2722 1378 2722 1380 2722 1349 2723 1362 2723 1380 2723 1319 2724 1247 2724 6548 2724 1321 2725 6548 2725 1323 2725 1321 2726 1319 2726 6548 2726 1247 2727 1246 2727 6548 2727 6548 2728 1246 2728 1381 2728 1244 2729 6548 2729 1381 2729 1244 2730 1382 2730 6548 2730 6548 2731 1382 2731 1383 2731 1242 2732 6548 2732 1383 2732 1242 2733 1241 2733 6548 2733 6548 2734 1241 2734 1230 2734 1232 2735 6548 2735 1230 2735 1232 2736 1217 2736 6548 2736 6548 2737 1217 2737 806 2737 806 2738 1217 2738 1235 2738 1239 2739 806 2739 1235 2739 1239 2740 1238 2740 806 2740 806 2741 1238 2741 1221 2741 1384 2742 1404 2742 6548 2742 1384 2743 1331 2743 1404 2743 1384 2744 1332 2744 1331 2744 1384 2745 1336 2745 1332 2745 1384 2746 1385 2746 1336 2746 1384 2747 1344 2747 1385 2747 1384 2748 1386 2748 1344 2748 1384 2749 1339 2749 1386 2749 1384 2750 1340 2750 1339 2750 1384 2751 1387 2751 1340 2751 1384 2752 
1388 2752 1387 2752 1384 2753 1440 2753 1388 2753 1384 2754 1391 2754 1440 2754 1440 2755 1391 2755 1437 2755 1437 2756 1391 2756 1389 2756 1389 2757 1391 2757 1390 2757 1390 2758 1391 2758 1433 2758 1433 2759 1391 2759 1432 2759 1432 2760 1391 2760 1392 2760 1392 2761 1391 2761 1425 2761 1425 2762 1391 2762 1423 2762 1423 2763 1391 2763 1421 2763 1421 2764 1391 2764 1416 2764 1416 2765 1391 2765 1401 2765 1410 2766 1401 2766 1411 2766 1410 2767 1416 2767 1401 2767 1393 2768 1399 2768 1401 2768 1393 2769 1394 2769 1399 2769 1393 2770 1395 2770 1394 2770 1393 2771 1369 2771 1395 2771 1393 2772 1396 2772 1369 2772 1393 2773 1370 2773 1396 2773 1393 2774 1398 2774 1370 2774 1393 2775 1397 2775 1398 2775 1399 2776 1373 2776 1401 2776 1401 2777 1373 2777 1365 2777 1371 2778 1401 2778 1365 2778 1371 2779 1400 2779 1401 2779 1401 2780 1400 2780 1361 2780 1362 2781 1401 2781 1361 2781 1362 2782 1349 2782 1401 2782 1401 2783 1349 2783 1403 2783 1402 2784 1401 2784 1403 2784 1402 2785 1411 2785 1401 2785 1404 2786 1405 2786 6548 2786 6548 2787 1405 2787 1323 2787 1403 2788 1349 2788 1412 2788 1409 2789 1412 2789 1406 2789 1407 2790 1409 2790 1406 2790 1407 2791 1408 2791 1409 2791 1407 2792 1413 2792 1408 2792 1408 2793 1413 2793 1414 2793 1411 2794 1414 2794 1410 2794 1411 2795 1408 2795 1414 2795 1411 2796 1402 2796 1408 2796 1408 2797 1402 2797 1409 2797 1409 2798 1402 2798 1403 2798 1412 2799 1409 2799 1403 2799 1349 2800 1379 2800 1412 2800 1412 2801 1379 2801 1406 2801 1413 2802 1417 2802 1414 2802 1414 2803 1417 2803 1415 2803 1410 2804 1415 2804 1416 2804 1410 2805 1414 2805 1415 2805 1417 2806 1418 2806 1415 2806 1415 2807 1418 2807 1419 2807 1416 2808 1419 2808 1421 2808 1416 2809 1415 2809 1419 2809 1418 2810 1302 2810 1419 2810 1419 2811 1302 2811 1420 2811 1421 2812 1420 2812 1423 2812 1421 2813 1419 2813 1420 2813 1302 2814 1422 2814 1420 2814 1420 2815 1422 2815 1424 2815 1423 2816 1424 2816 1425 2816 1423 2817 1420 2817 1424 2817 1422 2818 1426 2818 1424 2818 1424 2819 1426 2819 1431 2819 1425 2820 1431 2820 1392 2820 1425 2821 1424 2821 1431 2821 1426 2822 1427 2822 1431 2822 1431 2823 1427 2823 1300 2823 1428 2824 1300 2824 1298 2824 1435 2825 1298 2825 1434 2825 1438 2826 1434 2826 1436 2826 1439 2827 1436 2827 1429 2827 1430 2828 1429 2828 1295 2828 1440 2829 1430 2829 1295 2829 1440 2830 1437 2830 1430 2830 1430 2831 1437 2831 1439 2831 1429 2832 1430 2832 1439 2832 1431 2833 1300 2833 1428 2833 1432 2834 1428 2834 1433 2834 1432 2835 1431 2835 1428 2835 1432 2836 1392 2836 1431 2836 1428 2837 1298 2837 1435 2837 1433 2838 1435 2838 1390 2838 1433 2839 1428 2839 1435 2839 1435 2840 1434 2840 1438 2840 1390 2841 1438 2841 1389 2841 1390 2842 1435 2842 1438 2842 1438 2843 1436 2843 1439 2843 1389 2844 1439 2844 1437 2844 1389 2845 1438 2845 1439 2845 1388 2846 1440 2846 1341 2846 1341 2847 1440 2847 1295 2847 2419 2848 1664 2848 2342 2848 2419 2849 1441 2849 1664 2849 2419 2850 1643 2850 1441 2850 2419 2851 1640 2851 1643 2851 2419 2852 1453 2852 1640 2852 2419 2853 1442 2853 1453 2853 2419 2854 1454 2854 1442 2854 1442 2855 1454 2855 1668 2855 1452 2856 1668 2856 1667 2856 1451 2857 1667 2857 1672 2857 1673 2858 1672 2858 1443 2858 1678 2859 1443 2859 1682 2859 1449 2860 1682 2860 1696 2860 1448 2861 1696 2861 1456 2861 1695 2862 1456 2862 1445 2862 1444 2863 1445 2863 1707 2863 1446 2864 1707 2864 1447 2864 1885 2865 1447 2865 1887 2865 1885 2866 1446 2866 1447 2866 1885 2867 1883 2867 1446 2867 1446 2868 1883 2868 1636 2868 1444 2869 1636 2869 1704 2869 1695 2870 1704 2870 1697 
2870 1448 2871 1697 2871 1689 2871 1449 2872 1689 2872 1450 2872 1678 2873 1450 2873 1677 2873 1673 2874 1677 2874 1671 2874 1451 2875 1671 2875 1669 2875 1452 2876 1669 2876 1453 2876 1442 2877 1452 2877 1453 2877 1442 2878 1668 2878 1452 2878 1454 2879 2336 2879 1668 2879 1668 2880 2336 2880 1666 2880 1667 2881 1666 2881 1674 2881 1672 2882 1674 2882 1461 2882 1443 2883 1461 2883 1455 2883 1682 2884 1455 2884 1690 2884 1696 2885 1690 2885 1702 2885 1456 2886 1702 2886 1703 2886 1445 2887 1703 2887 1464 2887 1707 2888 1464 2888 1712 2888 1447 2889 1712 2889 1469 2889 1888 2890 1469 2890 1457 2890 1888 2891 1447 2891 1469 2891 1888 2892 1887 2892 1447 2892 2336 2893 1458 2893 1666 2893 1666 2894 1458 2894 1459 2894 1674 2895 1459 2895 1460 2895 1461 2896 1460 2896 1688 2896 1455 2897 1688 2897 1470 2897 1690 2898 1470 2898 1462 2898 1702 2899 1462 2899 1701 2899 1703 2900 1701 2900 1463 2900 1464 2901 1463 2901 1716 2901 1712 2902 1716 2902 1465 2902 1469 2903 1465 2903 1466 2903 1468 2904 1466 2904 1467 2904 1468 2905 1469 2905 1466 2905 1468 2906 1457 2906 1469 2906 1458 2907 2244 2907 1459 2907 1459 2908 2244 2908 1681 2908 1460 2909 1681 2909 1474 2909 1688 2910 1474 2910 1687 2910 1470 2911 1687 2911 1694 2911 1462 2912 1694 2912 1700 2912 1701 2913 1700 2913 1477 2913 1463 2914 1477 2914 1478 2914 1716 2915 1478 2915 1471 2915 1465 2916 1471 2916 1472 2916 1466 2917 1472 2917 1482 2917 1473 2918 1482 2918 1842 2918 1473 2919 1466 2919 1482 2919 1473 2920 1467 2920 1466 2920 2244 2921 2252 2921 1681 2921 1681 2922 2252 2922 1475 2922 1474 2923 1475 2923 1693 2923 1687 2924 1693 2924 1692 2924 1694 2925 1692 2925 1484 2925 1700 2926 1484 2926 1476 2926 1477 2927 1476 2927 1711 2927 1478 2928 1711 2928 1723 2928 1471 2929 1723 2929 1479 2929 1472 2930 1479 2930 1480 2930 1482 2931 1480 2931 1734 2931 1481 2932 1734 2932 1490 2932 1481 2933 1482 2933 1734 2933 1481 2934 1842 2934 1482 2934 2252 2935 1491 2935 1475 2935 1475 2936 1491 2936 1686 2936 1693 2937 1686 2937 1483 2937 1692 2938 1483 2938 1699 2938 1484 2939 1699 2939 1485 2939 1476 2940 1485 2940 1715 2940 1711 2941 1715 2941 1486 2941 1723 2942 1486 2942 1520 2942 1479 2943 1520 2943 1725 2943 1480 2944 1725 2944 1487 2944 1734 2945 1487 2945 1488 2945 1834 2946 1488 2946 1489 2946 1834 2947 1734 2947 1488 2947 1834 2948 1490 2948 1734 2948 1491 2949 2257 2949 1686 2949 1686 2950 2257 2950 2261 2950 1691 2951 2261 2951 2275 2951 1705 2952 2275 2952 1531 2952 1709 2953 1531 2953 1492 2953 1708 2954 1492 2954 2284 2954 1493 2955 1708 2955 2284 2955 1493 2956 1546 2956 1708 2956 1493 2957 2283 2957 1546 2957 1546 2958 2283 2958 1494 2958 1495 2959 1494 2959 2286 2959 2285 2960 1495 2960 2286 2960 2285 2961 1496 2961 1495 2961 2285 2962 2335 2962 1496 2962 1496 2963 2335 2963 1497 2963 1498 2964 1497 2964 1499 2964 2332 2965 1498 2965 1499 2965 2332 2966 1735 2966 1498 2966 2332 2967 2331 2967 1735 2967 1735 2968 2331 2968 1500 2968 1502 2969 1500 2969 1501 2969 1503 2970 1502 2970 1501 2970 1503 2971 1749 2971 1502 2971 1503 2972 2330 2972 1749 2972 1749 2973 2330 2973 1504 2973 1505 2974 1504 2974 2292 2974 2293 2975 1505 2975 2292 2975 2293 2976 1506 2976 1505 2976 2293 2977 2298 2977 1506 2977 1506 2978 2298 2978 1507 2978 1508 2979 1506 2979 1507 2979 1508 2980 1759 2980 1506 2980 1508 2981 2303 2981 1759 2981 1759 2982 2303 2982 2305 2982 1615 2983 2305 2983 1518 2983 1519 2984 1518 2984 2311 2984 1572 2985 2311 2985 1509 2985 2312 2986 1572 2986 1509 2986 2312 2987 1519 2987 1572 2987 2312 2988 1511 2988 1519 2988 2312 2989 
1510 2989 1511 2989 2312 2990 2319 2990 1510 2990 1510 2991 2319 2991 1574 2991 1511 2992 1574 2992 1573 2992 1769 2993 1573 2993 1512 2993 1513 2994 1512 2994 1783 2994 1514 2995 1783 2995 1515 2995 1785 2996 1515 2996 1788 2996 1790 2997 1788 2997 1516 2997 1597 2998 1516 2998 1598 2998 1603 2999 1598 2999 1791 2999 1602 3000 1791 3000 1517 3000 1603 3001 1517 3001 1599 3001 1600 3002 1599 3002 1619 3002 1601 3003 1619 3003 1786 3003 1784 3004 1786 3004 1618 3004 1777 3005 1618 3005 1617 3005 1773 3006 1617 3006 1771 3006 1768 3007 1771 3007 1616 3007 1519 3008 1616 3008 1615 3008 1518 3009 1519 3009 1615 3009 1686 3010 2261 3010 1691 3010 1483 3011 1691 3011 1523 3011 1699 3012 1523 3012 1706 3012 1485 3013 1706 3013 1714 3013 1715 3014 1714 3014 1722 3014 1486 3015 1722 3015 1526 3015 1520 3016 1526 3016 1521 3016 1725 3017 1521 3017 1733 3017 1487 3018 1733 3018 1522 3018 1488 3019 1522 3019 1529 3019 1890 3020 1529 3020 1530 3020 1890 3021 1488 3021 1529 3021 1890 3022 1489 3022 1488 3022 1691 3023 2275 3023 1705 3023 1523 3024 1705 3024 1524 3024 1706 3025 1524 3025 1710 3025 1714 3026 1710 3026 1713 3026 1722 3027 1713 3027 1525 3027 1526 3028 1525 3028 1732 3028 1521 3029 1732 3029 1527 3029 1733 3030 1527 3030 1534 3030 1522 3031 1534 3031 1528 3031 1529 3032 1528 3032 1535 3032 1831 3033 1535 3033 1536 3033 1831 3034 1529 3034 1535 3034 1831 3035 1530 3035 1529 3035 1705 3036 1531 3036 1709 3036 1524 3037 1709 3037 1537 3037 1710 3038 1537 3038 1538 3038 1713 3039 1538 3039 1721 3039 1525 3040 1721 3040 1724 3040 1732 3041 1724 3041 1532 3041 1527 3042 1532 3042 1533 3042 1534 3043 1533 3043 1746 3043 1528 3044 1746 3044 1543 3044 1535 3045 1543 3045 1545 3045 1826 3046 1545 3046 1823 3046 1826 3047 1535 3047 1545 3047 1826 3048 1536 3048 1535 3048 1709 3049 1492 3049 1708 3049 1537 3050 1708 3050 1717 3050 1538 3051 1717 3051 1719 3051 1721 3052 1719 3052 1539 3052 1724 3053 1539 3053 1540 3053 1532 3054 1540 3054 1720 3054 1533 3055 1720 3055 1541 3055 1746 3056 1541 3056 1542 3056 1543 3057 1542 3057 1632 3057 1545 3058 1632 3058 1631 3058 1544 3059 1631 3059 1821 3059 1544 3060 1545 3060 1631 3060 1544 3061 1823 3061 1545 3061 1546 3062 1494 3062 1495 3062 1718 3063 1495 3063 1726 3063 1547 3064 1726 3064 1728 3064 1548 3065 1728 3065 1549 3065 1742 3066 1549 3066 1550 3066 1743 3067 1550 3067 1745 3067 1634 3068 1745 3068 1753 3068 1633 3069 1753 3069 1551 3069 1553 3070 1551 3070 1731 3070 1552 3071 1731 3071 1819 3071 1892 3072 1552 3072 1819 3072 1892 3073 1891 3073 1552 3073 1552 3074 1891 3074 1553 3074 1731 3075 1552 3075 1553 3075 1496 3076 1497 3076 1498 3076 1727 3077 1498 3077 1741 3077 1554 3078 1741 3078 1740 3078 1729 3079 1740 3079 1751 3079 1555 3080 1751 3080 1556 3080 1752 3081 1556 3081 1736 3081 1557 3082 1736 3082 1558 3082 1730 3083 1558 3083 1761 3083 1763 3084 1761 3084 1766 3084 1813 3085 1766 3085 1738 3085 1559 3086 1738 3086 1737 3086 1559 3087 1813 3087 1738 3087 1735 3088 1500 3088 1502 3088 1739 3089 1502 3089 1560 3089 1744 3090 1560 3090 1747 3090 1755 3091 1747 3091 1756 3091 1754 3092 1756 3092 1758 3092 1561 3093 1758 3093 1760 3093 1762 3094 1760 3094 1562 3094 1767 3095 1562 3095 1563 3095 1626 3096 1563 3096 1776 3096 1627 3097 1776 3097 1565 3097 1564 3098 1565 3098 1620 3098 1564 3099 1627 3099 1565 3099 1749 3100 1504 3100 1505 3100 1750 3101 1505 3101 1612 3101 1566 3102 1612 3102 1567 3102 1757 3103 1567 3103 1568 3103 1748 3104 1568 3104 1611 3104 1765 3105 1611 3105 1774 3105 1772 3106 1774 3106 1775 3106 1778 3107 1775 3107 1569 
3107 1624 3108 1569 3108 1621 3108 1622 3109 1621 3109 1571 3109 1570 3110 1571 3110 1606 3110 1570 3111 1622 3111 1571 3111 2305 3112 2308 3112 1518 3112 1518 3113 2308 3113 2311 3113 1519 3114 2311 3114 1572 3114 1573 3115 1574 3115 1576 3115 1577 3116 1576 3116 1575 3116 1577 3117 1573 3117 1576 3117 1577 3118 1780 3118 1573 3118 1577 3119 1779 3119 1780 3119 1577 3120 2316 3120 1779 3120 1779 3121 2316 3121 2318 3121 1780 3122 2318 3122 1581 3122 1782 3123 1581 3123 1787 3123 1781 3124 1787 3124 1578 3124 1580 3125 1578 3125 1587 3125 1579 3126 1587 3126 1598 3126 1516 3127 1579 3127 1598 3127 1516 3128 1788 3128 1579 3128 1579 3129 1788 3129 1580 3129 1587 3130 1579 3130 1580 3130 1574 3131 1575 3131 1576 3131 1581 3132 2318 3132 1589 3132 1582 3133 1589 3133 2321 3133 1582 3134 1581 3134 1589 3134 1582 3135 1585 3135 1581 3135 1582 3136 1583 3136 1585 3136 1585 3137 1583 3137 1586 3137 1584 3138 1586 3138 1583 3138 1584 3139 1585 3139 1586 3139 1584 3140 2324 3140 1585 3140 1585 3141 2324 3141 1592 3141 1588 3142 1592 3142 1594 3142 1789 3143 1594 3143 2326 3143 1598 3144 2326 3144 2327 3144 1598 3145 1789 3145 2326 3145 1598 3146 1587 3146 1789 3146 1789 3147 1587 3147 1578 3147 1588 3148 1578 3148 1787 3148 1585 3149 1787 3149 1581 3149 1585 3150 1588 3150 1787 3150 1585 3151 1592 3151 1588 3151 2318 3152 2321 3152 1589 3152 1592 3153 2324 3153 1590 3153 1591 3154 1590 3154 2324 3154 1591 3155 1592 3155 1590 3155 1591 3156 1595 3156 1592 3156 1592 3157 1595 3157 1593 3157 1594 3158 1593 3158 2326 3158 1594 3159 1592 3159 1593 3159 1593 3160 1595 3160 1596 3160 2326 3161 1596 3161 1595 3161 2326 3162 1593 3162 1596 3162 1597 3163 1598 3163 1603 3163 1600 3164 1603 3164 1599 3164 1600 3165 1597 3165 1603 3165 1600 3166 1790 3166 1597 3166 1600 3167 1601 3167 1790 3167 1600 3168 1619 3168 1601 3168 1603 3169 1791 3169 1602 3169 1517 3170 1603 3170 1602 3170 1517 3171 1793 3171 1599 3171 1599 3172 1793 3172 1604 3172 1796 3173 1604 3173 1793 3173 1796 3174 1599 3174 1604 3174 1796 3175 1607 3175 1599 3175 1796 3176 1798 3176 1607 3176 1607 3177 1798 3177 1605 3177 1799 3178 1605 3178 1798 3178 1799 3179 1607 3179 1605 3179 1799 3180 1606 3180 1607 3180 1607 3181 1606 3181 1621 3181 1608 3182 1621 3182 1569 3182 1609 3183 1569 3183 1775 3183 1610 3184 1775 3184 1774 3184 1770 3185 1774 3185 1611 3185 1764 3186 1611 3186 1568 3186 1614 3187 1568 3187 1567 3187 1613 3188 1567 3188 1612 3188 1506 3189 1612 3189 1505 3189 1506 3190 1613 3190 1612 3190 1506 3191 1759 3191 1613 3191 1613 3192 1759 3192 1615 3192 1614 3193 1615 3193 1616 3193 1764 3194 1616 3194 1771 3194 1770 3195 1771 3195 1617 3195 1610 3196 1617 3196 1618 3196 1609 3197 1618 3197 1786 3197 1608 3198 1786 3198 1619 3198 1607 3199 1619 3199 1599 3199 1607 3200 1608 3200 1619 3200 1607 3201 1621 3201 1608 3201 1621 3202 1606 3202 1571 3202 1807 3203 1623 3203 1622 3203 1807 3204 1620 3204 1623 3204 1623 3205 1620 3205 1624 3205 1622 3206 1624 3206 1621 3206 1622 3207 1623 3207 1624 3207 1625 3208 1628 3208 1627 3208 1625 3209 1737 3209 1628 3209 1628 3210 1737 3210 1626 3210 1627 3211 1626 3211 1776 3211 1627 3212 1628 3212 1626 3212 1629 3213 1630 3213 1813 3213 1629 3214 1819 3214 1630 3214 1630 3215 1819 3215 1731 3215 1763 3216 1731 3216 1730 3216 1761 3217 1763 3217 1730 3217 1553 3218 1891 3218 1631 3218 1632 3219 1553 3219 1631 3219 1632 3220 1633 3220 1553 3220 1632 3221 1542 3221 1633 3221 1633 3222 1542 3222 1634 3222 1753 3223 1633 3223 1634 3223 1891 3224 1817 3224 1631 3224 1631 3225 1817 3225 1821 3225 1883 3226 
1635 3226 1636 3226 1636 3227 1635 3227 1637 3227 1704 3228 1637 3228 1638 3228 1697 3229 1638 3229 1683 3229 1689 3230 1683 3230 1684 3230 1450 3231 1684 3231 1679 3231 1677 3232 1679 3232 1639 3232 1671 3233 1639 3233 1641 3233 1669 3234 1641 3234 1640 3234 1453 3235 1669 3235 1640 3235 1635 3236 1861 3236 1637 3236 1637 3237 1861 3237 1698 3237 1638 3238 1698 3238 1649 3238 1683 3239 1649 3239 1648 3239 1684 3240 1648 3240 1676 3240 1679 3241 1676 3241 1675 3241 1639 3242 1675 3242 1642 3242 1641 3243 1642 3243 1643 3243 1640 3244 1641 3244 1643 3244 1861 3245 1860 3245 1698 3245 1698 3246 1860 3246 1859 3246 1650 3247 1859 3247 1880 3247 1651 3248 1880 3248 1644 3248 1680 3249 1644 3249 1654 3249 1655 3250 1654 3250 1657 3250 1658 3251 1657 3251 1856 3251 1660 3252 1856 3252 2342 3252 1659 3253 2342 3253 1645 3253 1656 3254 1645 3254 1661 3254 1653 3255 1661 3255 1646 3255 1652 3256 1646 3256 1647 3256 1685 3257 1647 3257 1648 3257 1649 3258 1685 3258 1648 3258 1649 3259 1650 3259 1685 3259 1649 3260 1698 3260 1650 3260 1650 3261 1698 3261 1859 3261 1650 3262 1880 3262 1651 3262 1685 3263 1651 3263 1652 3263 1647 3264 1685 3264 1652 3264 1651 3265 1644 3265 1680 3265 1652 3266 1680 3266 1653 3266 1646 3267 1652 3267 1653 3267 1680 3268 1654 3268 1655 3268 1653 3269 1655 3269 1656 3269 1661 3270 1653 3270 1656 3270 1655 3271 1657 3271 1658 3271 1656 3272 1658 3272 1659 3272 1645 3273 1656 3273 1659 3273 1658 3274 1856 3274 1660 3274 1659 3275 1660 3275 2342 3275 1659 3276 1658 3276 1660 3276 1643 3277 1642 3277 1441 3277 1441 3278 1642 3278 1665 3278 1664 3279 1665 3279 1663 3279 1662 3280 1663 3280 1661 3280 1645 3281 1662 3281 1661 3281 1645 3282 2342 3282 1662 3282 1662 3283 2342 3283 1664 3283 1663 3284 1662 3284 1664 3284 1664 3285 1441 3285 1665 3285 1451 3286 1669 3286 1452 3286 1667 3287 1451 3287 1452 3287 1666 3288 1667 3288 1668 3288 1671 3289 1641 3289 1669 3289 1639 3290 1642 3290 1641 3290 1642 3291 1675 3291 1665 3291 1665 3292 1675 3292 1670 3292 1663 3293 1670 3293 1646 3293 1661 3294 1663 3294 1646 3294 1663 3295 1665 3295 1670 3295 1655 3296 1658 3296 1656 3296 1459 3297 1674 3297 1666 3297 1673 3298 1671 3298 1451 3298 1672 3299 1673 3299 1451 3299 1674 3300 1672 3300 1667 3300 1677 3301 1639 3301 1671 3301 1679 3302 1675 3302 1639 3302 1647 3303 1646 3303 1670 3303 1676 3304 1670 3304 1675 3304 1676 3305 1647 3305 1670 3305 1676 3306 1648 3306 1647 3306 1680 3307 1655 3307 1653 3307 1681 3308 1460 3308 1459 3308 1460 3309 1461 3309 1674 3309 1678 3310 1677 3310 1673 3310 1443 3311 1678 3311 1673 3311 1461 3312 1443 3312 1672 3312 1450 3313 1679 3313 1677 3313 1684 3314 1676 3314 1679 3314 1651 3315 1680 3315 1652 3315 1475 3316 1474 3316 1681 3316 1474 3317 1688 3317 1460 3317 1688 3318 1455 3318 1461 3318 1449 3319 1450 3319 1678 3319 1682 3320 1449 3320 1678 3320 1455 3321 1682 3321 1443 3321 1689 3322 1684 3322 1450 3322 1683 3323 1648 3323 1684 3323 1650 3324 1651 3324 1685 3324 1686 3325 1693 3325 1475 3325 1693 3326 1687 3326 1474 3326 1687 3327 1470 3327 1688 3327 1470 3328 1690 3328 1455 3328 1448 3329 1689 3329 1449 3329 1696 3330 1448 3330 1449 3330 1690 3331 1696 3331 1682 3331 1697 3332 1683 3332 1689 3332 1638 3333 1649 3333 1683 3333 1691 3334 1483 3334 1686 3334 1483 3335 1692 3335 1693 3335 1692 3336 1694 3336 1687 3336 1694 3337 1462 3337 1470 3337 1462 3338 1702 3338 1690 3338 1695 3339 1697 3339 1448 3339 1456 3340 1695 3340 1448 3340 1702 3341 1456 3341 1696 3341 1704 3342 1638 3342 1697 3342 1637 3343 1698 3343 1638 3343 1705 3344 1523 3344 1691 
3344 1523 3345 1699 3345 1483 3345 1699 3346 1484 3346 1692 3346 1484 3347 1700 3347 1694 3347 1700 3348 1701 3348 1462 3348 1701 3349 1703 3349 1702 3349 1444 3350 1704 3350 1695 3350 1445 3351 1444 3351 1695 3351 1703 3352 1445 3352 1456 3352 1636 3353 1637 3353 1704 3353 1709 3354 1524 3354 1705 3354 1524 3355 1706 3355 1523 3355 1706 3356 1485 3356 1699 3356 1485 3357 1476 3357 1484 3357 1476 3358 1477 3358 1700 3358 1477 3359 1463 3359 1701 3359 1463 3360 1464 3360 1703 3360 1446 3361 1636 3361 1444 3361 1707 3362 1446 3362 1444 3362 1464 3363 1707 3363 1445 3363 1708 3364 1537 3364 1709 3364 1537 3365 1710 3365 1524 3365 1710 3366 1714 3366 1706 3366 1714 3367 1715 3367 1485 3367 1715 3368 1711 3368 1476 3368 1711 3369 1478 3369 1477 3369 1478 3370 1716 3370 1463 3370 1716 3371 1712 3371 1464 3371 1712 3372 1447 3372 1707 3372 1717 3373 1538 3373 1537 3373 1538 3374 1713 3374 1710 3374 1713 3375 1722 3375 1714 3375 1722 3376 1486 3376 1715 3376 1486 3377 1723 3377 1711 3377 1723 3378 1471 3378 1478 3378 1471 3379 1465 3379 1716 3379 1465 3380 1469 3380 1712 3380 1708 3381 1546 3381 1717 3381 1717 3382 1546 3382 1718 3382 1719 3383 1718 3383 1547 3383 1539 3384 1547 3384 1548 3384 1540 3385 1548 3385 1742 3385 1720 3386 1742 3386 1743 3386 1541 3387 1743 3387 1634 3387 1542 3388 1541 3388 1634 3388 1495 3389 1718 3389 1546 3389 1718 3390 1719 3390 1717 3390 1719 3391 1721 3391 1538 3391 1721 3392 1525 3392 1713 3392 1525 3393 1526 3393 1722 3393 1526 3394 1520 3394 1486 3394 1520 3395 1479 3395 1723 3395 1479 3396 1472 3396 1471 3396 1472 3397 1466 3397 1465 3397 1726 3398 1547 3398 1718 3398 1547 3399 1539 3399 1719 3399 1539 3400 1724 3400 1721 3400 1724 3401 1732 3401 1525 3401 1732 3402 1521 3402 1526 3402 1521 3403 1725 3403 1520 3403 1725 3404 1480 3404 1479 3404 1480 3405 1482 3405 1472 3405 1495 3406 1496 3406 1726 3406 1726 3407 1496 3407 1727 3407 1728 3408 1727 3408 1554 3408 1549 3409 1554 3409 1729 3409 1550 3410 1729 3410 1555 3410 1745 3411 1555 3411 1752 3411 1753 3412 1752 3412 1557 3412 1551 3413 1557 3413 1730 3413 1731 3414 1551 3414 1730 3414 1498 3415 1727 3415 1496 3415 1727 3416 1728 3416 1726 3416 1728 3417 1548 3417 1547 3417 1548 3418 1540 3418 1539 3418 1540 3419 1532 3419 1724 3419 1532 3420 1527 3420 1732 3420 1527 3421 1733 3421 1521 3421 1733 3422 1487 3422 1725 3422 1487 3423 1734 3423 1480 3423 1741 3424 1554 3424 1727 3424 1554 3425 1549 3425 1728 3425 1549 3426 1742 3426 1548 3426 1742 3427 1720 3427 1540 3427 1720 3428 1533 3428 1532 3428 1533 3429 1534 3429 1527 3429 1534 3430 1522 3430 1733 3430 1522 3431 1488 3431 1487 3431 1498 3432 1735 3432 1741 3432 1741 3433 1735 3433 1739 3433 1740 3434 1739 3434 1744 3434 1751 3435 1744 3435 1755 3435 1556 3436 1755 3436 1754 3436 1736 3437 1754 3437 1561 3437 1558 3438 1561 3438 1762 3438 1761 3439 1762 3439 1767 3439 1766 3440 1767 3440 1626 3440 1737 3441 1766 3441 1626 3441 1737 3442 1738 3442 1766 3442 1502 3443 1739 3443 1735 3443 1739 3444 1740 3444 1741 3444 1740 3445 1729 3445 1554 3445 1729 3446 1550 3446 1549 3446 1550 3447 1743 3447 1742 3447 1743 3448 1541 3448 1720 3448 1541 3449 1746 3449 1533 3449 1746 3450 1528 3450 1534 3450 1528 3451 1529 3451 1522 3451 1560 3452 1744 3452 1739 3452 1744 3453 1751 3453 1740 3453 1751 3454 1555 3454 1729 3454 1555 3455 1745 3455 1550 3455 1745 3456 1634 3456 1743 3456 1543 3457 1746 3457 1542 3457 1535 3458 1528 3458 1543 3458 1502 3459 1749 3459 1560 3459 1560 3460 1749 3460 1750 3460 1747 3461 1750 3461 1566 3461 1756 3462 1566 3462 1757 3462 1758 3463 
[... several thousand lines of raw integer index data omitted: apparently triangle vertex/normal index pairs from a 3D mesh asset added in this diff; the numeric dump carries no human-readable information and is elided for readability ...]
5859 553 5859 2918 5859 2918 5860 553 5860 2946 5860 462 5861 2918 5861 2946 5861 553 5862 554 5862 2946 5862 2946 5863 554 5863 2916 5863 2917 5864 2946 5864 2916 5864 554 5865 2947 5865 2916 5865 2916 5866 2947 5866 2949 5866 465 5867 2916 5867 2949 5867 2947 5868 2948 5868 2949 5868 2949 5869 2948 5869 2950 5869 464 5870 2949 5870 2950 5870 2948 5871 2951 5871 2950 5871 2950 5872 2951 5872 2915 5872 466 5873 2950 5873 2915 5873 2951 5874 555 5874 2915 5874 2915 5875 555 5875 2913 5875 467 5876 2915 5876 2913 5876 555 5877 556 5877 2913 5877 2913 5878 556 5878 2952 5878 2914 5879 2913 5879 2952 5879 556 5880 557 5880 2952 5880 2952 5881 557 5881 2911 5881 2912 5882 2952 5882 2911 5882 557 5883 558 5883 2911 5883 2911 5884 558 5884 2953 5884 2910 5885 2911 5885 2953 5885 558 5886 559 5886 2953 5886 2953 5887 559 5887 2954 5887 2909 5888 2953 5888 2954 5888 559 5889 2955 5889 2954 5889 2954 5890 2955 5890 2956 5890 468 5891 2954 5891 2956 5891 2955 5892 560 5892 2956 5892 2956 5893 560 5893 2957 5893 2908 5894 2956 5894 2957 5894 560 5895 561 5895 2957 5895 2957 5896 561 5896 2959 5896 470 5897 2957 5897 2959 5897 561 5898 2958 5898 2959 5898 2959 5899 2958 5899 2960 5899 471 5900 2959 5900 2960 5900 2958 5901 562 5901 2960 5901 2960 5902 562 5902 2961 5902 472 5903 2960 5903 2961 5903 562 5904 2962 5904 2961 5904 2961 5905 2962 5905 2964 5905 473 5906 2961 5906 2964 5906 2962 5907 2963 5907 2964 5907 2964 5908 2963 5908 2967 5908 2965 5909 2964 5909 2967 5909 2963 5910 2966 5910 2967 5910 2967 5911 2966 5911 2906 5911 2907 5912 2967 5912 2906 5912 2966 5913 563 5913 2906 5913 2906 5914 563 5914 2968 5914 2905 5915 2906 5915 2968 5915 563 5916 2969 5916 2968 5916 2968 5917 2969 5917 2970 5917 474 5918 2968 5918 2970 5918 2969 5919 2971 5919 2970 5919 2970 5920 2971 5920 2903 5920 2904 5921 2970 5921 2903 5921 2971 5922 564 5922 2903 5922 2903 5923 564 5923 2972 5923 475 5924 2903 5924 2972 5924 564 5925 565 5925 2972 5925 2972 5926 565 5926 2902 5926 2973 5927 2972 5927 2902 5927 565 5928 2974 5928 2902 5928 2902 5929 2974 5929 2976 5929 476 5930 2902 5930 2976 5930 2974 5931 2975 5931 2976 5931 2976 5932 2975 5932 2900 5932 477 5933 2976 5933 2900 5933 2975 5934 2977 5934 2900 5934 2900 5935 2977 5935 2978 5935 2901 5936 2900 5936 2978 5936 2977 5937 566 5937 2978 5937 2978 5938 566 5938 2899 5938 439 5939 2978 5939 2899 5939 566 5940 2979 5940 2899 5940 2899 5941 2979 5941 2980 5941 2898 5942 2899 5942 2980 5942 2979 5943 2981 5943 2980 5943 2980 5944 2981 5944 2897 5944 437 5945 2980 5945 2897 5945 2981 5946 2982 5946 2897 5946 2897 5947 2982 5947 2983 5947 2984 5948 2897 5948 2983 5948 2982 5949 2985 5949 2983 5949 2983 5950 2985 5950 2986 5950 434 5951 2983 5951 2986 5951 2985 5952 2987 5952 2986 5952 2986 5953 2987 5953 2988 5953 445 5954 2986 5954 2988 5954 2987 5955 2990 5955 2988 5955 2988 5956 2990 5956 2989 5956 433 5957 2988 5957 2989 5957 2990 5958 2991 5958 2989 5958 2989 5959 2991 5959 2896 5959 432 5960 2989 5960 2896 5960 2991 5961 2992 5961 2896 5961 2896 5962 2992 5962 2895 5962 444 5963 2896 5963 2895 5963 2992 5964 2993 5964 2895 5964 2895 5965 2993 5965 2994 5965 443 5966 2895 5966 2994 5966 2993 5967 2995 5967 2994 5967 2994 5968 2995 5968 2894 5968 442 5969 2994 5969 2894 5969 2995 5970 2996 5970 2894 5970 2894 5971 2996 5971 2893 5971 428 5972 2894 5972 2893 5972 2996 5973 2998 5973 2893 5973 2893 5974 2998 5974 2997 5974 2892 5975 2893 5975 2997 5975 2998 5976 2999 5976 2997 5976 2997 5977 2999 5977 3000 5977 3001 5978 2997 5978 3000 5978 2999 5979 3002 5979 3000 
5979 3000 5980 3002 5980 3003 5980 3004 5981 3000 5981 3003 5981 3002 5982 675 5982 3003 5982 3003 5983 675 5983 3005 5983 2891 5984 3003 5984 3005 5984 675 5985 3006 5985 3005 5985 3005 5986 3006 5986 3007 5986 421 5987 3005 5987 3007 5987 3006 5988 3008 5988 3007 5988 3007 5989 3008 5989 3009 5989 422 5990 3007 5990 3009 5990 3008 5991 3010 5991 3009 5991 3009 5992 3010 5992 2889 5992 2890 5993 3009 5993 2889 5993 3010 5994 676 5994 2889 5994 2889 5995 676 5995 3011 5995 419 5996 2889 5996 3011 5996 676 5997 3012 5997 3011 5997 3011 5998 3012 5998 3013 5998 424 5999 3011 5999 3013 5999 3012 6000 679 6000 3013 6000 3013 6001 679 6001 3014 6001 411 6002 3013 6002 3014 6002 679 6003 3015 6003 3014 6003 3014 6004 3015 6004 2887 6004 2888 6005 3014 6005 2887 6005 3015 6006 3016 6006 2887 6006 2887 6007 3016 6007 3017 6007 446 6008 2887 6008 3017 6008 3016 6009 3018 6009 3017 6009 3017 6010 3018 6010 3019 6010 447 6011 3017 6011 3019 6011 3018 6012 3020 6012 3019 6012 3019 6013 3020 6013 2886 6013 3021 6014 3019 6014 2886 6014 3020 6015 681 6015 2886 6015 2886 6016 681 6016 3022 6016 2885 6017 2886 6017 3022 6017 681 6018 3023 6018 3022 6018 3022 6019 3023 6019 3024 6019 2884 6020 3022 6020 3024 6020 3023 6021 536 6021 3024 6021 3024 6022 536 6022 3025 6022 448 6023 3024 6023 3025 6023 536 6024 538 6024 3025 6024 3025 6025 538 6025 3026 6025 449 6026 3025 6026 3026 6026 538 6027 542 6027 3026 6027 3026 6028 542 6028 2883 6028 450 6029 3026 6029 2883 6029 542 6030 3027 6030 2883 6030 2883 6031 3027 6031 3028 6031 451 6032 2883 6032 3028 6032 3027 6033 3029 6033 3028 6033 3028 6034 3029 6034 3030 6034 2882 6035 3028 6035 3030 6035 3029 6036 3032 6036 3030 6036 3030 6037 3032 6037 3033 6037 3031 6038 3030 6038 3033 6038 3032 6039 545 6039 3033 6039 3033 6040 545 6040 3035 6040 452 6041 3033 6041 3035 6041 545 6042 3034 6042 3035 6042 3035 6043 3034 6043 3037 6043 3036 6044 3035 6044 3037 6044 3034 6045 3038 6045 3037 6045 3037 6046 3038 6046 3039 6046 453 6047 3037 6047 3039 6047 3038 6048 3041 6048 3039 6048 3039 6049 3041 6049 3040 6049 454 6050 3039 6050 3040 6050 3041 6051 3043 6051 3040 6051 3040 6052 3043 6052 3045 6052 3042 6053 3040 6053 3045 6053 3043 6054 3046 6054 3045 6054 3045 6055 3046 6055 3044 6055 2881 6056 3045 6056 3044 6056 3046 6057 3047 6057 3044 6057 3044 6058 3047 6058 3048 6058 2880 6059 3044 6059 3048 6059 3047 6060 708 6060 3048 6060 3048 6061 708 6061 3050 6061 3049 6062 3048 6062 3050 6062 708 6063 707 6063 3050 6063 3050 6064 707 6064 3051 6064 3052 6065 3050 6065 3051 6065 707 6066 3053 6066 3051 6066 3051 6067 3053 6067 3054 6067 2879 6068 3051 6068 3054 6068 3053 6069 705 6069 3054 6069 3054 6070 705 6070 2878 6070 456 6071 3054 6071 2878 6071 3775 6072 3055 6072 3238 6072 3775 6073 3056 6073 3055 6073 3775 6074 3776 6074 3056 6074 3056 6075 3776 6075 3100 6075 3058 6076 3100 6076 3057 6076 3058 6077 3056 6077 3100 6077 3058 6078 6242 6078 3056 6078 3056 6079 6242 6079 3055 6079 3055 6080 6242 6080 3059 6080 3237 6081 3059 6081 3235 6081 3236 6082 3235 6082 3060 6082 3233 6083 3060 6083 6240 6083 3232 6084 6240 6084 3061 6084 3228 6085 3061 6085 3229 6085 3227 6086 3229 6086 3063 6086 3062 6087 3063 6087 6239 6087 3226 6088 6239 6088 6238 6088 3225 6089 6238 6089 3224 6089 3223 6090 3224 6090 3064 6090 3222 6091 3064 6091 3066 6091 3065 6092 3066 6092 3067 6092 3219 6093 3067 6093 3220 6093 3218 6094 3220 6094 6237 6094 3217 6095 6237 6095 6236 6095 3214 6096 6236 6096 3068 6096 3213 6097 3068 6097 3069 6097 3211 6098 3069 6098 3070 6098 3209 6099 3070 6099 3071 
6099 3207 6100 3071 6100 3206 6100 3204 6101 3206 6101 3203 6101 3202 6102 3203 6102 3201 6102 3200 6103 3201 6103 3072 6103 3199 6104 3072 6104 6235 6104 3196 6105 6235 6105 3073 6105 3194 6106 3073 6106 3074 6106 3193 6107 3074 6107 3075 6107 3191 6108 3075 6108 6234 6108 3190 6109 6234 6109 6233 6109 3076 6110 6233 6110 3187 6110 3186 6111 3187 6111 6232 6111 3184 6112 6232 6112 6231 6112 3182 6113 6231 6113 3077 6113 3180 6114 3077 6114 3078 6114 3181 6115 3078 6115 6230 6115 3178 6116 6230 6116 6229 6116 3176 6117 6229 6117 6227 6117 3175 6118 6227 6118 6228 6118 3171 6119 6228 6119 3172 6119 3173 6120 3172 6120 3169 6120 3168 6121 3169 6121 3167 6121 3165 6122 3167 6122 6226 6122 3164 6123 6226 6123 6225 6123 3163 6124 6225 6124 3162 6124 3079 6125 3162 6125 3161 6125 3080 6126 3161 6126 6224 6126 3081 6127 6224 6127 3159 6127 3160 6128 3159 6128 3082 6128 3156 6129 3082 6129 3157 6129 3155 6130 3157 6130 3083 6130 3154 6131 3083 6131 6223 6131 3152 6132 6223 6132 3084 6132 3150 6133 3084 6133 6222 6133 3149 6134 6222 6134 3085 6134 3147 6135 3085 6135 6221 6135 3146 6136 6221 6136 6220 6136 3145 6137 6220 6137 6219 6137 3086 6138 6219 6138 3143 6138 3141 6139 3143 6139 3139 6139 3138 6140 3139 6140 6218 6140 3137 6141 6218 6141 3087 6141 3136 6142 3087 6142 6217 6142 3088 6143 6217 6143 3134 6143 3133 6144 3134 6144 3090 6144 3089 6145 3090 6145 6216 6145 3130 6146 6216 6146 6215 6146 3128 6147 6215 6147 6214 6147 3126 6148 6214 6148 6213 6148 3124 6149 6213 6149 3091 6149 3122 6150 3091 6150 6212 6150 3121 6151 6212 6151 3092 6151 3119 6152 3092 6152 6210 6152 3118 6153 6210 6153 3093 6153 3115 6154 3093 6154 3114 6154 3113 6155 3114 6155 3094 6155 3111 6156 3094 6156 6209 6156 3110 6157 6209 6157 3096 6157 3095 6158 3096 6158 3109 6158 3106 6159 3109 6159 6208 6159 3107 6160 6208 6160 3097 6160 3104 6161 3097 6161 6207 6161 3103 6162 6207 6162 3098 6162 3099 6163 3098 6163 3057 6163 3100 6164 3099 6164 3057 6164 3100 6165 3101 6165 3099 6165 3100 6166 3776 6166 3101 6166 3101 6167 3777 6167 3099 6167 3099 6168 3777 6168 3103 6168 3098 6169 3099 6169 3103 6169 3777 6170 3102 6170 3103 6170 3103 6171 3102 6171 3104 6171 6207 6172 3103 6172 3104 6172 3102 6173 3778 6173 3104 6173 3104 6174 3778 6174 3107 6174 3097 6175 3104 6175 3107 6175 3778 6176 3105 6176 3107 6176 3107 6177 3105 6177 3106 6177 6208 6178 3107 6178 3106 6178 3105 6179 3108 6179 3106 6179 3106 6180 3108 6180 3095 6180 3109 6181 3106 6181 3095 6181 3108 6182 3779 6182 3095 6182 3095 6183 3779 6183 3110 6183 3096 6184 3095 6184 3110 6184 3779 6185 3112 6185 3110 6185 3110 6186 3112 6186 3111 6186 6209 6187 3110 6187 3111 6187 3112 6188 3780 6188 3111 6188 3111 6189 3780 6189 3113 6189 3094 6190 3111 6190 3113 6190 3780 6191 3781 6191 3113 6191 3113 6192 3781 6192 3115 6192 3114 6193 3113 6193 3115 6193 3781 6194 3116 6194 3115 6194 3115 6195 3116 6195 3118 6195 3093 6196 3115 6196 3118 6196 3116 6197 3117 6197 3118 6197 3118 6198 3117 6198 3119 6198 6210 6199 3118 6199 3119 6199 3117 6200 3120 6200 3119 6200 3119 6201 3120 6201 3121 6201 3092 6202 3119 6202 3121 6202 3120 6203 3123 6203 3121 6203 3121 6204 3123 6204 3122 6204 6212 6205 3121 6205 3122 6205 3123 6206 3125 6206 3122 6206 3122 6207 3125 6207 3124 6207 3091 6208 3122 6208 3124 6208 3125 6209 3782 6209 3124 6209 3124 6210 3782 6210 3126 6210 6213 6211 3124 6211 3126 6211 3782 6212 3127 6212 3126 6212 3126 6213 3127 6213 3128 6213 6214 6214 3126 6214 3128 6214 3127 6215 3129 6215 3128 6215 3128 6216 3129 6216 3130 6216 6215 6217 3128 6217 3130 6217 3129 6218 
3131 6218 3130 6218 3130 6219 3131 6219 3089 6219 6216 6220 3130 6220 3089 6220 3131 6221 3132 6221 3089 6221 3089 6222 3132 6222 3133 6222 3090 6223 3089 6223 3133 6223 3132 6224 3783 6224 3133 6224 3133 6225 3783 6225 3088 6225 3134 6226 3133 6226 3088 6226 3783 6227 3135 6227 3088 6227 3088 6228 3135 6228 3136 6228 6217 6229 3088 6229 3136 6229 3135 6230 3784 6230 3136 6230 3136 6231 3784 6231 3137 6231 3087 6232 3136 6232 3137 6232 3784 6233 3785 6233 3137 6233 3137 6234 3785 6234 3138 6234 6218 6235 3137 6235 3138 6235 3785 6236 3140 6236 3138 6236 3138 6237 3140 6237 3141 6237 3139 6238 3138 6238 3141 6238 3140 6239 3142 6239 3141 6239 3141 6240 3142 6240 3086 6240 3143 6241 3141 6241 3086 6241 3142 6242 3144 6242 3086 6242 3086 6243 3144 6243 3145 6243 6219 6244 3086 6244 3145 6244 3144 6245 3786 6245 3145 6245 3145 6246 3786 6246 3146 6246 6220 6247 3145 6247 3146 6247 3786 6248 3787 6248 3146 6248 3146 6249 3787 6249 3147 6249 6221 6250 3146 6250 3147 6250 3787 6251 3148 6251 3147 6251 3147 6252 3148 6252 3149 6252 3085 6253 3147 6253 3149 6253 3148 6254 3151 6254 3149 6254 3149 6255 3151 6255 3150 6255 6222 6256 3149 6256 3150 6256 3151 6257 3788 6257 3150 6257 3150 6258 3788 6258 3152 6258 3084 6259 3150 6259 3152 6259 3788 6260 3153 6260 3152 6260 3152 6261 3153 6261 3154 6261 6223 6262 3152 6262 3154 6262 3153 6263 3789 6263 3154 6263 3154 6264 3789 6264 3155 6264 3083 6265 3154 6265 3155 6265 3789 6266 3791 6266 3155 6266 3155 6267 3791 6267 3156 6267 3157 6268 3155 6268 3156 6268 3791 6269 3158 6269 3156 6269 3156 6270 3158 6270 3160 6270 3082 6271 3156 6271 3160 6271 3158 6272 3790 6272 3160 6272 3160 6273 3790 6273 3081 6273 3159 6274 3160 6274 3081 6274 3790 6275 3816 6275 3081 6275 3081 6276 3816 6276 3080 6276 6224 6277 3081 6277 3080 6277 3816 6278 3815 6278 3080 6278 3080 6279 3815 6279 3079 6279 3161 6280 3080 6280 3079 6280 3815 6281 3814 6281 3079 6281 3079 6282 3814 6282 3163 6282 3162 6283 3079 6283 3163 6283 3814 6284 3813 6284 3163 6284 3163 6285 3813 6285 3164 6285 6225 6286 3163 6286 3164 6286 3813 6287 3166 6287 3164 6287 3164 6288 3166 6288 3165 6288 6226 6289 3164 6289 3165 6289 3166 6290 3812 6290 3165 6290 3165 6291 3812 6291 3168 6291 3167 6292 3165 6292 3168 6292 3812 6293 3170 6293 3168 6293 3168 6294 3170 6294 3173 6294 3169 6295 3168 6295 3173 6295 3170 6296 3811 6296 3173 6296 3173 6297 3811 6297 3171 6297 3172 6298 3173 6298 3171 6298 3811 6299 3810 6299 3171 6299 3171 6300 3810 6300 3175 6300 6228 6301 3171 6301 3175 6301 3810 6302 3174 6302 3175 6302 3175 6303 3174 6303 3176 6303 6227 6304 3175 6304 3176 6304 3174 6305 3177 6305 3176 6305 3176 6306 3177 6306 3178 6306 6229 6307 3176 6307 3178 6307 3177 6308 3809 6308 3178 6308 3178 6309 3809 6309 3181 6309 6230 6310 3178 6310 3181 6310 3809 6311 3179 6311 3181 6311 3181 6312 3179 6312 3180 6312 3078 6313 3181 6313 3180 6313 3179 6314 3183 6314 3180 6314 3180 6315 3183 6315 3182 6315 3077 6316 3180 6316 3182 6316 3183 6317 3793 6317 3182 6317 3182 6318 3793 6318 3184 6318 6231 6319 3182 6319 3184 6319 3793 6320 3185 6320 3184 6320 3184 6321 3185 6321 3186 6321 6232 6322 3184 6322 3186 6322 3185 6323 3795 6323 3186 6323 3186 6324 3795 6324 3076 6324 3187 6325 3186 6325 3076 6325 3795 6326 3188 6326 3076 6326 3076 6327 3188 6327 3190 6327 6233 6328 3076 6328 3190 6328 3188 6329 3189 6329 3190 6329 3190 6330 3189 6330 3191 6330 6234 6331 3190 6331 3191 6331 3189 6332 3192 6332 3191 6332 3191 6333 3192 6333 3193 6333 3075 6334 3191 6334 3193 6334 3192 6335 3195 6335 3193 6335 3193 6336 3195 6336 3194 
6336 3074 6337 3193 6337 3194 6337 3195 6338 3796 6338 3194 6338 3194 6339 3796 6339 3196 6339 3073 6340 3194 6340 3196 6340 3796 6341 3197 6341 3196 6341 3196 6342 3197 6342 3199 6342 6235 6343 3196 6343 3199 6343 3197 6344 3198 6344 3199 6344 3199 6345 3198 6345 3200 6345 3072 6346 3199 6346 3200 6346 3198 6347 3797 6347 3200 6347 3200 6348 3797 6348 3202 6348 3201 6349 3200 6349 3202 6349 3797 6350 3205 6350 3202 6350 3202 6351 3205 6351 3204 6351 3203 6352 3202 6352 3204 6352 3205 6353 3798 6353 3204 6353 3204 6354 3798 6354 3207 6354 3206 6355 3204 6355 3207 6355 3798 6356 3208 6356 3207 6356 3207 6357 3208 6357 3209 6357 3071 6358 3207 6358 3209 6358 3208 6359 3210 6359 3209 6359 3209 6360 3210 6360 3211 6360 3070 6361 3209 6361 3211 6361 3210 6362 3799 6362 3211 6362 3211 6363 3799 6363 3213 6363 3069 6364 3211 6364 3213 6364 3799 6365 3212 6365 3213 6365 3213 6366 3212 6366 3214 6366 3068 6367 3213 6367 3214 6367 3212 6368 3215 6368 3214 6368 3214 6369 3215 6369 3217 6369 6236 6370 3214 6370 3217 6370 3215 6371 3216 6371 3217 6371 3217 6372 3216 6372 3218 6372 6237 6373 3217 6373 3218 6373 3216 6374 3800 6374 3218 6374 3218 6375 3800 6375 3219 6375 3220 6376 3218 6376 3219 6376 3800 6377 3801 6377 3219 6377 3219 6378 3801 6378 3065 6378 3067 6379 3219 6379 3065 6379 3801 6380 3221 6380 3065 6380 3065 6381 3221 6381 3222 6381 3066 6382 3065 6382 3222 6382 3221 6383 3802 6383 3222 6383 3222 6384 3802 6384 3223 6384 3064 6385 3222 6385 3223 6385 3802 6386 3804 6386 3223 6386 3223 6387 3804 6387 3225 6387 3224 6388 3223 6388 3225 6388 3804 6389 3803 6389 3225 6389 3225 6390 3803 6390 3226 6390 6238 6391 3225 6391 3226 6391 3803 6392 3805 6392 3226 6392 3226 6393 3805 6393 3062 6393 6239 6394 3226 6394 3062 6394 3805 6395 3807 6395 3062 6395 3062 6396 3807 6396 3227 6396 3063 6397 3062 6397 3227 6397 3807 6398 3806 6398 3227 6398 3227 6399 3806 6399 3228 6399 3229 6400 3227 6400 3228 6400 3806 6401 3230 6401 3228 6401 3228 6402 3230 6402 3232 6402 3061 6403 3228 6403 3232 6403 3230 6404 3231 6404 3232 6404 3232 6405 3231 6405 3233 6405 6240 6406 3232 6406 3233 6406 3231 6407 3234 6407 3233 6407 3233 6408 3234 6408 3236 6408 3060 6409 3233 6409 3236 6409 3234 6410 3808 6410 3236 6410 3236 6411 3808 6411 3237 6411 3235 6412 3236 6412 3237 6412 3808 6413 3238 6413 3237 6413 3237 6414 3238 6414 3055 6414 3059 6415 3237 6415 3055 6415 3818 6416 3243 6416 3819 6416 3818 6417 3242 6417 3243 6417 3818 6418 3239 6418 3242 6418 3242 6419 3239 6419 3295 6419 3241 6420 3295 6420 3240 6420 3241 6421 3242 6421 3295 6421 3241 6422 6325 6422 3242 6422 3242 6423 6325 6423 3243 6423 3243 6424 6325 6424 3244 6424 3411 6425 3244 6425 3412 6425 3409 6426 3412 6426 3245 6426 3406 6427 3245 6427 6324 6427 3407 6428 6324 6428 3246 6428 3405 6429 3246 6429 3247 6429 3404 6430 3247 6430 3249 6430 3248 6431 3249 6431 3250 6431 3401 6432 3250 6432 3251 6432 3398 6433 3251 6433 3399 6433 3252 6434 3399 6434 3254 6434 3253 6435 3254 6435 3255 6435 3396 6436 3255 6436 3256 6436 3393 6437 3256 6437 3394 6437 3257 6438 3394 6438 6243 6438 3392 6439 6243 6439 3258 6439 3259 6440 3258 6440 3391 6440 3260 6441 3391 6441 6244 6441 3261 6442 6244 6442 6245 6442 3389 6443 6245 6443 3390 6443 3262 6444 3390 6444 6247 6444 3263 6445 6247 6445 6246 6445 3386 6446 6246 6446 6248 6446 3385 6447 6248 6447 3265 6447 3264 6448 3265 6448 3383 6448 3381 6449 3383 6449 3382 6449 3380 6450 3382 6450 6250 6450 3379 6451 6250 6451 6249 6451 3377 6452 6249 6452 6251 6452 3376 6453 6251 6453 6253 6453 3266 6454 6253 6454 6252 6454 3372 6455 
6252 6455 6254 6455 3371 6456 6254 6456 6255 6456 3370 6457 6255 6457 3369 6457 3368 6458 3369 6458 3367 6458 3366 6459 3367 6459 3364 6459 3365 6460 3364 6460 3363 6460 3267 6461 3363 6461 6257 6461 3268 6462 6257 6462 3361 6462 3360 6463 3361 6463 3269 6463 3359 6464 3269 6464 6258 6464 3358 6465 6258 6465 6259 6465 3356 6466 6259 6466 6260 6466 3355 6467 6260 6467 6261 6467 3354 6468 6261 6468 6262 6468 3352 6469 6262 6469 3271 6469 3270 6470 3271 6470 3349 6470 3350 6471 3349 6471 3272 6471 3347 6472 3272 6472 6263 6472 3273 6473 6263 6473 6264 6473 3345 6474 6264 6474 3274 6474 3344 6475 3274 6475 3342 6475 3340 6476 3342 6476 3275 6476 3339 6477 3275 6477 3338 6477 3336 6478 3338 6478 3276 6478 3335 6479 3276 6479 6266 6479 3277 6480 6266 6480 3334 6480 3332 6481 3334 6481 6267 6481 3278 6482 6267 6482 6268 6482 3331 6483 6268 6483 3279 6483 3330 6484 3279 6484 3280 6484 3281 6485 3280 6485 6269 6485 3326 6486 6269 6486 3327 6486 3282 6487 3327 6487 3283 6487 3323 6488 3283 6488 6270 6488 3322 6489 6270 6489 6271 6489 3321 6490 6271 6490 3284 6490 3319 6491 3284 6491 3285 6491 3318 6492 3285 6492 6333 6492 3316 6493 6333 6493 3286 6493 3314 6494 3286 6494 3288 6494 3287 6495 3288 6495 3313 6495 3311 6496 3313 6496 6332 6496 3289 6497 6332 6497 3310 6497 3309 6498 3310 6498 6331 6498 3307 6499 6331 6499 6330 6499 3306 6500 6330 6500 6329 6500 3290 6501 6329 6501 6328 6501 3291 6502 6328 6502 3292 6502 3303 6503 3292 6503 3293 6503 3302 6504 3293 6504 3301 6504 3299 6505 3301 6505 6327 6505 3294 6506 6327 6506 6326 6506 3298 6507 6326 6507 3240 6507 3295 6508 3298 6508 3240 6508 3295 6509 3296 6509 3298 6509 3295 6510 3239 6510 3296 6510 3296 6511 3297 6511 3298 6511 3298 6512 3297 6512 3294 6512 6326 6513 3298 6513 3294 6513 3297 6514 3300 6514 3294 6514 3294 6515 3300 6515 3299 6515 6327 6516 3294 6516 3299 6516 3300 6517 3868 6517 3299 6517 3299 6518 3868 6518 3302 6518 3301 6519 3299 6519 3302 6519 3868 6520 3867 6520 3302 6520 3302 6521 3867 6521 3303 6521 3293 6522 3302 6522 3303 6522 3867 6523 3866 6523 3303 6523 3303 6524 3866 6524 3291 6524 3292 6525 3303 6525 3291 6525 3866 6526 3304 6526 3291 6526 3291 6527 3304 6527 3290 6527 6328 6528 3291 6528 3290 6528 3304 6529 3305 6529 3290 6529 3290 6530 3305 6530 3306 6530 6329 6531 3290 6531 3306 6531 3305 6532 3308 6532 3306 6532 3306 6533 3308 6533 3307 6533 6330 6534 3306 6534 3307 6534 3308 6535 3865 6535 3307 6535 3307 6536 3865 6536 3309 6536 6331 6537 3307 6537 3309 6537 3865 6538 3864 6538 3309 6538 3309 6539 3864 6539 3289 6539 3310 6540 3309 6540 3289 6540 3864 6541 3863 6541 3289 6541 3289 6542 3863 6542 3311 6542 6332 6543 3289 6543 3311 6543 3863 6544 3312 6544 3311 6544 3311 6545 3312 6545 3287 6545 3313 6546 3311 6546 3287 6546 3312 6547 3315 6547 3287 6547 3287 6548 3315 6548 3314 6548 3288 6549 3287 6549 3314 6549 3315 6550 3862 6550 3314 6550 3314 6551 3862 6551 3316 6551 3286 6552 3314 6552 3316 6552 3862 6553 3317 6553 3316 6553 3316 6554 3317 6554 3318 6554 6333 6555 3316 6555 3318 6555 3317 6556 3320 6556 3318 6556 3318 6557 3320 6557 3319 6557 3285 6558 3318 6558 3319 6558 3320 6559 3860 6559 3319 6559 3319 6560 3860 6560 3321 6560 3284 6561 3319 6561 3321 6561 3860 6562 3861 6562 3321 6562 3321 6563 3861 6563 3322 6563 6271 6564 3321 6564 3322 6564 3861 6565 3324 6565 3322 6565 3322 6566 3324 6566 3323 6566 6270 6567 3322 6567 3323 6567 3324 6568 3325 6568 3323 6568 3323 6569 3325 6569 3282 6569 3283 6570 3323 6570 3282 6570 3325 6571 3859 6571 3282 6571 3282 6572 3859 6572 3326 6572 3327 6573 3282 6573 3326 
6573 3859 6574 3328 6574 3326 6574 3326 6575 3328 6575 3281 6575 6269 6576 3326 6576 3281 6576 3328 6577 3329 6577 3281 6577 3281 6578 3329 6578 3330 6578 3280 6579 3281 6579 3330 6579 3329 6580 3856 6580 3330 6580 3330 6581 3856 6581 3331 6581 3279 6582 3330 6582 3331 6582 3856 6583 3855 6583 3331 6583 3331 6584 3855 6584 3278 6584 6268 6585 3331 6585 3278 6585 3855 6586 3333 6586 3278 6586 3278 6587 3333 6587 3332 6587 6267 6588 3278 6588 3332 6588 3333 6589 3854 6589 3332 6589 3332 6590 3854 6590 3277 6590 3334 6591 3332 6591 3277 6591 3854 6592 3853 6592 3277 6592 3277 6593 3853 6593 3335 6593 6266 6594 3277 6594 3335 6594 3853 6595 3852 6595 3335 6595 3335 6596 3852 6596 3336 6596 3276 6597 3335 6597 3336 6597 3852 6598 3337 6598 3336 6598 3336 6599 3337 6599 3339 6599 3338 6600 3336 6600 3339 6600 3337 6601 3851 6601 3339 6601 3339 6602 3851 6602 3340 6602 3275 6603 3339 6603 3340 6603 3851 6604 3341 6604 3340 6604 3340 6605 3341 6605 3344 6605 3342 6606 3340 6606 3344 6606 3341 6607 3343 6607 3344 6607 3344 6608 3343 6608 3345 6608 3274 6609 3344 6609 3345 6609 3343 6610 3850 6610 3345 6610 3345 6611 3850 6611 3273 6611 6264 6612 3345 6612 3273 6612 3850 6613 3346 6613 3273 6613 3273 6614 3346 6614 3347 6614 6263 6615 3273 6615 3347 6615 3346 6616 3348 6616 3347 6616 3347 6617 3348 6617 3350 6617 3272 6618 3347 6618 3350 6618 3348 6619 3849 6619 3350 6619 3350 6620 3849 6620 3270 6620 3349 6621 3350 6621 3270 6621 3849 6622 3351 6622 3270 6622 3270 6623 3351 6623 3352 6623 3271 6624 3270 6624 3352 6624 3351 6625 3353 6625 3352 6625 3352 6626 3353 6626 3354 6626 6262 6627 3352 6627 3354 6627 3353 6628 3848 6628 3354 6628 3354 6629 3848 6629 3355 6629 6261 6630 3354 6630 3355 6630 3848 6631 3847 6631 3355 6631 3355 6632 3847 6632 3356 6632 6260 6633 3355 6633 3356 6633 3847 6634 3357 6634 3356 6634 3356 6635 3357 6635 3358 6635 6259 6636 3356 6636 3358 6636 3357 6637 3846 6637 3358 6637 3358 6638 3846 6638 3359 6638 6258 6639 3358 6639 3359 6639 3846 6640 3845 6640 3359 6640 3359 6641 3845 6641 3360 6641 3269 6642 3359 6642 3360 6642 3845 6643 3844 6643 3360 6643 3360 6644 3844 6644 3268 6644 3361 6645 3360 6645 3268 6645 3844 6646 3843 6646 3268 6646 3268 6647 3843 6647 3267 6647 6257 6648 3268 6648 3267 6648 3843 6649 3362 6649 3267 6649 3267 6650 3362 6650 3365 6650 3363 6651 3267 6651 3365 6651 3362 6652 3840 6652 3365 6652 3365 6653 3840 6653 3366 6653 3364 6654 3365 6654 3366 6654 3840 6655 3842 6655 3366 6655 3366 6656 3842 6656 3368 6656 3367 6657 3366 6657 3368 6657 3842 6658 3839 6658 3368 6658 3368 6659 3839 6659 3370 6659 3369 6660 3368 6660 3370 6660 3839 6661 3838 6661 3370 6661 3370 6662 3838 6662 3371 6662 6255 6663 3370 6663 3371 6663 3838 6664 3837 6664 3371 6664 3371 6665 3837 6665 3372 6665 6254 6666 3371 6666 3372 6666 3837 6667 3373 6667 3372 6667 3372 6668 3373 6668 3266 6668 6252 6669 3372 6669 3266 6669 3373 6670 3374 6670 3266 6670 3266 6671 3374 6671 3376 6671 6253 6672 3266 6672 3376 6672 3374 6673 3375 6673 3376 6673 3376 6674 3375 6674 3377 6674 6251 6675 3376 6675 3377 6675 3375 6676 3378 6676 3377 6676 3377 6677 3378 6677 3379 6677 6249 6678 3377 6678 3379 6678 3378 6679 3835 6679 3379 6679 3379 6680 3835 6680 3380 6680 6250 6681 3379 6681 3380 6681 3835 6682 3836 6682 3380 6682 3380 6683 3836 6683 3381 6683 3382 6684 3380 6684 3381 6684 3836 6685 3834 6685 3381 6685 3381 6686 3834 6686 3264 6686 3383 6687 3381 6687 3264 6687 3834 6688 3384 6688 3264 6688 3264 6689 3384 6689 3385 6689 3265 6690 3264 6690 3385 6690 3384 6691 3833 6691 3385 6691 3385 6692 
3833 6692 3386 6692 6248 6693 3385 6693 3386 6693 3833 6694 3387 6694 3386 6694 3386 6695 3387 6695 3263 6695 6246 6696 3386 6696 3263 6696 3387 6697 3388 6697 3263 6697 3263 6698 3388 6698 3262 6698 6247 6699 3263 6699 3262 6699 3388 6700 3832 6700 3262 6700 3262 6701 3832 6701 3389 6701 3390 6702 3262 6702 3389 6702 3832 6703 3831 6703 3389 6703 3389 6704 3831 6704 3261 6704 6245 6705 3389 6705 3261 6705 3831 6706 3829 6706 3261 6706 3261 6707 3829 6707 3260 6707 6244 6708 3261 6708 3260 6708 3829 6709 3830 6709 3260 6709 3260 6710 3830 6710 3259 6710 3391 6711 3260 6711 3259 6711 3830 6712 3828 6712 3259 6712 3259 6713 3828 6713 3392 6713 3258 6714 3259 6714 3392 6714 3828 6715 3827 6715 3392 6715 3392 6716 3827 6716 3257 6716 6243 6717 3392 6717 3257 6717 3827 6718 3395 6718 3257 6718 3257 6719 3395 6719 3393 6719 3394 6720 3257 6720 3393 6720 3395 6721 3826 6721 3393 6721 3393 6722 3826 6722 3396 6722 3256 6723 3393 6723 3396 6723 3826 6724 3397 6724 3396 6724 3396 6725 3397 6725 3253 6725 3255 6726 3396 6726 3253 6726 3397 6727 3825 6727 3253 6727 3253 6728 3825 6728 3252 6728 3254 6729 3253 6729 3252 6729 3825 6730 3400 6730 3252 6730 3252 6731 3400 6731 3398 6731 3399 6732 3252 6732 3398 6732 3400 6733 3824 6733 3398 6733 3398 6734 3824 6734 3401 6734 3251 6735 3398 6735 3401 6735 3824 6736 3823 6736 3401 6736 3401 6737 3823 6737 3248 6737 3250 6738 3401 6738 3248 6738 3823 6739 3402 6739 3248 6739 3248 6740 3402 6740 3404 6740 3249 6741 3248 6741 3404 6741 3402 6742 3403 6742 3404 6742 3404 6743 3403 6743 3405 6743 3247 6744 3404 6744 3405 6744 3403 6745 3821 6745 3405 6745 3405 6746 3821 6746 3407 6746 3246 6747 3405 6747 3407 6747 3821 6748 3820 6748 3407 6748 3407 6749 3820 6749 3406 6749 6324 6750 3407 6750 3406 6750 3820 6751 3408 6751 3406 6751 3406 6752 3408 6752 3409 6752 3245 6753 3406 6753 3409 6753 3408 6754 3410 6754 3409 6754 3409 6755 3410 6755 3411 6755 3412 6756 3409 6756 3411 6756 3410 6757 3819 6757 3411 6757 3411 6758 3819 6758 3243 6758 3244 6759 3411 6759 3243 6759 599 6760 3597 6760 3596 6760 599 6761 3416 6761 3597 6761 599 6762 3413 6762 3416 6762 3416 6763 3413 6763 3465 6763 3414 6764 3465 6764 3415 6764 3414 6765 3416 6765 3465 6765 3414 6766 3912 6766 3416 6766 3416 6767 3912 6767 3597 6767 3597 6768 3912 6768 3911 6768 3417 6769 3911 6769 3909 6769 3418 6770 3909 6770 3595 6770 3593 6771 3595 6771 3419 6771 3591 6772 3419 6772 3592 6772 3590 6773 3592 6773 3420 6773 3589 6774 3420 6774 3884 6774 3587 6775 3884 6775 3585 6775 3586 6776 3585 6776 3882 6776 3582 6777 3882 6777 3421 6777 3580 6778 3421 6778 3881 6778 3581 6779 3881 6779 3422 6779 3579 6780 3422 6780 3880 6780 3578 6781 3880 6781 3423 6781 3577 6782 3423 6782 3906 6782 3576 6783 3906 6783 3425 6783 3424 6784 3425 6784 3877 6784 3574 6785 3877 6785 3875 6785 3573 6786 3875 6786 3426 6786 3427 6787 3426 6787 3869 6787 3571 6788 3869 6788 3428 6788 3569 6789 3428 6789 3430 6789 3429 6790 3430 6790 3871 6790 3566 6791 3871 6791 3431 6791 3565 6792 3431 6792 3432 6792 3564 6793 3432 6793 3433 6793 3560 6794 3433 6794 3561 6794 3562 6795 3561 6795 3434 6795 3558 6796 3434 6796 3873 6796 3555 6797 3873 6797 3872 6797 3554 6798 3872 6798 3874 6798 3551 6799 3874 6799 3552 6799 3550 6800 3552 6800 3435 6800 3547 6801 3435 6801 3548 6801 3436 6802 3548 6802 3437 6802 3546 6803 3437 6803 3545 6803 3544 6804 3545 6804 3439 6804 3438 6805 3439 6805 3543 6805 3542 6806 3543 6806 3441 6806 3440 6807 3441 6807 3541 6807 3442 6808 3541 6808 3443 6808 3539 6809 3443 6809 3445 6809 3444 6810 3445 6810 3537 
6810 3536 6811 3537 6811 3446 6811 3534 6812 3446 6812 3533 6812 3532 6813 3533 6813 3531 6813 3447 6814 3531 6814 3905 6814 3448 6815 3905 6815 3527 6815 3526 6816 3527 6816 3904 6816 3524 6817 3904 6817 3903 6817 3449 6818 3903 6818 3522 6818 3450 6819 3522 6819 3520 6819 3521 6820 3520 6820 3451 6820 3518 6821 3451 6821 3902 6821 3452 6822 3902 6822 3514 6822 3515 6823 3514 6823 3453 6823 3511 6824 3453 6824 3900 6824 3512 6825 3900 6825 3509 6825 3507 6826 3509 6826 3899 6826 3454 6827 3899 6827 3898 6827 3455 6828 3898 6828 3897 6828 3505 6829 3897 6829 3896 6829 3504 6830 3896 6830 3503 6830 3500 6831 3503 6831 3456 6831 3499 6832 3456 6832 3457 6832 3498 6833 3457 6833 3458 6833 3494 6834 3458 6834 3459 6834 3495 6835 3459 6835 3460 6835 3491 6836 3460 6836 3490 6836 3487 6837 3490 6837 3488 6837 3461 6838 3488 6838 3895 6838 3486 6839 3895 6839 3462 6839 3484 6840 3462 6840 3483 6840 3482 6841 3483 6841 3481 6841 3480 6842 3481 6842 3894 6842 3479 6843 3894 6843 3477 6843 3478 6844 3477 6844 3893 6844 3475 6845 3893 6845 3892 6845 3474 6846 3892 6846 3472 6846 3473 6847 3472 6847 3463 6847 3464 6848 3463 6848 3890 6848 3469 6849 3890 6849 3888 6849 3470 6850 3888 6850 3914 6850 3467 6851 3914 6851 3415 6851 3465 6852 3467 6852 3415 6852 3465 6853 3466 6853 3467 6853 3465 6854 3413 6854 3466 6854 3466 6855 602 6855 3467 6855 3467 6856 602 6856 3470 6856 3914 6857 3467 6857 3470 6857 602 6858 3468 6858 3470 6858 3470 6859 3468 6859 3469 6859 3888 6860 3470 6860 3469 6860 3468 6861 3471 6861 3469 6861 3469 6862 3471 6862 3464 6862 3890 6863 3469 6863 3464 6863 3471 6864 603 6864 3464 6864 3464 6865 603 6865 3473 6865 3463 6866 3464 6866 3473 6866 603 6867 690 6867 3473 6867 3473 6868 690 6868 3474 6868 3472 6869 3473 6869 3474 6869 690 6870 3476 6870 3474 6870 3474 6871 3476 6871 3475 6871 3892 6872 3474 6872 3475 6872 3476 6873 691 6873 3475 6873 3475 6874 691 6874 3478 6874 3893 6875 3475 6875 3478 6875 691 6876 606 6876 3478 6876 3478 6877 606 6877 3479 6877 3477 6878 3478 6878 3479 6878 606 6879 607 6879 3479 6879 3479 6880 607 6880 3480 6880 3894 6881 3479 6881 3480 6881 607 6882 609 6882 3480 6882 3480 6883 609 6883 3482 6883 3481 6884 3480 6884 3482 6884 609 6885 608 6885 3482 6885 3482 6886 608 6886 3484 6886 3483 6887 3482 6887 3484 6887 608 6888 3485 6888 3484 6888 3484 6889 3485 6889 3486 6889 3462 6890 3484 6890 3486 6890 3485 6891 611 6891 3486 6891 3486 6892 611 6892 3461 6892 3895 6893 3486 6893 3461 6893 611 6894 610 6894 3461 6894 3461 6895 610 6895 3487 6895 3488 6896 3461 6896 3487 6896 610 6897 3489 6897 3487 6897 3487 6898 3489 6898 3491 6898 3490 6899 3487 6899 3491 6899 3489 6900 3492 6900 3491 6900 3491 6901 3492 6901 3495 6901 3460 6902 3491 6902 3495 6902 3492 6903 3493 6903 3495 6903 3495 6904 3493 6904 3494 6904 3459 6905 3495 6905 3494 6905 3493 6906 3496 6906 3494 6906 3494 6907 3496 6907 3498 6907 3458 6908 3494 6908 3498 6908 3496 6909 3497 6909 3498 6909 3498 6910 3497 6910 3499 6910 3457 6911 3498 6911 3499 6911 3497 6912 3501 6912 3499 6912 3499 6913 3501 6913 3500 6913 3456 6914 3499 6914 3500 6914 3501 6915 3502 6915 3500 6915 3500 6916 3502 6916 3504 6916 3503 6917 3500 6917 3504 6917 3502 6918 613 6918 3504 6918 3504 6919 613 6919 3505 6919 3896 6920 3504 6920 3505 6920 613 6921 612 6921 3505 6921 3505 6922 612 6922 3455 6922 3897 6923 3505 6923 3455 6923 612 6924 3506 6924 3455 6924 3455 6925 3506 6925 3454 6925 3898 6926 3455 6926 3454 6926 3506 6927 614 6927 3454 6927 3454 6928 614 6928 3507 6928 3899 6929 3454 6929 3507 6929 614 6930 3508 
6930 3507 6930 3507 6931 3508 6931 3512 6931 3509 6932 3507 6932 3512 6932 3508 6933 3510 6933 3512 6933 3512 6934 3510 6934 3511 6934 3900 6935 3512 6935 3511 6935 3510 6936 3513 6936 3511 6936 3511 6937 3513 6937 3515 6937 3453 6938 3511 6938 3515 6938 3513 6939 3516 6939 3515 6939 3515 6940 3516 6940 3452 6940 3514 6941 3515 6941 3452 6941 3516 6942 3517 6942 3452 6942 3452 6943 3517 6943 3518 6943 3902 6944 3452 6944 3518 6944 3517 6945 3519 6945 3518 6945 3518 6946 3519 6946 3521 6946 3451 6947 3518 6947 3521 6947 3519 6948 615 6948 3521 6948 3521 6949 615 6949 3450 6949 3520 6950 3521 6950 3450 6950 615 6951 616 6951 3450 6951 3450 6952 616 6952 3449 6952 3522 6953 3450 6953 3449 6953 616 6954 3523 6954 3449 6954 3449 6955 3523 6955 3524 6955 3903 6956 3449 6956 3524 6956 3523 6957 3525 6957 3524 6957 3524 6958 3525 6958 3526 6958 3904 6959 3524 6959 3526 6959 3525 6960 3528 6960 3526 6960 3526 6961 3528 6961 3448 6961 3527 6962 3526 6962 3448 6962 3528 6963 3529 6963 3448 6963 3448 6964 3529 6964 3447 6964 3905 6965 3448 6965 3447 6965 3529 6966 3530 6966 3447 6966 3447 6967 3530 6967 3532 6967 3531 6968 3447 6968 3532 6968 3530 6969 575 6969 3532 6969 3532 6970 575 6970 3534 6970 3533 6971 3532 6971 3534 6971 575 6972 3535 6972 3534 6972 3534 6973 3535 6973 3536 6973 3446 6974 3534 6974 3536 6974 3535 6975 577 6975 3536 6975 3536 6976 577 6976 3444 6976 3537 6977 3536 6977 3444 6977 577 6978 3538 6978 3444 6978 3444 6979 3538 6979 3539 6979 3445 6980 3444 6980 3539 6980 3538 6981 3540 6981 3539 6981 3539 6982 3540 6982 3442 6982 3443 6983 3539 6983 3442 6983 3540 6984 578 6984 3442 6984 3442 6985 578 6985 3440 6985 3541 6986 3442 6986 3440 6986 578 6987 579 6987 3440 6987 3440 6988 579 6988 3542 6988 3441 6989 3440 6989 3542 6989 579 6990 580 6990 3542 6990 3542 6991 580 6991 3438 6991 3543 6992 3542 6992 3438 6992 580 6993 582 6993 3438 6993 3438 6994 582 6994 3544 6994 3439 6995 3438 6995 3544 6995 582 6996 581 6996 3544 6996 3544 6997 581 6997 3546 6997 3545 6998 3544 6998 3546 6998 581 6999 584 6999 3546 6999 3546 7000 584 7000 3436 7000 3437 7001 3546 7001 3436 7001 584 7002 583 7002 3436 7002 3436 7003 583 7003 3547 7003 3548 7004 3436 7004 3547 7004 583 7005 3549 7005 3547 7005 3547 7006 3549 7006 3550 7006 3435 7007 3547 7007 3550 7007 3549 7008 585 7008 3550 7008 3550 7009 585 7009 3551 7009 3552 7010 3550 7010 3551 7010 585 7011 3553 7011 3551 7011 3551 7012 3553 7012 3554 7012 3874 7013 3551 7013 3554 7013 3553 7014 3556 7014 3554 7014 3554 7015 3556 7015 3555 7015 3872 7016 3554 7016 3555 7016 3556 7017 586 7017 3555 7017 3555 7018 586 7018 3558 7018 3873 7019 3555 7019 3558 7019 586 7020 3557 7020 3558 7020 3558 7021 3557 7021 3562 7021 3434 7022 3558 7022 3562 7022 3557 7023 3559 7023 3562 7023 3562 7024 3559 7024 3560 7024 3561 7025 3562 7025 3560 7025 3559 7026 3563 7026 3560 7026 3560 7027 3563 7027 3564 7027 3433 7028 3560 7028 3564 7028 3563 7029 587 7029 3564 7029 3564 7030 587 7030 3565 7030 3432 7031 3564 7031 3565 7031 587 7032 3567 7032 3565 7032 3565 7033 3567 7033 3566 7033 3431 7034 3565 7034 3566 7034 3567 7035 588 7035 3566 7035 3566 7036 588 7036 3429 7036 3871 7037 3566 7037 3429 7037 588 7038 3568 7038 3429 7038 3429 7039 3568 7039 3569 7039 3430 7040 3429 7040 3569 7040 3568 7041 3570 7041 3569 7041 3569 7042 3570 7042 3571 7042 3428 7043 3569 7043 3571 7043 3570 7044 3572 7044 3571 7044 3571 7045 3572 7045 3427 7045 3869 7046 3571 7046 3427 7046 3572 7047 589 7047 3427 7047 3427 7048 589 7048 3573 7048 3426 7049 3427 7049 3573 7049 589 7050 590 
7050 3573 7050 3573 7051 590 7051 3574 7051 3875 7052 3573 7052 3574 7052 590 7053 591 7053 3574 7053 3574 7054 591 7054 3424 7054 3877 7055 3574 7055 3424 7055 591 7056 3575 7056 3424 7056 3424 7057 3575 7057 3576 7057 3425 7058 3424 7058 3576 7058 3575 7059 592 7059 3576 7059 3576 7060 592 7060 3577 7060 3906 7061 3576 7061 3577 7061 592 7062 593 7062 3577 7062 3577 7063 593 7063 3578 7063 3423 7064 3577 7064 3578 7064 593 7065 594 7065 3578 7065 3578 7066 594 7066 3579 7066 3880 7067 3578 7067 3579 7067 594 7068 595 7068 3579 7068 3579 7069 595 7069 3581 7069 3422 7070 3579 7070 3581 7070 595 7071 682 7071 3581 7071 3581 7072 682 7072 3580 7072 3881 7073 3581 7073 3580 7073 682 7074 3583 7074 3580 7074 3580 7075 3583 7075 3582 7075 3421 7076 3580 7076 3582 7076 3583 7077 3584 7077 3582 7077 3582 7078 3584 7078 3586 7078 3882 7079 3582 7079 3586 7079 3584 7080 683 7080 3586 7080 3586 7081 683 7081 3587 7081 3585 7082 3586 7082 3587 7082 683 7083 3588 7083 3587 7083 3587 7084 3588 7084 3589 7084 3884 7085 3587 7085 3589 7085 3588 7086 684 7086 3589 7086 3589 7087 684 7087 3590 7087 3420 7088 3589 7088 3590 7088 684 7089 685 7089 3590 7089 3590 7090 685 7090 3591 7090 3592 7091 3590 7091 3591 7091 685 7092 3594 7092 3591 7092 3591 7093 3594 7093 3593 7093 3419 7094 3591 7094 3593 7094 3594 7095 686 7095 3593 7095 3593 7096 686 7096 3418 7096 3595 7097 3593 7097 3418 7097 686 7098 687 7098 3418 7098 3418 7099 687 7099 3417 7099 3909 7100 3418 7100 3417 7100 687 7101 3596 7101 3417 7101 3417 7102 3596 7102 3597 7102 3911 7103 3417 7103 3597 7103 4041 7104 3602 7104 3994 7104 4041 7105 3598 7105 3602 7105 4041 7106 3599 7106 3598 7106 3598 7107 3599 7107 3600 7107 3601 7108 3600 7108 6294 7108 3601 7109 3598 7109 3600 7109 3601 7110 6296 7110 3598 7110 3598 7111 6296 7111 3602 7111 3602 7112 6296 7112 6297 7112 3774 7113 6297 7113 3603 7113 3773 7114 3603 7114 6298 7114 3771 7115 6298 7115 3605 7115 3604 7116 3605 7116 3606 7116 3769 7117 3606 7117 3768 7117 3767 7118 3768 7118 6206 7118 3766 7119 6206 7119 3607 7119 3764 7120 3607 7120 6205 7120 3762 7121 6205 7121 3608 7121 3761 7122 3608 7122 6203 7122 3759 7123 6203 7123 3757 7123 3609 7124 3757 7124 3756 7124 3753 7125 3756 7125 3610 7125 3754 7126 3610 7126 6202 7126 3751 7127 6202 7127 6201 7127 3750 7128 6201 7128 6200 7128 3749 7129 6200 7129 3611 7129 3748 7130 3611 7130 3612 7130 3747 7131 3612 7131 3613 7131 3745 7132 3613 7132 3744 7132 3743 7133 3744 7133 3741 7133 3739 7134 3741 7134 6199 7134 3740 7135 6199 7135 6198 7135 3736 7136 6198 7136 3614 7136 3735 7137 3614 7137 3615 7137 3733 7138 3615 7138 3616 7138 3731 7139 3616 7139 6197 7139 3730 7140 6197 7140 3618 7140 3617 7141 3618 7141 6196 7141 3728 7142 6196 7142 6195 7142 3727 7143 6195 7143 6194 7143 3725 7144 6194 7144 3724 7144 3722 7145 3724 7145 3619 7145 3720 7146 3619 7146 3718 7146 3719 7147 3718 7147 3620 7147 3621 7148 3620 7148 3622 7148 3716 7149 3622 7149 3623 7149 3715 7150 3623 7150 3624 7150 3714 7151 3624 7151 6299 7151 3712 7152 6299 7152 6274 7152 3708 7153 6274 7153 3709 7153 3707 7154 3709 7154 3706 7154 3704 7155 3706 7155 3703 7155 3702 7156 3703 7156 3625 7156 3699 7157 3625 7157 3627 7157 3626 7158 3627 7158 6301 7158 3698 7159 6301 7159 3628 7159 3629 7160 3628 7160 6303 7160 3695 7161 6303 7161 3631 7161 3630 7162 3631 7162 6304 7162 3632 7163 6304 7163 6305 7163 3691 7164 6305 7164 6306 7164 3689 7165 6306 7165 3633 7165 3688 7166 3633 7166 6307 7166 3685 7167 6307 7167 6280 7167 3686 7168 6280 7168 6309 7168 3684 7169 6309 7169 6310 7169 3683 
7170 6310 7170 3682 7170 3680 7171 3682 7171 6281 7171 3679 7172 6281 7172 3678 7172 3677 7173 3678 7173 6284 7173 3634 7174 6284 7174 6283 7174 3674 7175 6283 7175 6285 7175 3673 7176 6285 7176 3671 7176 3670 7177 3671 7177 6286 7177 3669 7178 6286 7178 3635 7178 3667 7179 3635 7179 6288 7179 3636 7180 6288 7180 6287 7180 3663 7181 6287 7181 3637 7181 3662 7182 3637 7182 3661 7182 3658 7183 3661 7183 3638 7183 3659 7184 3638 7184 3639 7184 3657 7185 3639 7185 6289 7185 3656 7186 6289 7186 3640 7186 3641 7187 3640 7187 3642 7187 3654 7188 3642 7188 6290 7188 3643 7189 6290 7189 3652 7189 3651 7190 3652 7190 6291 7190 3650 7191 6291 7191 6292 7191 3644 7192 6292 7192 6293 7192 3645 7193 6293 7193 3646 7193 3648 7194 3646 7194 6295 7194 3647 7195 6295 7195 6294 7195 3600 7196 3647 7196 6294 7196 3600 7197 4039 7197 3647 7197 3600 7198 3599 7198 4039 7198 4039 7199 4040 7199 3647 7199 3647 7200 4040 7200 3648 7200 6295 7201 3647 7201 3648 7201 4040 7202 4038 7202 3648 7202 3648 7203 4038 7203 3645 7203 3646 7204 3648 7204 3645 7204 4038 7205 4037 7205 3645 7205 3645 7206 4037 7206 3644 7206 6293 7207 3645 7207 3644 7207 4037 7208 3649 7208 3644 7208 3644 7209 3649 7209 3650 7209 6292 7210 3644 7210 3650 7210 3649 7211 4036 7211 3650 7211 3650 7212 4036 7212 3651 7212 6291 7213 3650 7213 3651 7213 4036 7214 3653 7214 3651 7214 3651 7215 3653 7215 3643 7215 3652 7216 3651 7216 3643 7216 3653 7217 4035 7217 3643 7217 3643 7218 4035 7218 3654 7218 6290 7219 3643 7219 3654 7219 4035 7220 4034 7220 3654 7220 3654 7221 4034 7221 3641 7221 3642 7222 3654 7222 3641 7222 4034 7223 3655 7223 3641 7223 3641 7224 3655 7224 3656 7224 3640 7225 3641 7225 3656 7225 3655 7226 4033 7226 3656 7226 3656 7227 4033 7227 3657 7227 6289 7228 3656 7228 3657 7228 4033 7229 4032 7229 3657 7229 3657 7230 4032 7230 3659 7230 3639 7231 3657 7231 3659 7231 4032 7232 4031 7232 3659 7232 3659 7233 4031 7233 3658 7233 3638 7234 3659 7234 3658 7234 4031 7235 3660 7235 3658 7235 3658 7236 3660 7236 3662 7236 3661 7237 3658 7237 3662 7237 3660 7238 3664 7238 3662 7238 3662 7239 3664 7239 3663 7239 3637 7240 3662 7240 3663 7240 3664 7241 3665 7241 3663 7241 3663 7242 3665 7242 3636 7242 6287 7243 3663 7243 3636 7243 3665 7244 3666 7244 3636 7244 3636 7245 3666 7245 3667 7245 6288 7246 3636 7246 3667 7246 3666 7247 4030 7247 3667 7247 3667 7248 4030 7248 3669 7248 3635 7249 3667 7249 3669 7249 4030 7250 3668 7250 3669 7250 3669 7251 3668 7251 3670 7251 6286 7252 3669 7252 3670 7252 3668 7253 4029 7253 3670 7253 3670 7254 4029 7254 3673 7254 3671 7255 3670 7255 3673 7255 4029 7256 3672 7256 3673 7256 3673 7257 3672 7257 3674 7257 6285 7258 3673 7258 3674 7258 3672 7259 3675 7259 3674 7259 3674 7260 3675 7260 3634 7260 6283 7261 3674 7261 3634 7261 3675 7262 3676 7262 3634 7262 3634 7263 3676 7263 3677 7263 6284 7264 3634 7264 3677 7264 3676 7265 4028 7265 3677 7265 3677 7266 4028 7266 3679 7266 3678 7267 3677 7267 3679 7267 4028 7268 3681 7268 3679 7268 3679 7269 3681 7269 3680 7269 6281 7270 3679 7270 3680 7270 3681 7271 3961 7271 3680 7271 3680 7272 3961 7272 3683 7272 3682 7273 3680 7273 3683 7273 3961 7274 3962 7274 3683 7274 3683 7275 3962 7275 3684 7275 6310 7276 3683 7276 3684 7276 3962 7277 3963 7277 3684 7277 3684 7278 3963 7278 3686 7278 6309 7279 3684 7279 3686 7279 3963 7280 3965 7280 3686 7280 3686 7281 3965 7281 3685 7281 6280 7282 3686 7282 3685 7282 3965 7283 3996 7283 3685 7283 3685 7284 3996 7284 3688 7284 6307 7285 3685 7285 3688 7285 3996 7286 3687 7286 3688 7286 3688 7287 3687 7287 3689 7287 3633 7288 3688 7288 
3689 7288 3687 7289 3690 7289 3689 7289 3689 7290 3690 7290 3691 7290 6306 7291 3689 7291 3691 7291 3690 7292 3692 7292 3691 7292 3691 7293 3692 7293 3632 7293 6305 7294 3691 7294 3632 7294 3692 7295 3693 7295 3632 7295 3632 7296 3693 7296 3630 7296 6304 7297 3632 7297 3630 7297 3693 7298 3694 7298 3630 7298 3630 7299 3694 7299 3695 7299 3631 7300 3630 7300 3695 7300 3694 7301 3696 7301 3695 7301 3695 7302 3696 7302 3629 7302 6303 7303 3695 7303 3629 7303 3696 7304 3968 7304 3629 7304 3629 7305 3968 7305 3698 7305 3628 7306 3629 7306 3698 7306 3968 7307 3697 7307 3698 7307 3698 7308 3697 7308 3626 7308 6301 7309 3698 7309 3626 7309 3697 7310 4002 7310 3626 7310 3626 7311 4002 7311 3699 7311 3627 7312 3626 7312 3699 7312 4002 7313 3700 7313 3699 7313 3699 7314 3700 7314 3702 7314 3625 7315 3699 7315 3702 7315 3700 7316 3701 7316 3702 7316 3702 7317 3701 7317 3704 7317 3703 7318 3702 7318 3704 7318 3701 7319 3705 7319 3704 7319 3704 7320 3705 7320 3707 7320 3706 7321 3704 7321 3707 7321 3705 7322 3710 7322 3707 7322 3707 7323 3710 7323 3708 7323 3709 7324 3707 7324 3708 7324 3710 7325 3711 7325 3708 7325 3708 7326 3711 7326 3712 7326 6274 7327 3708 7327 3712 7327 3711 7328 3713 7328 3712 7328 3712 7329 3713 7329 3714 7329 6299 7330 3712 7330 3714 7330 3713 7331 3973 7331 3714 7331 3714 7332 3973 7332 3715 7332 3624 7333 3714 7333 3715 7333 3973 7334 4004 7334 3715 7334 3715 7335 4004 7335 3716 7335 3623 7336 3715 7336 3716 7336 4004 7337 4005 7337 3716 7337 3716 7338 4005 7338 3621 7338 3622 7339 3716 7339 3621 7339 4005 7340 3717 7340 3621 7340 3621 7341 3717 7341 3719 7341 3620 7342 3621 7342 3719 7342 3717 7343 3975 7343 3719 7343 3719 7344 3975 7344 3720 7344 3718 7345 3719 7345 3720 7345 3975 7346 3721 7346 3720 7346 3720 7347 3721 7347 3722 7347 3619 7348 3720 7348 3722 7348 3721 7349 3723 7349 3722 7349 3722 7350 3723 7350 3725 7350 3724 7351 3722 7351 3725 7351 3723 7352 3726 7352 3725 7352 3725 7353 3726 7353 3727 7353 6194 7354 3725 7354 3727 7354 3726 7355 4009 7355 3727 7355 3727 7356 4009 7356 3728 7356 6195 7357 3727 7357 3728 7357 4009 7358 3729 7358 3728 7358 3728 7359 3729 7359 3617 7359 6196 7360 3728 7360 3617 7360 3729 7361 3979 7361 3617 7361 3617 7362 3979 7362 3730 7362 3618 7363 3617 7363 3730 7363 3979 7364 3981 7364 3730 7364 3730 7365 3981 7365 3731 7365 6197 7366 3730 7366 3731 7366 3981 7367 3732 7367 3731 7367 3731 7368 3732 7368 3733 7368 3616 7369 3731 7369 3733 7369 3732 7370 3982 7370 3733 7370 3733 7371 3982 7371 3735 7371 3615 7372 3733 7372 3735 7372 3982 7373 3734 7373 3735 7373 3735 7374 3734 7374 3736 7374 3614 7375 3735 7375 3736 7375 3734 7376 3737 7376 3736 7376 3736 7377 3737 7377 3740 7377 6198 7378 3736 7378 3740 7378 3737 7379 3738 7379 3740 7379 3740 7380 3738 7380 3739 7380 6199 7381 3740 7381 3739 7381 3738 7382 3983 7382 3739 7382 3739 7383 3983 7383 3743 7383 3741 7384 3739 7384 3743 7384 3983 7385 3742 7385 3743 7385 3743 7386 3742 7386 3745 7386 3744 7387 3743 7387 3745 7387 3742 7388 3746 7388 3745 7388 3745 7389 3746 7389 3747 7389 3613 7390 3745 7390 3747 7390 3746 7391 3984 7391 3747 7391 3747 7392 3984 7392 3748 7392 3612 7393 3747 7393 3748 7393 3984 7394 3986 7394 3748 7394 3748 7395 3986 7395 3749 7395 3611 7396 3748 7396 3749 7396 3986 7397 3985 7397 3749 7397 3749 7398 3985 7398 3750 7398 6200 7399 3749 7399 3750 7399 3985 7400 3987 7400 3750 7400 3750 7401 3987 7401 3751 7401 6201 7402 3750 7402 3751 7402 3987 7403 3752 7403 3751 7403 3751 7404 3752 7404 3754 7404 6202 7405 3751 7405 3754 7405 3752 7406 3988 7406 3754 7406 3754 
7407 3988 7407 3753 7407 3610 7408 3754 7408 3753 7408 3988 7409 3755 7409 3753 7409 3753 7410 3755 7410 3609 7410 3756 7411 3753 7411 3609 7411 3755 7412 3758 7412 3609 7412 3609 7413 3758 7413 3759 7413 3757 7414 3609 7414 3759 7414 3758 7415 3760 7415 3759 7415 3759 7416 3760 7416 3761 7416 6203 7417 3759 7417 3761 7417 3760 7418 3989 7418 3761 7418 3761 7419 3989 7419 3762 7419 3608 7420 3761 7420 3762 7420 3989 7421 3763 7421 3762 7421 3762 7422 3763 7422 3764 7422 6205 7423 3762 7423 3764 7423 3763 7424 3765 7424 3764 7424 3764 7425 3765 7425 3766 7425 3607 7426 3764 7426 3766 7426 3765 7427 3990 7427 3766 7427 3766 7428 3990 7428 3767 7428 6206 7429 3766 7429 3767 7429 3990 7430 3770 7430 3767 7430 3767 7431 3770 7431 3769 7431 3768 7432 3767 7432 3769 7432 3770 7433 3991 7433 3769 7433 3769 7434 3991 7434 3604 7434 3606 7435 3769 7435 3604 7435 3991 7436 3993 7436 3604 7436 3604 7437 3993 7437 3771 7437 3605 7438 3604 7438 3771 7438 3993 7439 3772 7439 3771 7439 3771 7440 3772 7440 3773 7440 6298 7441 3771 7441 3773 7441 3772 7442 3995 7442 3773 7442 3773 7443 3995 7443 3774 7443 3603 7444 3773 7444 3774 7444 3995 7445 3994 7445 3774 7445 3774 7446 3994 7446 3602 7446 6297 7447 3774 7447 3602 7447 3238 7448 3944 7448 3775 7448 3775 7449 3944 7449 3776 7449 3776 7450 3944 7450 3101 7450 3101 7451 3944 7451 3777 7451 3777 7452 3944 7452 3102 7452 3102 7453 3944 7453 3778 7453 3778 7454 3944 7454 3105 7454 3105 7455 3944 7455 3108 7455 3108 7456 3944 7456 3779 7456 3779 7457 3944 7457 3112 7457 3112 7458 3944 7458 3780 7458 3780 7459 3944 7459 3781 7459 3781 7460 3944 7460 3116 7460 3116 7461 3944 7461 3117 7461 3117 7462 3944 7462 3120 7462 3120 7463 3944 7463 3123 7463 3123 7464 3944 7464 3125 7464 3125 7465 3944 7465 3782 7465 3782 7466 3944 7466 3127 7466 3127 7467 3944 7467 3129 7467 3129 7468 3944 7468 3131 7468 3131 7469 3944 7469 3132 7469 3132 7470 3944 7470 3783 7470 3783 7471 3944 7471 3135 7471 3135 7472 3944 7472 3784 7472 3784 7473 3944 7473 3785 7473 3785 7474 3944 7474 3140 7474 3140 7475 3944 7475 3142 7475 3142 7476 3944 7476 3144 7476 3144 7477 3944 7477 3786 7477 3786 7478 3944 7478 3787 7478 3787 7479 3944 7479 3148 7479 3148 7480 3944 7480 3151 7480 3151 7481 3944 7481 3788 7481 3788 7482 3944 7482 3153 7482 3153 7483 3944 7483 3789 7483 3789 7484 3944 7484 3791 7484 3791 7485 3944 7485 3792 7485 3158 7486 3792 7486 3790 7486 3158 7487 3791 7487 3792 7487 3794 7488 3793 7488 3792 7488 3794 7489 3185 7489 3793 7489 3794 7490 3795 7490 3185 7490 3794 7491 3188 7491 3795 7491 3794 7492 3189 7492 3188 7492 3794 7493 3192 7493 3189 7493 3794 7494 3195 7494 3192 7494 3794 7495 3796 7495 3195 7495 3794 7496 3197 7496 3796 7496 3794 7497 3198 7497 3197 7497 3794 7498 3797 7498 3198 7498 3794 7499 3205 7499 3797 7499 3794 7500 3798 7500 3205 7500 3794 7501 3208 7501 3798 7501 3794 7502 3210 7502 3208 7502 3794 7503 3799 7503 3210 7503 3794 7504 3212 7504 3799 7504 3794 7505 3215 7505 3212 7505 3794 7506 3216 7506 3215 7506 3794 7507 3800 7507 3216 7507 3794 7508 3801 7508 3800 7508 3794 7509 3221 7509 3801 7509 3794 7510 3802 7510 3221 7510 3794 7511 3804 7511 3802 7511 3794 7512 3803 7512 3804 7512 3794 7513 3805 7513 3803 7513 3794 7514 3807 7514 3805 7514 3794 7515 3806 7515 3807 7515 3794 7516 3230 7516 3806 7516 3794 7517 3231 7517 3230 7517 3794 7518 3234 7518 3231 7518 3794 7519 3808 7519 3234 7519 3794 7520 3238 7520 3808 7520 3793 7521 3183 7521 3792 7521 3792 7522 3183 7522 3179 7522 3809 7523 3792 7523 3179 7523 3809 7524 3177 7524 3792 7524 3792 7525 3177 7525 
[Omitted: several thousand whitespace-separated integer indices, apparently polygon/vertex index data from a 3D mesh asset added in this changeset; the raw data is not reproduced here.]
9778 4988 9779 5652 9779 4989 9779 4914 9780 4988 9780 4989 9780 5652 9781 5651 9781 4989 9781 4989 9782 5651 9782 4990 9782 6068 9783 4989 9783 4990 9783 5651 9784 4991 9784 4990 9784 4990 9785 4991 9785 4993 9785 4992 9786 4990 9786 4993 9786 4991 9787 4994 9787 4993 9787 4993 9788 4994 9788 4996 9788 4913 9789 4993 9789 4996 9789 4994 9790 4995 9790 4996 9790 4996 9791 4995 9791 4911 9791 4912 9792 4996 9792 4911 9792 4995 9793 4997 9793 4911 9793 4911 9794 4997 9794 4998 9794 4910 9795 4911 9795 4998 9795 4997 9796 4999 9796 4998 9796 4998 9797 4999 9797 4908 9797 4909 9798 4998 9798 4908 9798 4999 9799 5000 9799 4908 9799 4908 9800 5000 9800 5002 9800 5001 9801 4908 9801 5002 9801 5000 9802 5003 9802 5002 9802 5002 9803 5003 9803 4906 9803 4907 9804 5002 9804 4906 9804 5003 9805 5650 9805 4906 9805 4906 9806 5650 9806 4904 9806 4905 9807 4906 9807 4904 9807 5650 9808 5649 9808 4904 9808 4904 9809 5649 9809 4903 9809 5004 9810 4904 9810 4903 9810 5649 9811 5005 9811 4903 9811 4903 9812 5005 9812 5007 9812 4902 9813 4903 9813 5007 9813 5005 9814 5006 9814 5007 9814 5007 9815 5006 9815 4901 9815 6069 9816 5007 9816 4901 9816 5006 9817 5648 9817 4901 9817 4901 9818 5648 9818 4900 9818 6070 9819 4901 9819 4900 9819 5648 9820 5008 9820 4900 9820 4900 9821 5008 9821 5009 9821 6071 9822 4900 9822 5009 9822 5008 9823 5633 9823 5009 9823 5009 9824 5633 9824 5010 9824 6072 9825 5009 9825 5010 9825 5633 9826 5011 9826 5010 9826 5010 9827 5011 9827 5012 9827 4899 9828 5010 9828 5012 9828 5011 9829 5634 9829 5012 9829 5012 9830 5634 9830 4898 9830 5013 9831 5012 9831 4898 9831 5634 9832 5635 9832 4898 9832 4898 9833 5635 9833 4896 9833 5014 9834 4898 9834 4896 9834 5635 9835 5636 9835 4896 9835 4896 9836 5636 9836 5015 9836 4897 9837 4896 9837 5015 9837 5636 9838 5016 9838 5015 9838 5015 9839 5016 9839 4894 9839 4895 9840 5015 9840 4894 9840 5016 9841 5018 9841 4894 9841 4894 9842 5018 9842 5017 9842 4893 9843 4894 9843 5017 9843 5018 9844 5019 9844 5017 9844 5017 9845 5019 9845 5021 9845 5020 9846 5017 9846 5021 9846 5019 9847 5022 9847 5021 9847 5021 9848 5022 9848 5023 9848 4892 9849 5021 9849 5023 9849 5022 9850 5024 9850 5023 9850 5023 9851 5024 9851 5025 9851 6073 9852 5023 9852 5025 9852 5024 9853 5026 9853 5025 9853 5025 9854 5026 9854 5030 9854 6074 9855 5025 9855 5030 9855 5026 9856 5027 9856 5030 9856 5030 9857 5027 9857 5028 9857 5029 9858 5030 9858 5028 9858 5027 9859 5637 9859 5028 9859 5028 9860 5637 9860 5031 9860 4891 9861 5028 9861 5031 9861 5637 9862 5034 9862 5031 9862 5031 9863 5034 9863 5033 9863 5032 9864 5031 9864 5033 9864 5034 9865 5638 9865 5033 9865 5033 9866 5638 9866 5035 9866 4890 9867 5033 9867 5035 9867 5638 9868 5036 9868 5035 9868 5035 9869 5036 9869 5037 9869 6075 9870 5035 9870 5037 9870 5036 9871 5038 9871 5037 9871 5037 9872 5038 9872 5040 9872 5039 9873 5037 9873 5040 9873 5038 9874 5041 9874 5040 9874 5040 9875 5041 9875 4889 9875 6076 9876 5040 9876 4889 9876 5041 9877 5639 9877 4889 9877 4889 9878 5639 9878 5042 9878 4888 9879 4889 9879 5042 9879 5639 9880 5043 9880 5042 9880 5042 9881 5043 9881 5045 9881 4887 9882 5042 9882 5045 9882 5043 9883 5044 9883 5045 9883 5045 9884 5044 9884 5046 9884 6077 9885 5045 9885 5046 9885 5044 9886 5047 9886 5046 9886 5046 9887 5047 9887 5049 9887 4886 9888 5046 9888 5049 9888 5047 9889 5640 9889 5049 9889 5049 9890 5640 9890 5048 9890 6078 9891 5049 9891 5048 9891 5640 9892 5050 9892 5048 9892 5048 9893 5050 9893 4883 9893 4885 9894 5048 9894 4883 9894 5050 9895 5641 9895 4883 9895 4883 9896 5641 9896 4882 9896 4884 9897 
4883 9897 4882 9897 5641 9898 5642 9898 4882 9898 4882 9899 5642 9899 5051 9899 5052 9900 4882 9900 5051 9900 5642 9901 5053 9901 5051 9901 5051 9902 5053 9902 5054 9902 4881 9903 5051 9903 5054 9903 5053 9904 5644 9904 5054 9904 5054 9905 5644 9905 4879 9905 5055 9906 5054 9906 4879 9906 5644 9907 5643 9907 4879 9907 4879 9908 5643 9908 5056 9908 4880 9909 4879 9909 5056 9909 5643 9910 5057 9910 5056 9910 5056 9911 5057 9911 5058 9911 6080 9912 5056 9912 5058 9912 5057 9913 5060 9913 5058 9913 5058 9914 5060 9914 5062 9914 5059 9915 5058 9915 5062 9915 5060 9916 5646 9916 5062 9916 5062 9917 5646 9917 5061 9917 6081 9918 5062 9918 5061 9918 5646 9919 5063 9919 5061 9919 5061 9920 5063 9920 5064 9920 4878 9921 5061 9921 5064 9921 5063 9922 5647 9922 5064 9922 5064 9923 5647 9923 5066 9923 5065 9924 5064 9924 5066 9924 5647 9925 5068 9925 5066 9925 5066 9926 5068 9926 4877 9926 5067 9927 5066 9927 4877 9927 5068 9928 5069 9928 4877 9928 4877 9929 5069 9929 5071 9929 5070 9930 4877 9930 5071 9930 5069 9931 5072 9931 5071 9931 5071 9932 5072 9932 5074 9932 5073 9933 5071 9933 5074 9933 5072 9934 5848 9934 5074 9934 5074 9935 5848 9935 4871 9935 4876 9936 5074 9936 4871 9936 5077 9937 5075 9937 5076 9937 5077 9938 5079 9938 5075 9938 5077 9939 5688 9939 5079 9939 5079 9940 5688 9940 5078 9940 6152 9941 5078 9941 5129 9941 6152 9942 5079 9942 5078 9942 6152 9943 6151 9943 5079 9943 5079 9944 6151 9944 5075 9944 5075 9945 6151 9945 6150 9945 5257 9946 6150 9946 6149 9946 5080 9947 6149 9947 5081 9947 5254 9948 5081 9948 5082 9948 5253 9949 5082 9949 6148 9949 5251 9950 6148 9950 5250 9950 5249 9951 5250 9951 6147 9951 5247 9952 6147 9952 6146 9952 5246 9953 6146 9953 6145 9953 5083 9954 6145 9954 6144 9954 5084 9955 6144 9955 6143 9955 5243 9956 6143 9956 5242 9956 5239 9957 5242 9957 5240 9957 5236 9958 5240 9958 6142 9958 5235 9959 6142 9959 6141 9959 5085 9960 6141 9960 6140 9960 5233 9961 6140 9961 6139 9961 5231 9962 6139 9962 5086 9962 5230 9963 5086 9963 6138 9963 5228 9964 6138 9964 5227 9964 5226 9965 5227 9965 6137 9965 5225 9966 6137 9966 6136 9966 5087 9967 6136 9967 5088 9967 5222 9968 5088 9968 6134 9968 5221 9969 6134 9969 5220 9969 5089 9970 5220 9970 5090 9970 5091 9971 5090 9971 6133 9971 5092 9972 6133 9972 5093 9972 5215 9973 5093 9973 5214 9973 5094 9974 5214 9974 6132 9974 5213 9975 6132 9975 5095 9975 5211 9976 5095 9976 5096 9976 5209 9977 5096 9977 5097 9977 5208 9978 5097 9978 5098 9978 5206 9979 5098 9979 6131 9979 5204 9980 6131 9980 6130 9980 5203 9981 6130 9981 5201 9981 5197 9982 5201 9982 5198 9982 5195 9983 5198 9983 5194 9983 5099 9984 5194 9984 5193 9984 5100 9985 5193 9985 6129 9985 5192 9986 6129 9986 5190 9986 5189 9987 5190 9987 5101 9987 5102 9988 5101 9988 5103 9988 5187 9989 5103 9989 6125 9989 5185 9990 6125 9990 6128 9990 5184 9991 6128 9991 6126 9991 5183 9992 6126 9992 6127 9992 5179 9993 6127 9993 5104 9993 5180 9994 5104 9994 6167 9994 5105 9995 6167 9995 5176 9995 5177 9996 5176 9996 6168 9996 5174 9997 6168 9997 5106 9997 5107 9998 5106 9998 5108 9998 5173 9999 5108 9999 6165 9999 5170 10000 6165 10000 6164 10000 5169 10001 6164 10001 5109 10001 5167 10002 5109 10002 5110 10002 5111 10003 5110 10003 5112 10003 5165 10004 5112 10004 5164 10004 5163 10005 5164 10005 5113 10005 5160 10006 5113 10006 5115 10006 5114 10007 5115 10007 5116 10007 5158 10008 5116 10008 6163 10008 5117 10009 6163 10009 5156 10009 5118 10010 5156 10010 6162 10010 5119 10011 6162 10011 5155 10011 5152 10012 5155 10012 6161 10012 5153 10013 6161 10013 5120 10013 5121 10014 
5120 10014 5122 10014 5123 10015 5122 10015 6160 10015 5148 10016 6160 10016 6158 10016 5147 10017 6158 10017 5124 10017 5146 10018 5124 10018 6157 10018 5144 10019 6157 10019 5125 10019 5142 10020 5125 10020 6156 10020 5140 10021 6156 10021 6155 10021 5126 10022 6155 10022 6154 10022 5138 10023 6154 10023 6153 10023 5137 10024 6153 10024 5127 10024 5134 10025 5127 10025 5135 10025 5136 10026 5135 10026 5132 10026 5130 10027 5132 10027 5128 10027 5131 10028 5128 10028 5129 10028 5078 10029 5131 10029 5129 10029 5078 10030 5687 10030 5131 10030 5078 10031 5688 10031 5687 10031 5687 10032 5686 10032 5131 10032 5131 10033 5686 10033 5130 10033 5128 10034 5131 10034 5130 10034 5686 10035 5685 10035 5130 10035 5130 10036 5685 10036 5136 10036 5132 10037 5130 10037 5136 10037 5685 10038 5133 10038 5136 10038 5136 10039 5133 10039 5134 10039 5135 10040 5136 10040 5134 10040 5133 10041 5684 10041 5134 10041 5134 10042 5684 10042 5137 10042 5127 10043 5134 10043 5137 10043 5684 10044 5683 10044 5137 10044 5137 10045 5683 10045 5138 10045 6153 10046 5137 10046 5138 10046 5683 10047 5682 10047 5138 10047 5138 10048 5682 10048 5126 10048 6154 10049 5138 10049 5126 10049 5682 10050 5139 10050 5126 10050 5126 10051 5139 10051 5140 10051 6155 10052 5126 10052 5140 10052 5139 10053 5141 10053 5140 10053 5140 10054 5141 10054 5142 10054 6156 10055 5140 10055 5142 10055 5141 10056 5143 10056 5142 10056 5142 10057 5143 10057 5144 10057 5125 10058 5142 10058 5144 10058 5143 10059 5145 10059 5144 10059 5144 10060 5145 10060 5146 10060 6157 10061 5144 10061 5146 10061 5145 10062 5681 10062 5146 10062 5146 10063 5681 10063 5147 10063 5124 10064 5146 10064 5147 10064 5681 10065 5680 10065 5147 10065 5147 10066 5680 10066 5148 10066 6158 10067 5147 10067 5148 10067 5680 10068 5149 10068 5148 10068 5148 10069 5149 10069 5123 10069 6160 10070 5148 10070 5123 10070 5149 10071 5150 10071 5123 10071 5123 10072 5150 10072 5121 10072 5122 10073 5123 10073 5121 10073 5150 10074 5679 10074 5121 10074 5121 10075 5679 10075 5153 10075 5120 10076 5121 10076 5153 10076 5679 10077 5151 10077 5153 10077 5153 10078 5151 10078 5152 10078 6161 10079 5153 10079 5152 10079 5151 10080 5154 10080 5152 10080 5152 10081 5154 10081 5119 10081 5155 10082 5152 10082 5119 10082 5154 10083 5678 10083 5119 10083 5119 10084 5678 10084 5118 10084 6162 10085 5119 10085 5118 10085 5678 10086 5677 10086 5118 10086 5118 10087 5677 10087 5117 10087 5156 10088 5118 10088 5117 10088 5677 10089 5157 10089 5117 10089 5117 10090 5157 10090 5158 10090 6163 10091 5117 10091 5158 10091 5157 10092 5159 10092 5158 10092 5158 10093 5159 10093 5114 10093 5116 10094 5158 10094 5114 10094 5159 10095 5161 10095 5114 10095 5114 10096 5161 10096 5160 10096 5115 10097 5114 10097 5160 10097 5161 10098 5162 10098 5160 10098 5160 10099 5162 10099 5163 10099 5113 10100 5160 10100 5163 10100 5162 10101 5676 10101 5163 10101 5163 10102 5676 10102 5165 10102 5164 10103 5163 10103 5165 10103 5676 10104 5675 10104 5165 10104 5165 10105 5675 10105 5111 10105 5112 10106 5165 10106 5111 10106 5675 10107 5166 10107 5111 10107 5111 10108 5166 10108 5167 10108 5110 10109 5111 10109 5167 10109 5166 10110 5168 10110 5167 10110 5167 10111 5168 10111 5169 10111 5109 10112 5167 10112 5169 10112 5168 10113 5171 10113 5169 10113 5169 10114 5171 10114 5170 10114 6164 10115 5169 10115 5170 10115 5171 10116 5673 10116 5170 10116 5170 10117 5673 10117 5173 10117 6165 10118 5170 10118 5173 10118 5673 10119 5172 10119 5173 10119 5173 10120 5172 10120 5107 10120 5108 10121 5173 10121 5107 10121 
5172 10122 5672 10122 5107 10122 5107 10123 5672 10123 5174 10123 5106 10124 5107 10124 5174 10124 5672 10125 5671 10125 5174 10125 5174 10126 5671 10126 5177 10126 6168 10127 5174 10127 5177 10127 5671 10128 5175 10128 5177 10128 5177 10129 5175 10129 5105 10129 5176 10130 5177 10130 5105 10130 5175 10131 5178 10131 5105 10131 5105 10132 5178 10132 5180 10132 6167 10133 5105 10133 5180 10133 5178 10134 5670 10134 5180 10134 5180 10135 5670 10135 5179 10135 5104 10136 5180 10136 5179 10136 5670 10137 5181 10137 5179 10137 5179 10138 5181 10138 5183 10138 6127 10139 5179 10139 5183 10139 5181 10140 5182 10140 5183 10140 5183 10141 5182 10141 5184 10141 6126 10142 5183 10142 5184 10142 5182 10143 5669 10143 5184 10143 5184 10144 5669 10144 5185 10144 6128 10145 5184 10145 5185 10145 5669 10146 5186 10146 5185 10146 5185 10147 5186 10147 5187 10147 6125 10148 5185 10148 5187 10148 5186 10149 5668 10149 5187 10149 5187 10150 5668 10150 5102 10150 5103 10151 5187 10151 5102 10151 5668 10152 5188 10152 5102 10152 5102 10153 5188 10153 5189 10153 5101 10154 5102 10154 5189 10154 5188 10155 5191 10155 5189 10155 5189 10156 5191 10156 5192 10156 5190 10157 5189 10157 5192 10157 5191 10158 5667 10158 5192 10158 5192 10159 5667 10159 5100 10159 6129 10160 5192 10160 5100 10160 5667 10161 5666 10161 5100 10161 5100 10162 5666 10162 5099 10162 5193 10163 5100 10163 5099 10163 5666 10164 5196 10164 5099 10164 5099 10165 5196 10165 5195 10165 5194 10166 5099 10166 5195 10166 5196 10167 5199 10167 5195 10167 5195 10168 5199 10168 5197 10168 5198 10169 5195 10169 5197 10169 5199 10170 5200 10170 5197 10170 5197 10171 5200 10171 5203 10171 5201 10172 5197 10172 5203 10172 5200 10173 5202 10173 5203 10173 5203 10174 5202 10174 5204 10174 6130 10175 5203 10175 5204 10175 5202 10176 5205 10176 5204 10176 5204 10177 5205 10177 5206 10177 6131 10178 5204 10178 5206 10178 5205 10179 5207 10179 5206 10179 5206 10180 5207 10180 5208 10180 5098 10181 5206 10181 5208 10181 5207 10182 5653 10182 5208 10182 5208 10183 5653 10183 5209 10183 5097 10184 5208 10184 5209 10184 5653 10185 5210 10185 5209 10185 5209 10186 5210 10186 5211 10186 5096 10187 5209 10187 5211 10187 5210 10188 5654 10188 5211 10188 5211 10189 5654 10189 5213 10189 5095 10190 5211 10190 5213 10190 5654 10191 5212 10191 5213 10191 5213 10192 5212 10192 5094 10192 6132 10193 5213 10193 5094 10193 5212 10194 5216 10194 5094 10194 5094 10195 5216 10195 5215 10195 5214 10196 5094 10196 5215 10196 5216 10197 5217 10197 5215 10197 5215 10198 5217 10198 5092 10198 5093 10199 5215 10199 5092 10199 5217 10200 5656 10200 5092 10200 5092 10201 5656 10201 5091 10201 6133 10202 5092 10202 5091 10202 5656 10203 5218 10203 5091 10203 5091 10204 5218 10204 5089 10204 5090 10205 5091 10205 5089 10205 5218 10206 5219 10206 5089 10206 5089 10207 5219 10207 5221 10207 5220 10208 5089 10208 5221 10208 5219 10209 5223 10209 5221 10209 5221 10210 5223 10210 5222 10210 6134 10211 5221 10211 5222 10211 5223 10212 5657 10212 5222 10212 5222 10213 5657 10213 5087 10213 5088 10214 5222 10214 5087 10214 5657 10215 5224 10215 5087 10215 5087 10216 5224 10216 5225 10216 6136 10217 5087 10217 5225 10217 5224 10218 5658 10218 5225 10218 5225 10219 5658 10219 5226 10219 6137 10220 5225 10220 5226 10220 5658 10221 5659 10221 5226 10221 5226 10222 5659 10222 5228 10222 5227 10223 5226 10223 5228 10223 5659 10224 5229 10224 5228 10224 5228 10225 5229 10225 5230 10225 6138 10226 5228 10226 5230 10226 5229 10227 5232 10227 5230 10227 5230 10228 5232 10228 5231 10228 5086 10229 5230 10229 
5231 10229 5232 10230 5660 10230 5231 10230 5231 10231 5660 10231 5233 10231 6139 10232 5231 10232 5233 10232 5660 10233 5661 10233 5233 10233 5233 10234 5661 10234 5085 10234 6140 10235 5233 10235 5085 10235 5661 10236 5234 10236 5085 10236 5085 10237 5234 10237 5235 10237 6141 10238 5085 10238 5235 10238 5234 10239 5237 10239 5235 10239 5235 10240 5237 10240 5236 10240 6142 10241 5235 10241 5236 10241 5237 10242 5238 10242 5236 10242 5236 10243 5238 10243 5239 10243 5240 10244 5236 10244 5239 10244 5238 10245 5241 10245 5239 10245 5239 10246 5241 10246 5243 10246 5242 10247 5239 10247 5243 10247 5241 10248 5244 10248 5243 10248 5243 10249 5244 10249 5084 10249 6143 10250 5243 10250 5084 10250 5244 10251 5662 10251 5084 10251 5084 10252 5662 10252 5083 10252 6144 10253 5084 10253 5083 10253 5662 10254 5245 10254 5083 10254 5083 10255 5245 10255 5246 10255 6145 10256 5083 10256 5246 10256 5245 10257 5663 10257 5246 10257 5246 10258 5663 10258 5247 10258 6146 10259 5246 10259 5247 10259 5663 10260 5664 10260 5247 10260 5247 10261 5664 10261 5249 10261 6147 10262 5247 10262 5249 10262 5664 10263 5248 10263 5249 10263 5249 10264 5248 10264 5251 10264 5250 10265 5249 10265 5251 10265 5248 10266 5252 10266 5251 10266 5251 10267 5252 10267 5253 10267 6148 10268 5251 10268 5253 10268 5252 10269 5665 10269 5253 10269 5253 10270 5665 10270 5254 10270 5082 10271 5253 10271 5254 10271 5665 10272 5255 10272 5254 10272 5254 10273 5255 10273 5080 10273 5081 10274 5254 10274 5080 10274 5255 10275 5256 10275 5080 10275 5080 10276 5256 10276 5257 10276 6149 10277 5080 10277 5257 10277 5256 10278 5076 10278 5257 10278 5257 10279 5076 10279 5075 10279 6150 10280 5257 10280 5075 10280 5258 10281 5434 10281 5433 10281 5258 10282 5261 10282 5434 10282 5258 10283 6169 10283 5261 10283 5261 10284 6169 10284 5307 10284 5700 10285 5307 10285 5259 10285 5700 10286 5261 10286 5307 10286 5700 10287 5260 10287 5261 10287 5261 10288 5260 10288 5434 10288 5434 10289 5260 10289 5435 10289 5262 10290 5435 10290 5753 10290 5432 10291 5753 10291 5752 10291 5431 10292 5752 10292 5751 10292 5429 10293 5751 10293 5428 10293 5263 10294 5428 10294 5750 10294 5424 10295 5750 10295 5425 10295 5426 10296 5425 10296 5264 10296 5422 10297 5264 10297 5749 10297 5419 10298 5749 10298 5265 10298 5418 10299 5265 10299 5267 10299 5266 10300 5267 10300 5748 10300 5415 10301 5748 10301 5268 10301 5411 10302 5268 10302 5412 10302 5413 10303 5412 10303 5269 10303 5410 10304 5269 10304 5747 10304 5270 10305 5747 10305 5271 10305 5409 10306 5271 10306 5746 10306 5408 10307 5746 10307 5745 10307 5406 10308 5745 10308 5744 10308 5405 10309 5744 10309 5272 10309 5404 10310 5272 10310 5402 10310 5273 10311 5402 10311 5274 10311 5401 10312 5274 10312 5743 10312 5400 10313 5743 10313 5742 10313 5398 10314 5742 10314 5397 10314 5275 10315 5397 10315 5394 10315 5395 10316 5394 10316 5276 10316 5392 10317 5276 10317 5391 10317 5390 10318 5391 10318 5717 10318 5389 10319 5717 10319 5716 10319 5387 10320 5716 10320 5277 10320 5386 10321 5277 10321 5740 10321 5383 10322 5740 10322 5384 10322 5382 10323 5384 10323 5380 10323 5378 10324 5380 10324 5278 10324 5377 10325 5278 10325 5279 10325 5374 10326 5279 10326 5713 10326 5375 10327 5713 10327 5281 10327 5280 10328 5281 10328 5282 10328 5373 10329 5282 10329 5283 10329 5371 10330 5283 10330 5284 10330 5370 10331 5284 10331 5286 10331 5285 10332 5286 10332 5711 10332 5287 10333 5711 10333 5289 10333 5288 10334 5289 10334 5368 10334 5366 10335 5368 10335 5291 10335 5290 10336 5291 10336 5708 10336 5292 10337 
5708 10337 5365 10337 5364 10338 5365 10338 5707 10338 5363 10339 5707 10339 5361 10339 5362 10340 5361 10340 5734 10340 5359 10341 5734 10341 5733 10341 5358 10342 5733 10342 5704 10342 5357 10343 5704 10343 5293 10343 5354 10344 5293 10344 5352 10344 5353 10345 5352 10345 5349 10345 5350 10346 5349 10346 5702 10346 5348 10347 5702 10347 5730 10347 5294 10348 5730 10348 5729 10348 5344 10349 5729 10349 5701 10349 5343 10350 5701 10350 5342 10350 5341 10351 5342 10351 5689 10351 5340 10352 5689 10352 5690 10352 5295 10353 5690 10353 5691 10353 5337 10354 5691 10354 5692 10354 5335 10355 5692 10355 5693 10355 5296 10356 5693 10356 5332 10356 5331 10357 5332 10357 5330 10357 5329 10358 5330 10358 5297 10358 5327 10359 5297 10359 5325 10359 5298 10360 5325 10360 5299 10360 5300 10361 5299 10361 5301 10361 5323 10362 5301 10362 5302 10362 5321 10363 5302 10363 5694 10363 5303 10364 5694 10364 5304 10364 5317 10365 5304 10365 5318 10365 5319 10366 5318 10366 5695 10366 5316 10367 5695 10367 5697 10367 5314 10368 5697 10368 5696 10368 5311 10369 5696 10369 5698 10369 5310 10370 5698 10370 5305 10370 5309 10371 5305 10371 5699 10371 5306 10372 5699 10372 5259 10372 5307 10373 5306 10373 5259 10373 5307 10374 6180 10374 5306 10374 5307 10375 6169 10375 6180 10375 6180 10376 5308 10376 5306 10376 5306 10377 5308 10377 5309 10377 5699 10378 5306 10378 5309 10378 5308 10379 6179 10379 5309 10379 5309 10380 6179 10380 5310 10380 5305 10381 5309 10381 5310 10381 6179 10382 6178 10382 5310 10382 5310 10383 6178 10383 5311 10383 5698 10384 5310 10384 5311 10384 6178 10385 5312 10385 5311 10385 5311 10386 5312 10386 5314 10386 5696 10387 5311 10387 5314 10387 5312 10388 5313 10388 5314 10388 5314 10389 5313 10389 5316 10389 5697 10390 5314 10390 5316 10390 5313 10391 5315 10391 5316 10391 5316 10392 5315 10392 5319 10392 5695 10393 5316 10393 5319 10393 5315 10394 6177 10394 5319 10394 5319 10395 6177 10395 5317 10395 5318 10396 5319 10396 5317 10396 6177 10397 5320 10397 5317 10397 5317 10398 5320 10398 5303 10398 5304 10399 5317 10399 5303 10399 5320 10400 6176 10400 5303 10400 5303 10401 6176 10401 5321 10401 5694 10402 5303 10402 5321 10402 6176 10403 5322 10403 5321 10403 5321 10404 5322 10404 5323 10404 5302 10405 5321 10405 5323 10405 5322 10406 6174 10406 5323 10406 5323 10407 6174 10407 5300 10407 5301 10408 5323 10408 5300 10408 6174 10409 5324 10409 5300 10409 5300 10410 5324 10410 5298 10410 5299 10411 5300 10411 5298 10411 5324 10412 5326 10412 5298 10412 5298 10413 5326 10413 5327 10413 5325 10414 5298 10414 5327 10414 5326 10415 5328 10415 5327 10415 5327 10416 5328 10416 5329 10416 5297 10417 5327 10417 5329 10417 5328 10418 6173 10418 5329 10418 5329 10419 6173 10419 5331 10419 5330 10420 5329 10420 5331 10420 6173 10421 5333 10421 5331 10421 5331 10422 5333 10422 5296 10422 5332 10423 5331 10423 5296 10423 5333 10424 5334 10424 5296 10424 5296 10425 5334 10425 5335 10425 5693 10426 5296 10426 5335 10426 5334 10427 5336 10427 5335 10427 5335 10428 5336 10428 5337 10428 5692 10429 5335 10429 5337 10429 5336 10430 6172 10430 5337 10430 5337 10431 6172 10431 5295 10431 5691 10432 5337 10432 5295 10432 6172 10433 5338 10433 5295 10433 5295 10434 5338 10434 5340 10434 5690 10435 5295 10435 5340 10435 5338 10436 5339 10436 5340 10436 5340 10437 5339 10437 5341 10437 5689 10438 5340 10438 5341 10438 5339 10439 6171 10439 5341 10439 5341 10440 6171 10440 5343 10440 5342 10441 5341 10441 5343 10441 6171 10442 5345 10442 5343 10442 5343 10443 5345 10443 5344 10443 5701 10444 5343 10444 5344 10444 
5345 10445 5346 10445 5344 10445 5344 10446 5346 10446 5294 10446 5729 10447 5344 10447 5294 10447 5346 10448 5347 10448 5294 10448 5294 10449 5347 10449 5348 10449 5730 10450 5294 10450 5348 10450 5347 10451 6108 10451 5348 10451 5348 10452 6108 10452 5350 10452 5702 10453 5348 10453 5350 10453 6108 10454 6117 10454 5350 10454 5350 10455 6117 10455 5353 10455 5349 10456 5350 10456 5353 10456 6117 10457 5351 10457 5353 10457 5353 10458 5351 10458 5354 10458 5352 10459 5353 10459 5354 10459 5351 10460 5355 10460 5354 10460 5354 10461 5355 10461 5357 10461 5293 10462 5354 10462 5357 10462 5355 10463 5356 10463 5357 10463 5357 10464 5356 10464 5358 10464 5704 10465 5357 10465 5358 10465 5356 10466 6111 10466 5358 10466 5358 10467 6111 10467 5359 10467 5733 10468 5358 10468 5359 10468 6111 10469 6113 10469 5359 10469 5359 10470 6113 10470 5362 10470 5734 10471 5359 10471 5362 10471 6113 10472 5360 10472 5362 10472 5362 10473 5360 10473 5363 10473 5361 10474 5362 10474 5363 10474 5360 10475 6119 10475 5363 10475 5363 10476 6119 10476 5364 10476 5707 10477 5363 10477 5364 10477 6119 10478 6120 10478 5364 10478 5364 10479 6120 10479 5292 10479 5365 10480 5364 10480 5292 10480 6120 10481 6121 10481 5292 10481 5292 10482 6121 10482 5290 10482 5708 10483 5292 10483 5290 10483 6121 10484 5367 10484 5290 10484 5290 10485 5367 10485 5366 10485 5291 10486 5290 10486 5366 10486 5367 10487 6122 10487 5366 10487 5366 10488 6122 10488 5288 10488 5368 10489 5366 10489 5288 10489 6122 10490 6123 10490 5288 10490 5288 10491 6123 10491 5287 10491 5289 10492 5288 10492 5287 10492 6123 10493 6124 10493 5287 10493 5287 10494 6124 10494 5285 10494 5711 10495 5287 10495 5285 10495 6124 10496 5369 10496 5285 10496 5285 10497 5369 10497 5370 10497 5286 10498 5285 10498 5370 10498 5369 10499 6036 10499 5370 10499 5370 10500 6036 10500 5371 10500 5284 10501 5370 10501 5371 10501 6036 10502 5372 10502 5371 10502 5371 10503 5372 10503 5373 10503 5283 10504 5371 10504 5373 10504 5372 10505 6037 10505 5373 10505 5373 10506 6037 10506 5280 10506 5282 10507 5373 10507 5280 10507 6037 10508 6082 10508 5280 10508 5280 10509 6082 10509 5375 10509 5281 10510 5280 10510 5375 10510 6082 10511 6083 10511 5375 10511 5375 10512 6083 10512 5374 10512 5713 10513 5375 10513 5374 10513 6083 10514 5376 10514 5374 10514 5374 10515 5376 10515 5377 10515 5279 10516 5374 10516 5377 10516 5376 10517 5379 10517 5377 10517 5377 10518 5379 10518 5378 10518 5278 10519 5377 10519 5378 10519 5379 10520 6084 10520 5378 10520 5378 10521 6084 10521 5382 10521 5380 10522 5378 10522 5382 10522 6084 10523 5381 10523 5382 10523 5382 10524 5381 10524 5383 10524 5384 10525 5382 10525 5383 10525 5381 10526 5385 10526 5383 10526 5383 10527 5385 10527 5386 10527 5740 10528 5383 10528 5386 10528 5385 10529 5388 10529 5386 10529 5386 10530 5388 10530 5387 10530 5277 10531 5386 10531 5387 10531 5388 10532 6040 10532 5387 10532 5387 10533 6040 10533 5389 10533 5716 10534 5387 10534 5389 10534 6040 10535 6039 10535 5389 10535 5389 10536 6039 10536 5390 10536 5717 10537 5389 10537 5390 10537 6039 10538 6041 10538 5390 10538 5390 10539 6041 10539 5392 10539 5391 10540 5390 10540 5392 10540 6041 10541 5393 10541 5392 10541 5392 10542 5393 10542 5395 10542 5276 10543 5392 10543 5395 10543 5393 10544 6042 10544 5395 10544 5395 10545 6042 10545 5275 10545 5394 10546 5395 10546 5275 10546 6042 10547 5396 10547 5275 10547 5275 10548 5396 10548 5398 10548 5397 10549 5275 10549 5398 10549 5396 10550 5399 10550 5398 10550 5398 10551 5399 10551 5400 10551 5742 10552 5398 10552 
5400 10552 5399 10553 6043 10553 5400 10553 5400 10554 6043 10554 5401 10554 5743 10555 5400 10555 5401 10555 6043 10556 6044 10556 5401 10556 5401 10557 6044 10557 5273 10557 5274 10558 5401 10558 5273 10558 6044 10559 5403 10559 5273 10559 5273 10560 5403 10560 5404 10560 5402 10561 5273 10561 5404 10561 5403 10562 6045 10562 5404 10562 5404 10563 6045 10563 5405 10563 5272 10564 5404 10564 5405 10564 6045 10565 6046 10565 5405 10565 5405 10566 6046 10566 5406 10566 5744 10567 5405 10567 5406 10567 6046 10568 5407 10568 5406 10568 5406 10569 5407 10569 5408 10569 5745 10570 5406 10570 5408 10570 5407 10571 6048 10571 5408 10571 5408 10572 6048 10572 5409 10572 5746 10573 5408 10573 5409 10573 6048 10574 6047 10574 5409 10574 5409 10575 6047 10575 5270 10575 5271 10576 5409 10576 5270 10576 6047 10577 6050 10577 5270 10577 5270 10578 6050 10578 5410 10578 5747 10579 5270 10579 5410 10579 6050 10580 6049 10580 5410 10580 5410 10581 6049 10581 5413 10581 5269 10582 5410 10582 5413 10582 6049 10583 6051 10583 5413 10583 5413 10584 6051 10584 5411 10584 5412 10585 5413 10585 5411 10585 6051 10586 5414 10586 5411 10586 5411 10587 5414 10587 5415 10587 5268 10588 5411 10588 5415 10588 5414 10589 5416 10589 5415 10589 5415 10590 5416 10590 5266 10590 5748 10591 5415 10591 5266 10591 5416 10592 5417 10592 5266 10592 5266 10593 5417 10593 5418 10593 5267 10594 5266 10594 5418 10594 5417 10595 5420 10595 5418 10595 5418 10596 5420 10596 5419 10596 5265 10597 5418 10597 5419 10597 5420 10598 5421 10598 5419 10598 5419 10599 5421 10599 5422 10599 5749 10600 5419 10600 5422 10600 5421 10601 5423 10601 5422 10601 5422 10602 5423 10602 5426 10602 5264 10603 5422 10603 5426 10603 5423 10604 5427 10604 5426 10604 5426 10605 5427 10605 5424 10605 5425 10606 5426 10606 5424 10606 5427 10607 6052 10607 5424 10607 5424 10608 6052 10608 5263 10608 5750 10609 5424 10609 5263 10609 6052 10610 6053 10610 5263 10610 5263 10611 6053 10611 5429 10611 5428 10612 5263 10612 5429 10612 6053 10613 5430 10613 5429 10613 5429 10614 5430 10614 5431 10614 5751 10615 5429 10615 5431 10615 5430 10616 6055 10616 5431 10616 5431 10617 6055 10617 5432 10617 5752 10618 5431 10618 5432 10618 6055 10619 6170 10619 5432 10619 5432 10620 6170 10620 5262 10620 5753 10621 5432 10621 5262 10621 6170 10622 5433 10622 5262 10622 5262 10623 5433 10623 5434 10623 5435 10624 5262 10624 5434 10624 5437 10625 5622 10625 6431 10625 5437 10626 5436 10626 5622 10626 5437 10627 5438 10627 5436 10627 5436 10628 5438 10628 5489 10628 5439 10629 5489 10629 5488 10629 5439 10630 5436 10630 5489 10630 5439 10631 5440 10631 5436 10631 5436 10632 5440 10632 5622 10632 5622 10633 5440 10633 5808 10633 5621 10634 5808 10634 4496 10634 5619 10635 4496 10635 5618 10635 5616 10636 5618 10636 5441 10636 5613 10637 5441 10637 5614 10637 5442 10638 5614 10638 5443 10638 5610 10639 5443 10639 5444 10639 5609 10640 5444 10640 5445 10640 5608 10641 5445 10641 5847 10641 5606 10642 5847 10642 5846 10642 5605 10643 5846 10643 5446 10643 5603 10644 5446 10644 5448 10644 5447 10645 5448 10645 5601 10645 5599 10646 5601 10646 5449 10646 5450 10647 5449 10647 5845 10647 5597 10648 5845 10648 5451 10648 5595 10649 5451 10649 5844 10649 5593 10650 5844 10650 5452 10650 5453 10651 5452 10651 5454 10651 5592 10652 5454 10652 5589 10652 5590 10653 5589 10653 5455 10653 5588 10654 5455 10654 5842 10654 5456 10655 5842 10655 5457 10655 5585 10656 5457 10656 5458 10656 5584 10657 5458 10657 5459 10657 5582 10658 5459 10658 5460 10658 5580 10659 5460 10659 5841 10659 5579 10660 
5841 10660 5840 10660 5578 10661 5840 10661 5839 10661 5576 10662 5839 10662 5461 10662 5575 10663 5461 10663 5777 10663 5572 10664 5777 10664 5809 10664 5462 10665 5809 10665 5778 10665 5570 10666 5778 10666 5810 10666 5568 10667 5810 10667 5463 10667 5567 10668 5463 10668 5566 10668 5563 10669 5566 10669 5564 10669 5562 10670 5564 10670 5813 10670 5561 10671 5813 10671 5559 10671 5464 10672 5559 10672 5465 10672 5558 10673 5465 10673 5466 10673 5554 10674 5466 10674 5555 10674 5553 10675 5555 10675 5783 10675 5552 10676 5783 10676 5550 10676 5549 10677 5550 10677 5814 10677 5547 10678 5814 10678 5815 10678 5546 10679 5815 10679 5467 10679 5544 10680 5467 10680 5468 10680 5543 10681 5468 10681 5542 10681 5540 10682 5542 10682 5818 10682 5539 10683 5818 10683 5820 10683 5537 10684 5820 10684 5821 10684 5535 10685 5821 10685 5822 10685 5533 10686 5822 10686 5469 10686 5534 10687 5469 10687 5791 10687 5470 10688 5791 10688 5471 10688 5531 10689 5471 10689 5472 10689 5473 10690 5472 10690 5474 10690 5475 10691 5474 10691 5824 10691 5528 10692 5824 10692 5794 10692 5527 10693 5794 10693 5476 10693 5525 10694 5476 10694 5477 10694 5524 10695 5477 10695 5795 10695 5522 10696 5795 10696 5478 10696 5519 10697 5478 10697 5520 10697 5521 10698 5520 10698 5518 10698 5479 10699 5518 10699 5797 10699 5480 10700 5797 10700 5516 10700 5481 10701 5516 10701 5482 10701 5514 10702 5482 10702 5798 10702 5512 10703 5798 10703 5800 10703 5483 10704 5800 10704 5799 10704 5509 10705 5799 10705 5801 10705 5507 10706 5801 10706 5506 10706 5504 10707 5506 10707 5802 10707 5484 10708 5802 10708 5485 10708 5502 10709 5485 10709 5803 10709 5501 10710 5803 10710 5486 10710 5499 10711 5486 10711 5804 10711 5498 10712 5804 10712 5805 10712 5496 10713 5805 10713 5487 10713 5493 10714 5487 10714 5806 10714 5494 10715 5806 10715 5807 10715 5492 10716 5807 10716 5488 10716 5489 10717 5492 10717 5488 10717 5489 10718 5490 10718 5492 10718 5489 10719 5438 10719 5490 10719 5490 10720 5491 10720 5492 10720 5492 10721 5491 10721 5494 10721 5807 10722 5492 10722 5494 10722 5491 10723 6466 10723 5494 10723 5494 10724 6466 10724 5493 10724 5806 10725 5494 10725 5493 10725 6466 10726 6465 10726 5493 10726 5493 10727 6465 10727 5496 10727 5487 10728 5493 10728 5496 10728 6465 10729 5495 10729 5496 10729 5496 10730 5495 10730 5498 10730 5805 10731 5496 10731 5498 10731 5495 10732 5497 10732 5498 10732 5498 10733 5497 10733 5499 10733 5804 10734 5498 10734 5499 10734 5497 10735 5500 10735 5499 10735 5499 10736 5500 10736 5501 10736 5486 10737 5499 10737 5501 10737 5500 10738 6464 10738 5501 10738 5501 10739 6464 10739 5502 10739 5803 10740 5501 10740 5502 10740 6464 10741 5503 10741 5502 10741 5502 10742 5503 10742 5484 10742 5485 10743 5502 10743 5484 10743 5503 10744 6462 10744 5484 10744 5484 10745 6462 10745 5504 10745 5802 10746 5484 10746 5504 10746 6462 10747 5505 10747 5504 10747 5504 10748 5505 10748 5507 10748 5506 10749 5504 10749 5507 10749 5505 10750 5508 10750 5507 10750 5507 10751 5508 10751 5509 10751 5801 10752 5507 10752 5509 10752 5508 10753 5510 10753 5509 10753 5509 10754 5510 10754 5483 10754 5799 10755 5509 10755 5483 10755 5510 10756 5511 10756 5483 10756 5483 10757 5511 10757 5512 10757 5800 10758 5483 10758 5512 10758 5511 10759 5513 10759 5512 10759 5512 10760 5513 10760 5514 10760 5798 10761 5512 10761 5514 10761 5513 10762 5515 10762 5514 10762 5514 10763 5515 10763 5481 10763 5482 10764 5514 10764 5481 10764 5515 10765 6461 10765 5481 10765 5481 10766 6461 10766 5480 10766 5516 10767 5481 10767 5480 10767 
6461 10768 6460 10768 5480 10768 5480 10769 6460 10769 5479 10769 5797 10770 5480 10770 5479 10770 6460 10771 5517 10771 5479 10771 5479 10772 5517 10772 5521 10772 5518 10773 5479 10773 5521 10773 5517 10774 6459 10774 5521 10774 5521 10775 6459 10775 5519 10775 5520 10776 5521 10776 5519 10776 6459 10777 6458 10777 5519 10777 5519 10778 6458 10778 5522 10778 5478 10779 5519 10779 5522 10779 6458 10780 6457 10780 5522 10780 5522 10781 6457 10781 5524 10781 5795 10782 5522 10782 5524 10782 6457 10783 5523 10783 5524 10783 5524 10784 5523 10784 5525 10784 5477 10785 5524 10785 5525 10785 5523 10786 5526 10786 5525 10786 5525 10787 5526 10787 5527 10787 5476 10788 5525 10788 5527 10788 5526 10789 6456 10789 5527 10789 5527 10790 6456 10790 5528 10790 5794 10791 5527 10791 5528 10791 6456 10792 5529 10792 5528 10792 5528 10793 5529 10793 5475 10793 5824 10794 5528 10794 5475 10794 5529 10795 5530 10795 5475 10795 5475 10796 5530 10796 5473 10796 5474 10797 5475 10797 5473 10797 5530 10798 6455 10798 5473 10798 5473 10799 6455 10799 5531 10799 5472 10800 5473 10800 5531 10800 6455 10801 5532 10801 5531 10801 5531 10802 5532 10802 5470 10802 5471 10803 5531 10803 5470 10803 5532 10804 6454 10804 5470 10804 5470 10805 6454 10805 5534 10805 5791 10806 5470 10806 5534 10806 6454 10807 6393 10807 5534 10807 5534 10808 6393 10808 5533 10808 5469 10809 5534 10809 5533 10809 6393 10810 6392 10810 5533 10810 5533 10811 6392 10811 5535 10811 5822 10812 5533 10812 5535 10812 6392 10813 5536 10813 5535 10813 5535 10814 5536 10814 5537 10814 5821 10815 5535 10815 5537 10815 5536 10816 5538 10816 5537 10816 5537 10817 5538 10817 5539 10817 5820 10818 5537 10818 5539 10818 5538 10819 6448 10819 5539 10819 5539 10820 6448 10820 5540 10820 5818 10821 5539 10821 5540 10821 6448 10822 5541 10822 5540 10822 5540 10823 5541 10823 5543 10823 5542 10824 5540 10824 5543 10824 5541 10825 6449 10825 5543 10825 5543 10826 6449 10826 5544 10826 5468 10827 5543 10827 5544 10827 6449 10828 5545 10828 5544 10828 5544 10829 5545 10829 5546 10829 5467 10830 5544 10830 5546 10830 5545 10831 6450 10831 5546 10831 5546 10832 6450 10832 5547 10832 5815 10833 5546 10833 5547 10833 6450 10834 5548 10834 5547 10834 5547 10835 5548 10835 5549 10835 5814 10836 5547 10836 5549 10836 5548 10837 6453 10837 5549 10837 5549 10838 6453 10838 5552 10838 5550 10839 5549 10839 5552 10839 6453 10840 5551 10840 5552 10840 5552 10841 5551 10841 5553 10841 5783 10842 5552 10842 5553 10842 5551 10843 6388 10843 5553 10843 5553 10844 6388 10844 5554 10844 5555 10845 5553 10845 5554 10845 6388 10846 5556 10846 5554 10846 5554 10847 5556 10847 5558 10847 5466 10848 5554 10848 5558 10848 5556 10849 5557 10849 5558 10849 5558 10850 5557 10850 5464 10850 5465 10851 5558 10851 5464 10851 5557 10852 5560 10852 5464 10852 5464 10853 5560 10853 5561 10853 5559 10854 5464 10854 5561 10854 5560 10855 6386 10855 5561 10855 5561 10856 6386 10856 5562 10856 5813 10857 5561 10857 5562 10857 6386 10858 6444 10858 5562 10858 5562 10859 6444 10859 5563 10859 5564 10860 5562 10860 5563 10860 6444 10861 5565 10861 5563 10861 5563 10862 5565 10862 5567 10862 5566 10863 5563 10863 5567 10863 5565 10864 6384 10864 5567 10864 5567 10865 6384 10865 5568 10865 5463 10866 5567 10866 5568 10866 6384 10867 5569 10867 5568 10867 5568 10868 5569 10868 5570 10868 5810 10869 5568 10869 5570 10869 5569 10870 5571 10870 5570 10870 5570 10871 5571 10871 5462 10871 5778 10872 5570 10872 5462 10872 5571 10873 6383 10873 5462 10873 5462 10874 6383 10874 5572 10874 5809 10875 5462 10875 
5572 10875 6383 10876 5573 10876 5572 10876 5572 10877 5573 10877 5575 10877 5777 10878 5572 10878 5575 10878 5573 10879 5574 10879 5575 10879 5575 10880 5574 10880 5576 10880 5461 10881 5575 10881 5576 10881 5574 10882 5577 10882 5576 10882 5576 10883 5577 10883 5578 10883 5839 10884 5576 10884 5578 10884 5577 10885 6442 10885 5578 10885 5578 10886 6442 10886 5579 10886 5840 10887 5578 10887 5579 10887 6442 10888 6441 10888 5579 10888 5579 10889 6441 10889 5580 10889 5841 10890 5579 10890 5580 10890 6441 10891 5581 10891 5580 10891 5580 10892 5581 10892 5582 10892 5460 10893 5580 10893 5582 10893 5581 10894 6440 10894 5582 10894 5582 10895 6440 10895 5584 10895 5459 10896 5582 10896 5584 10896 6440 10897 5583 10897 5584 10897 5584 10898 5583 10898 5585 10898 5458 10899 5584 10899 5585 10899 5583 10900 5586 10900 5585 10900 5585 10901 5586 10901 5456 10901 5457 10902 5585 10902 5456 10902 5586 10903 6439 10903 5456 10903 5456 10904 6439 10904 5588 10904 5842 10905 5456 10905 5588 10905 6439 10906 5587 10906 5588 10906 5588 10907 5587 10907 5590 10907 5455 10908 5588 10908 5590 10908 5587 10909 6437 10909 5590 10909 5590 10910 6437 10910 5592 10910 5589 10911 5590 10911 5592 10911 6437 10912 5591 10912 5592 10912 5592 10913 5591 10913 5453 10913 5454 10914 5592 10914 5453 10914 5591 10915 6436 10915 5453 10915 5453 10916 6436 10916 5593 10916 5452 10917 5453 10917 5593 10917 6436 10918 5594 10918 5593 10918 5593 10919 5594 10919 5595 10919 5844 10920 5593 10920 5595 10920 5594 10921 6435 10921 5595 10921 5595 10922 6435 10922 5597 10922 5451 10923 5595 10923 5597 10923 6435 10924 5596 10924 5597 10924 5597 10925 5596 10925 5450 10925 5845 10926 5597 10926 5450 10926 5596 10927 5598 10927 5450 10927 5450 10928 5598 10928 5599 10928 5449 10929 5450 10929 5599 10929 5598 10930 5600 10930 5599 10930 5599 10931 5600 10931 5447 10931 5601 10932 5599 10932 5447 10932 5600 10933 5602 10933 5447 10933 5447 10934 5602 10934 5603 10934 5448 10935 5447 10935 5603 10935 5602 10936 5604 10936 5603 10936 5603 10937 5604 10937 5605 10937 5446 10938 5603 10938 5605 10938 5604 10939 6434 10939 5605 10939 5605 10940 6434 10940 5606 10940 5846 10941 5605 10941 5606 10941 6434 10942 5607 10942 5606 10942 5606 10943 5607 10943 5608 10943 5847 10944 5606 10944 5608 10944 5607 10945 6433 10945 5608 10945 5608 10946 6433 10946 5609 10946 5445 10947 5608 10947 5609 10947 6433 10948 6432 10948 5609 10948 5609 10949 6432 10949 5610 10949 5444 10950 5609 10950 5610 10950 6432 10951 5611 10951 5610 10951 5610 10952 5611 10952 5442 10952 5443 10953 5610 10953 5442 10953 5611 10954 5612 10954 5442 10954 5442 10955 5612 10955 5613 10955 5614 10956 5442 10956 5613 10956 5612 10957 5615 10957 5613 10957 5613 10958 5615 10958 5616 10958 5441 10959 5613 10959 5616 10959 5615 10960 5617 10960 5616 10960 5616 10961 5617 10961 5619 10961 5618 10962 5616 10962 5619 10962 5617 10963 5620 10963 5619 10963 5619 10964 5620 10964 5621 10964 4496 10965 5619 10965 5621 10965 5620 10966 6431 10966 5621 10966 5621 10967 6431 10967 5622 10967 5808 10968 5621 10968 5622 10968 5848 10969 5625 10969 4870 10969 4870 10970 5625 10970 4872 10970 4872 10971 5625 10971 4937 10971 4937 10972 5625 10972 4938 10972 4938 10973 5625 10973 4939 10973 4939 10974 5625 10974 4940 10974 4940 10975 5625 10975 4941 10975 4941 10976 5625 10976 5623 10976 5623 10977 5625 10977 5624 10977 5624 10978 5625 10978 4948 10978 4948 10979 5625 10979 4949 10979 4949 10980 5625 10980 5626 10980 5626 10981 5625 10981 4952 10981 4952 10982 5625 10982 4954 10982 4954 10983 
5625 10983 5627 10983 5627 10984 5625 10984 4957 10984 4957 10985 5625 10985 4958 10985 4958 10986 5625 10986 4959 10986 4959 10987 5625 10987 4961 10987 4961 10988 5625 10988 4964 10988 4964 10989 5625 10989 4967 10989 4967 10990 5625 10990 4971 10990 4971 10991 5625 10991 5628 10991 5628 10992 5625 10992 4973 10992 4973 10993 5625 10993 5629 10993 5629 10994 5625 10994 5630 10994 5630 10995 5625 10995 5631 10995 5631 10996 5625 10996 4977 10996 4977 10997 5625 10997 4980 10997 4980 10998 5625 10998 4981 10998 4981 10999 5625 10999 5632 10999 5632 11000 5625 11000 4984 11000 4984 11001 5625 11001 2393 11001 4986 11002 2393 11002 5652 11002 4986 11003 4984 11003 2393 11003 5645 11004 5633 11004 2393 11004 5645 11005 5011 11005 5633 11005 5645 11006 5634 11006 5011 11006 5645 11007 5635 11007 5634 11007 5645 11008 5636 11008 5635 11008 5645 11009 5016 11009 5636 11009 5645 11010 5018 11010 5016 11010 5645 11011 5019 11011 5018 11011 5645 11012 5022 11012 5019 11012 5645 11013 5024 11013 5022 11013 5645 11014 5026 11014 5024 11014 5645 11015 5027 11015 5026 11015 5645 11016 5637 11016 5027 11016 5645 11017 5034 11017 5637 11017 5645 11018 5638 11018 5034 11018 5645 11019 5036 11019 5638 11019 5645 11020 5038 11020 5036 11020 5645 11021 5041 11021 5038 11021 5645 11022 5639 11022 5041 11022 5645 11023 5043 11023 5639 11023 5645 11024 5044 11024 5043 11024 5645 11025 5047 11025 5044 11025 5645 11026 5640 11026 5047 11026 5645 11027 5050 11027 5640 11027 5645 11028 5641 11028 5050 11028 5645 11029 5642 11029 5641 11029 5645 11030 5053 11030 5642 11030 5645 11031 5644 11031 5053 11031 5645 11032 5643 11032 5644 11032 5645 11033 5057 11033 5643 11033 5645 11034 5060 11034 5057 11034 5645 11035 5646 11035 5060 11035 5645 11036 5063 11036 5646 11036 5645 11037 5647 11037 5063 11037 5645 11038 5068 11038 5647 11038 5645 11039 5069 11039 5068 11039 5645 11040 5072 11040 5069 11040 5645 11041 5848 11041 5072 11041 5633 11042 5008 11042 2393 11042 2393 11043 5008 11043 5648 11043 5006 11044 2393 11044 5648 11044 5006 11045 5005 11045 2393 11045 2393 11046 5005 11046 5649 11046 5650 11047 2393 11047 5649 11047 5650 11048 5003 11048 2393 11048 2393 11049 5003 11049 5000 11049 4999 11050 2393 11050 5000 11050 4999 11051 4997 11051 2393 11051 2393 11052 4997 11052 4995 11052 4994 11053 2393 11053 4995 11053 4994 11054 4991 11054 2393 11054 2393 11055 4991 11055 5651 11055 5652 11056 2393 11056 5651 11056 5655 11057 5653 11057 2357 11057 5655 11058 5210 11058 5653 11058 5655 11059 5654 11059 5210 11059 5655 11060 5212 11060 5654 11060 5655 11061 5216 11061 5212 11061 5655 11062 5217 11062 5216 11062 5655 11063 5656 11063 5217 11063 5655 11064 5218 11064 5656 11064 5655 11065 5219 11065 5218 11065 5655 11066 5223 11066 5219 11066 5655 11067 5657 11067 5223 11067 5655 11068 5224 11068 5657 11068 5655 11069 5658 11069 5224 11069 5655 11070 5659 11070 5658 11070 5655 11071 5229 11071 5659 11071 5655 11072 5232 11072 5229 11072 5655 11073 5660 11073 5232 11073 5655 11074 5661 11074 5660 11074 5655 11075 5234 11075 5661 11075 5655 11076 5237 11076 5234 11076 5655 11077 5238 11077 5237 11077 5655 11078 5241 11078 5238 11078 5655 11079 5244 11079 5241 11079 5655 11080 5662 11080 5244 11080 5655 11081 5245 11081 5662 11081 5655 11082 5663 11082 5245 11082 5655 11083 5664 11083 5663 11083 5655 11084 5248 11084 5664 11084 5655 11085 5252 11085 5248 11085 5655 11086 5665 11086 5252 11086 5655 11087 5255 11087 5665 11087 5655 11088 5256 11088 5255 11088 5655 11089 5076 11089 5256 11089 5653 11090 5207 11090 2357 11090 
2357 11091 5207 11091 5205 11091 5202 11092 2357 11092 5205 11092 5202 11093 5200 11093 2357 11093 2357 11094 5200 11094 5199 11094 5196 11095 2357 11095 5199 11095 5196 11096 5666 11096 2357 11096 2357 11097 5666 11097 5667 11097 5191 11098 2357 11098 5667 11098 5191 11099 5188 11099 2357 11099 2357 11100 5188 11100 5668 11100 5186 11101 2357 11101 5668 11101 5186 11102 5669 11102 2357 11102 2357 11103 5669 11103 5182 11103 5181 11104 2357 11104 5182 11104 5181 11105 5670 11105 2357 11105 2357 11106 5670 11106 5178 11106 5674 11107 5178 11107 5175 11107 5671 11108 5674 11108 5175 11108 5671 11109 5672 11109 5674 11109 5674 11110 5672 11110 5172 11110 5673 11111 5674 11111 5172 11111 5673 11112 5171 11112 5674 11112 5674 11113 5171 11113 5168 11113 5166 11114 5674 11114 5168 11114 5166 11115 5675 11115 5674 11115 5674 11116 5675 11116 5676 11116 5162 11117 5674 11117 5676 11117 5162 11118 5161 11118 5674 11118 5674 11119 5161 11119 5159 11119 5157 11120 5674 11120 5159 11120 5157 11121 5677 11121 5674 11121 5674 11122 5677 11122 5678 11122 5154 11123 5674 11123 5678 11123 5154 11124 5151 11124 5674 11124 5674 11125 5151 11125 5679 11125 5150 11126 5674 11126 5679 11126 5150 11127 5149 11127 5674 11127 5674 11128 5149 11128 5680 11128 5681 11129 5674 11129 5680 11129 5681 11130 5145 11130 5674 11130 5674 11131 5145 11131 5143 11131 5141 11132 5674 11132 5143 11132 5141 11133 5139 11133 5674 11133 5674 11134 5139 11134 5682 11134 5683 11135 5674 11135 5682 11135 5683 11136 5684 11136 5674 11136 5674 11137 5684 11137 5133 11137 5685 11138 5674 11138 5133 11138 5685 11139 5686 11139 5674 11139 5674 11140 5686 11140 5687 11140 5688 11141 5674 11141 5687 11141 5688 11142 5077 11142 5674 11142 5674 11143 5077 11143 5076 11143 2357 11144 5178 11144 5674 11144 5850 11145 5754 11145 5764 11145 5850 11146 5701 11146 5754 11146 5850 11147 5342 11147 5701 11147 5850 11148 5689 11148 5342 11148 5850 11149 5690 11149 5689 11149 5850 11150 5691 11150 5690 11150 5850 11151 5692 11151 5691 11151 5850 11152 5693 11152 5692 11152 5850 11153 5332 11153 5693 11153 5850 11154 5330 11154 5332 11154 5850 11155 5297 11155 5330 11155 5850 11156 5325 11156 5297 11156 5850 11157 5299 11157 5325 11157 5850 11158 5301 11158 5299 11158 5850 11159 5302 11159 5301 11159 5850 11160 5694 11160 5302 11160 5850 11161 5304 11161 5694 11161 5850 11162 5318 11162 5304 11162 5850 11163 5695 11163 5318 11163 5850 11164 5697 11164 5695 11164 5850 11165 5696 11165 5697 11165 5850 11166 5698 11166 5696 11166 5850 11167 5305 11167 5698 11167 5850 11168 5699 11168 5305 11168 5850 11169 5259 11169 5699 11169 5850 11170 5700 11170 5259 11170 5850 11171 5260 11171 5700 11171 5850 11172 5435 11172 5260 11172 5850 11173 5753 11173 5435 11173 5754 11174 5701 11174 4735 11174 4735 11175 5701 11175 5729 11175 4733 11176 5729 11176 5730 11176 5731 11177 5730 11177 5702 11177 4730 11178 5702 11178 5349 11178 5703 11179 5349 11179 5352 11179 4725 11180 5352 11180 5293 11180 5732 11181 5293 11181 5704 11181 4713 11182 5704 11182 5733 11182 5705 11183 5733 11183 5734 11183 5706 11184 5734 11184 5361 11184 4714 11185 5361 11185 5707 11185 5735 11186 5707 11186 5365 11186 5736 11187 5365 11187 5708 11187 4682 11188 5708 11188 5291 11188 5709 11189 5291 11189 5368 11189 4685 11190 5368 11190 5289 11190 5710 11191 5289 11191 5711 11191 4686 11192 5711 11192 5286 11192 4863 11193 5286 11193 5284 11193 4687 11194 5284 11194 5283 11194 5737 11195 5283 11195 5282 11195 5712 11196 5282 11196 5281 11196 5738 11197 5281 11197 5713 11197 5739 11198 5713 11198 
5279 11198 5714 11199 5279 11199 5278 11199 5715 11200 5278 11200 5380 11200 4847 11201 5380 11201 5384 11201 4844 11202 5384 11202 5740 11202 5741 11203 5740 11203 5277 11203 4839 11204 5277 11204 5716 11204 4688 11205 5716 11205 5717 11205 5723 11206 5717 11206 5849 11206 5723 11207 4688 11207 5717 11207 5723 11208 5718 11208 4688 11208 5723 11209 4689 11209 5718 11209 5723 11210 5719 11210 4689 11210 5723 11211 4690 11211 5719 11211 5723 11212 5720 11212 4690 11212 5723 11213 4826 11213 5720 11213 5723 11214 4824 11214 4826 11214 5723 11215 4820 11215 4824 11215 5723 11216 5721 11216 4820 11216 5723 11217 5722 11217 5721 11217 5723 11218 4816 11218 5722 11218 5723 11219 5725 11219 4816 11219 5723 11220 5724 11220 5725 11220 5723 11221 4691 11221 5724 11221 5723 11222 4692 11222 4691 11222 5723 11223 5726 11223 4692 11223 5723 11224 4807 11224 5726 11224 5723 11225 4802 11225 4807 11225 5723 11226 4694 11226 4802 11226 5723 11227 4695 11227 4694 11227 5723 11228 4696 11228 4695 11228 5723 11229 4794 11229 4696 11229 5723 11230 4793 11230 4794 11230 5723 11231 5727 11231 4793 11231 5723 11232 4788 11232 5727 11232 5723 11233 5728 11233 4788 11233 5723 11234 4698 11234 5728 11234 5723 11235 4699 11235 4698 11235 4735 11236 5729 11236 4733 11236 4733 11237 5730 11237 5731 11237 5731 11238 5702 11238 4730 11238 4730 11239 5349 11239 5703 11239 5703 11240 5352 11240 4725 11240 4725 11241 5293 11241 5732 11241 5732 11242 5704 11242 4713 11242 4713 11243 5733 11243 5705 11243 5705 11244 5734 11244 5706 11244 5706 11245 5361 11245 4714 11245 4714 11246 5707 11246 5735 11246 5735 11247 5365 11247 5736 11247 5736 11248 5708 11248 4682 11248 4682 11249 5291 11249 5709 11249 5709 11250 5368 11250 4685 11250 4685 11251 5289 11251 5710 11251 5710 11252 5711 11252 4686 11252 4686 11253 5286 11253 4863 11253 4863 11254 5284 11254 4687 11254 4687 11255 5283 11255 5737 11255 5737 11256 5282 11256 5712 11256 5712 11257 5281 11257 5738 11257 5738 11258 5713 11258 5739 11258 5739 11259 5279 11259 5714 11259 5714 11260 5278 11260 5715 11260 5715 11261 5380 11261 4847 11261 4847 11262 5384 11262 4844 11262 4844 11263 5740 11263 5741 11263 5741 11264 5277 11264 4839 11264 4839 11265 5716 11265 4688 11265 5717 11266 5391 11266 5849 11266 5849 11267 5391 11267 5276 11267 5394 11268 5849 11268 5276 11268 5394 11269 5397 11269 5849 11269 5849 11270 5397 11270 5742 11270 5743 11271 5849 11271 5742 11271 5743 11272 5274 11272 5849 11272 5849 11273 5274 11273 5402 11273 5272 11274 5849 11274 5402 11274 5272 11275 5744 11275 5849 11275 5849 11276 5744 11276 5745 11276 5746 11277 5849 11277 5745 11277 5746 11278 5271 11278 5849 11278 5849 11279 5271 11279 5747 11279 5269 11280 5849 11280 5747 11280 5269 11281 5412 11281 5849 11281 5849 11282 5412 11282 5268 11282 5748 11283 5849 11283 5268 11283 5748 11284 5267 11284 5849 11284 5849 11285 5267 11285 5265 11285 5749 11286 5849 11286 5265 11286 5749 11287 5264 11287 5849 11287 5849 11288 5264 11288 5425 11288 5750 11289 5849 11289 5425 11289 5750 11290 5428 11290 5849 11290 5849 11291 5428 11291 5751 11291 5752 11292 5849 11292 5751 11292 5752 11293 5753 11293 5849 11293 5754 11294 5755 11294 5764 11294 5764 11295 5755 11295 4711 11295 5756 11296 5764 11296 4711 11296 5756 11297 4710 11297 5764 11297 5764 11298 4710 11298 5757 11298 4745 11299 5764 11299 5757 11299 4745 11300 5758 11300 5764 11300 5764 11301 5758 11301 5759 11301 5760 11302 5764 11302 5759 11302 5760 11303 4752 11303 5764 11303 5764 11304 4752 11304 5761 11304 4756 11305 5764 11305 5761 11305 4756 11306 
[... COLLADA (.dae) mesh data omitted: triangle/polygon vertex-index arrays (XML element tags stripped during extraction) ...]

-
-
-
-
- - - - - - - - - - - - - - -
+ [COLLADA asset exported by Blender 2.80.75 (commit date 2019-07-29, hash f6cb5f54494e), created/modified 2019-10-18T17:54:25, up axis Z_UP]
+ [mesh geometry data omitted: vertex position and normal float arrays, a zero-filled per-vertex array, and triangle index lists]
+ [visual scene node transform matrix: 0.00162246 0 0 0   0 1.23342e-10 -0.00162246 0   0 0.00163372 1.22492e-10 -0.03015   0 0 0 1]
\ No newline at end of file
diff --git a/models/rg_robot/meshes/FixedBrickV1.dae b/models/rg_robot/meshes/FixedBrickV1.dae
new file mode 100644
index 0000000000..2227e7bd7e
--- /dev/null
+++ b/models/rg_robot/meshes/FixedBrickV1.dae
@@ -0,0 +1,63 @@
+ [COLLADA asset generated by VCGLab (VCGLib | MeshLab), up axis Y_UP, created Thu Sep 17 13:54:34 2015]
+ [mesh vertex position float array omitted]
-0.00762298 -0.0157005 -0.00327273 -0.00754955 -0.015627 -0.00545455 -0.00754955 -0.015627 -0.00981818 -0.00741461 -0.0154692 -0.0104233 -0.00742331 -0.0154805 0.00763636 -0.00795168 -0.0159532 0.00545454 -0.00786469 -0.0158965 0.00327273 -0.00770012 -0.01577 0.00109091 -0.00762298 -0.0157005 -0.00327273 -0.00748003 -0.0155499 -0.00545455 -0.00741461 -0.0154692 -0.00763636 -0.00741461 -0.0154692 -0.0103371 -0.00733706 -0.0153611 -0.00981818 -0.00735346 -0.0153853 0.00981818 -0.00786469 -0.0158965 0.00545454 -0.00770012 -0.01577 0.00327273 -0.00762298 -0.0157005 0.00327273 -0.00754955 -0.015627 0.00109091 -0.00748003 -0.0155499 -0.00545455 -0.00729676 -0.0152983 -0.00981818 -0.00724464 -0.0152085 -0.00763636 -0.00729676 -0.0152983 0.00545454 -0.00754955 -0.015627 0.00109091 -0.00741461 -0.0154692 0.00109091 -0.00735346 -0.0153853 -0.00327273 -0.00729676 -0.0152983 -0.00327273 -0.00724464 -0.0152085 -0.0101522 -0.00715224 -0.0150154 0.00763636 -0.00741461 -0.0154692 -0.00109091 -0.00724464 -0.0152085 -0.00327273 -0.00715473 -0.0150214 -0.00545455 -0.00711691 -0.0149238 -0.0100861 -0.00708612 -0.0148306 -0.00981818 -0.00708471 -0.0148259 0.00545454 -0.00729676 -0.0152983 0.00545454 -0.00724464 -0.0152085 0.00327273 -0.00719726 -0.0151161 -0.00545455 -0.0070574 -0.0147257 -0.0100384 -0.00703843 -0.0146402 -0.00981818 -0.00703533 -0.0146243 0.00981818 -0.00715473 -0.0150214 0.00763636 -0.00708471 -0.0148259 0.0101009 -0.00710094 -0.0148774 0.0100861 -0.00708612 -0.0148306 0.00981818 -0.00708471 -0.0148259 0.0100724 -0.00707245 -0.0147834 0.00327273 -0.00700712 -0.0144186 -0.00109091 -0.00700105 -0.0143149 0.00109091 -0.00700712 -0.0144186 0.00763636 -0.00703533 -0.0146243 0.0100294 -0.00702944 -0.0145919 0.0100216 -0.00702165 -0.0145435 0.010015 -0.00701504 -0.0144948 0.00763636 -0.00701855 -0.0145218 0.00981818 -0.00701855 -0.0145218 0.00109091 -0.00700105 -0.0143149 0.00545454 -0.00700712 -0.0144186 0.00981818 -0.00703533 -0.0146243 0.00981818 -0.00700712 -0.0144186 0.0100054 -0.00700542 -0.0143971 0.00763636 -0.00700712 -0.0144186 0.00763636 -0.00700105 -0.0143149 0.00545454 -0.00700105 -0.0143149 0.0100024 -0.00700241 -0.0143481 0.00981818 -0.00700105 -0.0143149 -0.00545455 -0.00700105 -0.0143149 -0.01 -0.007 -0.01425 -0.00763636 -0.00700712 -0.0144186 -0.00545455 -0.00700712 -0.0144186 -0.00327273 -0.00701855 -0.0145218 -0.00981818 -0.00700105 -0.0143149 -0.00763636 -0.00700105 -0.0143149 -0.00981818 -0.00700712 -0.0144186 -0.00981818 -0.00701855 -0.0145218 -0.0100294 -0.00702944 -0.0145919 -0.00763636 -0.00701855 -0.0145218 -0.00545455 -0.00703533 -0.0146243 -0.00327273 -0.0070574 -0.0147257 -0.00109091 -0.00708471 -0.0148259 0.00109091 -0.00715473 -0.0150214 0.00763636 -0.00729676 -0.0152983 0.00763636 -0.00724464 -0.0152085 0.00545454 -0.00719726 -0.0151161 0.00763636 -0.00719726 -0.0151161 0.00545454 -0.00715473 -0.0150214 0.00109091 -0.00708471 -0.0148259 -0.00109091 -0.0070574 -0.0147257 -0.00545455 -0.00701855 -0.0145218 -0.0100724 -0.00707245 -0.0147834 -0.00763636 -0.00703533 -0.0146243 -0.0100486 -0.0070486 -0.0146882 -0.00981818 -0.0070574 -0.0147257 -0.00763636 -0.0070574 -0.0147257 -0.0101169 -0.00711691 -0.0149238 -0.00763636 -0.00711691 -0.0149238 -0.0101009 -0.00710094 -0.0148774 -0.00981818 -0.00711691 -0.0149238 -0.010192 -0.00719202 -0.0151051 -0.00981818 -0.00719726 -0.0151161 -0.00981818 -0.00729676 -0.0152983 -0.00763636 -0.00735346 -0.0153853 -0.00545455 -0.00735346 -0.0153853 -0.00327273 -0.00741461 -0.0154692 -0.0111449 -0.00814489 -0.016058 -0.00981818 -0.0081339 
-0.0160527 -0.00981818 -0.00822864 -0.0160953 -0.00763636 -0.00832622 -0.0161331 0.00109091 -0.00872822 -0.0162314 0.00545454 -0.0089351 -0.0162489 0.00327273 -0.00883144 -0.0162429 0.00109091 -0.00883144 -0.0162429 0.00327273 -0.0089351 -0.0162489 -0.0116098 -0.00860982 -0.0162116 -0.00545455 -0.0089351 -0.0162489 -0.00545455 -0.00872822 -0.0162314 -0.00545455 -0.00862575 -0.0162147 -0.00545455 -0.00852428 -0.0161926 -0.00763636 -0.00842409 -0.0161653 -0.00981818 -0.00842409 -0.0161653 -0.00981818 -0.00852428 -0.0161926 -0.00763636 -0.00862575 -0.0162147 -0.00763636 -0.00872822 -0.0162314 -0.0117065 -0.00870654 -0.0162284 -0.00981818 -0.00872822 -0.0162314 -0.00763636 -0.00883144 -0.0162429 -0.011804 -0.00880396 -0.0162404 -0.00981818 -0.00883144 -0.0162429 -0.00763636 -0.0089351 -0.0162489 -0.00981818 -0.0089351 -0.0162489 -0.00545455 -0.00883144 -0.0162429 -0.00327273 -0.0089351 -0.0162489 -0.00327273 -0.00883144 -0.0162429 -0.00109091 -0.0089351 -0.0162489 -0.00109091 -0.00883144 -0.0162429 0.00981818 -0.00872822 -0.0162314 0.00763636 -0.00872822 -0.0162314 0.00981818 -0.00883144 -0.0162429 0.00545454 -0.00883144 -0.0162429 -0.00327273 -0.00872822 -0.0162314 0.00327273 -0.00872822 -0.0162314 0.00545454 -0.00862575 -0.0162147 0.00327273 -0.00862575 -0.0162147 0.00763636 -0.00862575 -0.0162147 -0.00109091 -0.00872822 -0.0162314 -0.00327273 -0.00862575 -0.0162147 0.00109091 -0.00862575 -0.0162147 0.00109091 -0.00852428 -0.0161926 -0.00109091 -0.00862575 -0.0162147 -0.00981818 -0.00862575 -0.0162147 0.00981818 -0.00852428 -0.0161926 0.00109091 -0.00842409 -0.0161653 -0.00545455 -0.00842409 -0.0161653 -0.00327273 -0.00852428 -0.0161926 -0.00763636 -0.00852428 -0.0161926 0.00981818 -0.00832622 -0.0161331 0.00545454 -0.00832622 -0.0161331 0.00545454 -0.00842409 -0.0161653 -0.00327273 -0.00842409 -0.0161653 0.00109091 -0.00832622 -0.0161331 0.00981818 -0.00822864 -0.0160953 0.00545454 -0.00822864 -0.0160953 0.00763636 -0.00832622 -0.0161331 0.00327273 -0.00822864 -0.0160953 -0.00545455 -0.00822864 -0.0160953 -0.00109091 -0.00832622 -0.0161331 -0.00545455 -0.00832622 -0.0161331 -0.00981818 -0.00832622 -0.0161331 0.00545454 -0.0081339 -0.0160527 0.00109091 -0.0081339 -0.0160527 -0.00109091 -0.0081339 -0.0160527 -0.00109091 -0.00822864 -0.0160953 -0.00327273 -0.0081339 -0.0160527 -0.00763636 -0.00822864 -0.0160953 0.00981818 -0.0081339 -0.0160527 0.00545454 -0.0080415 -0.0160054 -0.00763636 -0.0080415 -0.0160054 0.00981818 -0.00795168 -0.0159532 0.00981818 -0.0080415 -0.0160054 0.00545454 -0.00795168 -0.0159532 -0.00109091 -0.00795168 -0.0159532 -0.00763636 -0.00795168 -0.0159532 0.00327273 -0.00786469 -0.0158965 0.00327273 -0.00795168 -0.0159532 0.00109091 -0.00795168 -0.0159532 -0.00545455 -0.00795168 -0.0159532 0.00763636 -0.00786469 -0.0158965 0.00763636 -0.00778076 -0.0158354 0.00545454 -0.00778076 -0.0158354 -0.00109091 -0.00754955 -0.015627 0.00327273 -0.00778076 -0.0158354 0.00109091 -0.00786469 -0.0158965 -0.00327273 -0.00786469 -0.0158965 0.00109091 -0.00770012 -0.01577 -0.00545455 -0.00770012 -0.01577 0.00763636 -0.00770012 -0.01577 0.00763636 -0.00762298 -0.0157005 0.00545454 -0.00762298 -0.0157005 0.00327273 -0.00748003 -0.0155499 -0.00545455 -0.00724464 -0.0152085 -0.00763636 -0.00724464 -0.0152085 -0.00109091 -0.00770012 -0.01577 -0.00545455 -0.00762298 -0.0157005 -0.00981818 -0.00762298 -0.0157005 0.00981818 -0.00754955 -0.015627 -0.00109091 -0.00729676 -0.0152983 -0.010134 -0.00713401 -0.0149698 -0.00981818 -0.00715473 -0.0150214 0.00763636 -0.00748003 -0.0155499 0.00545454 
-0.00748003 -0.0155499 0.00763636 -0.00754955 -0.015627 0.00109091 -0.00754955 -0.015627 -0.00109091 -0.00748003 -0.0155499 0.00545454 -0.00741461 -0.0154692 -0.00109091 -0.00741461 -0.0154692 -0.00545455 -0.00748003 -0.0155499 0.00545454 -0.00735346 -0.0153853 0.00109091 -0.00719726 -0.0151161 0.00981818 -0.00741461 -0.0154692 0.00763636 -0.00735346 -0.0153853 0.00327273 -0.00741461 -0.0154692 -0.00109091 -0.00735346 -0.0153853 -0.00327273 -0.00735346 -0.0153853 0.00109091 -0.00729676 -0.0152983 0.00327273 -0.00735346 -0.0153853 0.00327273 -0.00729676 -0.0152983 0.00327273 -0.00724464 -0.0152085 0.00109091 -0.00724464 -0.0152085 0.00981818 -0.00724464 -0.0152085 -0.00109091 -0.00719726 -0.0151161 -0.00545455 -0.00719726 -0.0151161 -0.00327273 -0.00719726 -0.0151161 -0.00763636 -0.00719726 -0.0151161 0.00327273 -0.00715473 -0.0150214 -0.00109091 -0.00715473 -0.0150214 -0.00763636 -0.00715473 -0.0150214 -0.00545455 -0.00715473 -0.0150214 0.00545454 -0.00711691 -0.0149238 0.00545454 -0.00708471 -0.0148259 0.00109091 -0.00711691 -0.0149238 0.00327273 -0.00711691 -0.0149238 -0.00327273 -0.00711691 -0.0149238 0.00327273 -0.00708471 -0.0148259 -0.00109091 -0.00711691 -0.0149238 -0.00327273 -0.00708471 -0.0148259 -0.00763636 -0.00708471 -0.0148259 0.00109091 -0.0070574 -0.0147257 -0.00545455 -0.00708471 -0.0148259 0.00981818 -0.0070574 -0.0147257 0.00763636 -0.0070574 -0.0147257 0.00327273 -0.00701855 -0.0145218 0.00545454 -0.00703533 -0.0146243 0.00327273 -0.00703533 -0.0146243 -0.00109091 -0.00703533 -0.0146243 -0.00109091 -0.00701855 -0.0145218 -0.00327273 -0.00703533 -0.0146243 0.00545454 -0.00701855 -0.0145218 -0.00109091 -0.00700712 -0.0144186 0.00327273 -0.00700105 -0.0143149 -0.00327273 -0.00700712 -0.0144186 -0.0100006 -0.0070006 -0.0142991 -0.0100011 -0.0069375 -0.0143149 -0.0100054 -0.00700542 -0.0143971 -0.010015 -0.00548496 -0.0144948 -0.0100096 -0.00549037 -0.014446 -0.0100096 -0.00700963 -0.014446 -0.0100186 -0.0069375 -0.0145218 -0.010015 -0.00701504 -0.0144948 -0.0100216 -0.00702165 -0.0145435 -0.0100024 -0.00549759 -0.0143481 -0.0100071 -0.0069375 -0.0144186 -0.0100384 -0.00546157 -0.0146402 -0.0100486 -0.0054514 -0.0146882 -0.0100599 -0.00544006 -0.014736 -0.0100574 -0.0069375 -0.0147257 -0.0100353 -0.0069375 -0.0146243 -0.0100599 -0.00705994 -0.014736 -0.0100847 -0.0069375 -0.0148259 -0.0101169 -0.0069375 -0.0149238 -0.010134 -0.00536598 -0.0149698 -0.0101547 -0.0069375 -0.0150214 -0.0101973 -0.0069375 -0.0151161 -0.0101716 -0.00717158 -0.0150605 -0.010192 -0.00530798 -0.0151051 -0.0102446 -0.0069375 -0.0152085 -0.0102598 -0.00524017 -0.0152358 -0.0102845 -0.00728454 -0.0152782 -0.0102968 -0.0069375 -0.0152983 -0.0102136 -0.00721355 -0.0151492 -0.0102362 -0.00526384 -0.0151928 -0.0103103 -0.00731029 -0.01532 -0.0103371 -0.00516294 -0.0153611 -0.0103648 -0.00736483 -0.0154016 -0.0103535 -0.0069375 -0.0153853 -0.0103648 -0.00513517 -0.0154016 -0.0103936 -0.00739358 -0.0154414 -0.0103936 -0.00510641 -0.0154414 -0.0104233 -0.00507669 -0.0154805 -0.01048 -0.0069375 -0.0155499 -0.0105181 -0.0049819 -0.0155931 -0.010454 -0.00745398 -0.0155188 -0.0104146 -0.0069375 -0.0154692 -0.0105515 -0.00494849 -0.0156291 -0.0105858 -0.00758579 -0.0156642 -0.010623 -0.0069375 -0.0157005 -0.0105858 -0.00491421 -0.0156642 -0.0106209 -0.00487908 -0.0156985 -0.0106569 -0.00484312 -0.0157319 -0.0107001 -0.0069375 -0.01577 -0.0107312 -0.00476879 -0.015796 -0.0107808 -0.0069375 -0.0158354 -0.0106937 -0.00769365 -0.0157644 -0.0107001 -0.004875 -0.01577 -0.0107695 -0.00473046 -0.0158267 -0.0107808 
-0.004875 -0.0158354 -0.0108086 -0.0046914 -0.0158564 -0.0108647 -0.004875 -0.0158965 -0.0108889 -0.00461114 -0.0159129 -0.0108647 -0.0069375 -0.0158965 -0.01093 -0.00456999 -0.0159397 -0.0109517 -0.004875 -0.0159532 -0.0112286 -0.004875 -0.0160953 -0.0115243 -0.0069375 -0.0161926 -0.0116257 -0.004875 -0.0162147 -0.012 -0.0035 -0.01625 -0.0119019 -0.00890186 -0.0162476 -0.0118314 -0.0069375 -0.0162429 -0.0117282 -0.0069375 -0.0162314 -0.0114194 -0.00841943 -0.0161639 -0.0113262 -0.00832622 -0.0161331 -0.0112346 -0.00823463 -0.0160978 -0.0111339 -0.0069375 -0.0160527 -0.0111339 -0.004875 -0.0160527 -0.0112286 -0.0069375 -0.0160953 -0.0110415 -0.004875 -0.0160054 -0.0112346 -0.00426537 -0.0160978 -0.0114194 -0.00408057 -0.0161639 -0.0114241 -0.004875 -0.0161653 -0.011514 -0.00398596 -0.0161901 -0.0116098 -0.00389018 -0.0162116 -0.011804 -0.00369603 -0.0162404 -0.0119019 -0.00359813 -0.0162476 -0.0119351 -0.004875 -0.0162489 -0.0111449 -0.00435511 -0.016058 -0.0115243 -0.004875 -0.0161926 -0.0117282 -0.004875 -0.0162314 -0.0118314 -0.004875 -0.0162429 -0.0119351 -0.0069375 -0.0162489 -0.0116257 -0.0069375 -0.0162147 -0.011514 -0.00851404 -0.0161901 -0.0114241 -0.0069375 -0.0161653 -0.0113262 -0.004875 -0.0161331 -0.0110572 -0.00805721 -0.0160138 -0.0110415 -0.0069375 -0.0160054 -0.0109718 -0.00797179 -0.0159655 -0.0109517 -0.0069375 -0.0159532 -0.01093 -0.00793 -0.0159397 -0.0108889 -0.00788886 -0.0159129 -0.0105495 -0.0069375 -0.015627 -0.0105181 -0.0075181 -0.0155931 -0.0102598 -0.00725983 -0.0152358 -0.0102362 -0.00723616 -0.0151928 -0.0101716 -0.00532842 -0.0150605 -0.0113262 -0.0069375 -0.0161331 0.00109091 -0.0035649 -0.0162489 -0.00327273 -0.0035649 -0.0162489 -0.00763636 -0.0035649 -0.0162489 -0.00981818 -0.0035649 -0.0162489 -0.00763636 -0.00377177 -0.0162314 -0.00327273 -0.00397572 -0.0161926 -0.00109091 -0.00397572 -0.0161926 -0.00109091 -0.00407591 -0.0161653 0.00109091 -0.00417378 -0.0161331 0.00327273 -0.00427136 -0.0160953 0.00981818 -0.00454832 -0.0159532 0.00763636 -0.00436609 -0.0160527 0.00545454 -0.00417378 -0.0161331 0.00109091 -0.00407591 -0.0161653 0.00327273 -0.00407591 -0.0161653 -0.00327273 -0.00377177 -0.0162314 -0.00545455 -0.00366856 -0.0162429 -0.00763636 -0.00366856 -0.0162429 -0.00763636 -0.00387425 -0.0162147 -0.00545455 -0.00387425 -0.0162147 -0.00327273 -0.00407591 -0.0161653 -0.00109091 -0.00417378 -0.0161331 0.00109091 -0.00427136 -0.0160953 0.00327273 -0.00436609 -0.0160527 0.0108484 -0.00465162 -0.0158852 0.0108889 -0.00461114 -0.0159129 -0.0117065 -0.00379346 -0.0162284 -0.00981818 -0.00387425 -0.0162147 -0.00545455 -0.00397572 -0.0161926 -0.00109091 -0.00427136 -0.0160953 0.00545454 -0.0044585 -0.0160054 0.00763636 -0.00463531 -0.0158965 0.00981818 -0.00463531 -0.0158965 0.0108086 -0.0046914 -0.0158564 -0.00763636 -0.00397572 -0.0161926 -0.00763636 -0.00407591 -0.0161653 -0.00327273 -0.00427136 -0.0160953 0.00545454 -0.00454832 -0.0159532 0.00545454 -0.00463531 -0.0158965 0.0107312 -0.00476879 -0.015796 -0.00981818 -0.00397572 -0.0161926 -0.00981818 -0.00407591 -0.0161653 -0.00545455 -0.00427136 -0.0160953 -0.00109091 -0.00436609 -0.0160527 0.00327273 -0.00463531 -0.0158965 0.00763636 -0.00479988 -0.01577 0.00981818 -0.00487702 -0.0157005 0.00981818 -0.00479988 -0.01577 -0.00981818 -0.00417378 -0.0161331 -0.00763636 -0.00427136 -0.0160953 -0.00109091 -0.00454832 -0.0159532 0.00109091 -0.00454832 -0.0159532 0.00545454 -0.00471924 -0.0158354 0.00545454 -0.00479988 -0.01577 -0.0113262 -0.00417378 -0.0161331 -0.0109718 -0.0045282 -0.0159655 -0.0108484 
-0.00465162 -0.0158852 -0.00981818 -0.00471924 -0.0158354 -0.00981818 -0.00479988 -0.01577 -0.0106937 -0.00480634 -0.0157644 -0.00981818 -0.00495045 -0.015627 -0.0104856 -0.00501442 -0.0155563 -0.010454 -0.00504602 -0.0155188 -0.00981818 -0.00508539 -0.0154692 -0.0103103 -0.00518971 -0.01532 -0.0102845 -0.00521546 -0.0152782 -0.00763636 -0.00534526 -0.0150214 -0.0101522 -0.00534776 -0.0150154 -0.0101169 -0.00538309 -0.0149238 -0.00981818 -0.00538309 -0.0149238 -0.00545455 -0.0054426 -0.0147257 -0.00327273 -0.00546467 -0.0146243 0.00981818 -0.00549895 -0.0143149 0.00763636 -0.00549895 -0.0143149 0.00327273 -0.00546467 -0.0146243 0.00109091 -0.0054426 -0.0147257 -0.00327273 -0.00541529 -0.0148259 -0.00545455 -0.00538309 -0.0149238 -0.00327273 -0.00538309 -0.0149238 -0.00763636 -0.00530274 -0.0151161 -0.00763636 -0.00436609 -0.0160527 -0.00545455 -0.0044585 -0.0160054 -0.00327273 -0.0044585 -0.0160054 -0.00109091 -0.00463531 -0.0158965 0.00327273 -0.00479988 -0.01577 0.00545454 -0.00487702 -0.0157005 0.0105515 -0.00494849 -0.0156291 -0.00763636 -0.0044585 -0.0160054 0.00109091 -0.00479988 -0.01577 0.00327273 -0.00487702 -0.0157005 0.00763636 -0.00495045 -0.015627 0.010454 -0.00504602 -0.0155188 0.0104233 -0.00507669 -0.0154805 0.0104856 -0.00501442 -0.0155563 0.00981818 -0.00501997 -0.0155499 -0.0110572 -0.00444279 -0.0160138 -0.00109091 -0.00471924 -0.0158354 -0.00327273 -0.00471924 -0.0158354 -0.00109091 -0.00479988 -0.01577 0.00981818 -0.00508539 -0.0154692 -0.00763636 -0.00454832 -0.0159532 -0.00327273 -0.00479988 -0.01577 -0.00109091 -0.00487702 -0.0157005 0.00109091 -0.00495045 -0.015627 0.00545454 -0.00501997 -0.0155499 0.00545454 -0.00508539 -0.0154692 0.00763636 -0.00514653 -0.0153853 0.0103103 -0.00518971 -0.01532 0.0103371 -0.00516294 -0.0153611 0.0103648 -0.00513517 -0.0154016 -0.00981818 -0.00463531 -0.0158965 0.00327273 -0.00508539 -0.0154692 0.00327273 -0.00514653 -0.0153853 0.00763636 -0.00520324 -0.0152983 0.00545454 -0.00520324 -0.0152983 0.0102136 -0.00528645 -0.0151492 0.00981818 -0.00525536 -0.0152085 0.0102362 -0.00526384 -0.0151928 -0.00763636 -0.00487702 -0.0157005 -0.00981818 -0.00487702 -0.0157005 0.00109091 -0.00520324 -0.0152983 0.00763636 -0.00534526 -0.0150214 0.00981818 -0.00534526 -0.0150214 0.0101522 -0.00534776 -0.0150154 -0.00981818 -0.00501997 -0.0155499 0.00327273 -0.00530274 -0.0151161 0.00763636 -0.00538309 -0.0149238 0.0101009 -0.00539906 -0.0148774 -0.00981818 -0.00514653 -0.0153853 -0.00763636 -0.00520324 -0.0152983 0.00327273 -0.00541529 -0.0148259 0.00545454 -0.00541529 -0.0148259 0.00981818 -0.00546467 -0.0146243 0.0100486 -0.0054514 -0.0146882 -0.0102136 -0.00528645 -0.0151492 -0.00981818 -0.00530274 -0.0151161 -0.00981818 -0.00534526 -0.0150214 -0.0101009 -0.00539906 -0.0148774 -0.0100861 -0.00541388 -0.0148306 -0.00981818 -0.00541529 -0.0148259 -0.0100724 -0.00542755 -0.0147834 -0.00763636 -0.00546467 -0.0146243 -0.00545455 -0.00548145 -0.0145218 -0.00327273 -0.00548145 -0.0145218 -0.00327273 -0.00549288 -0.0144186 -0.00109091 -0.00549895 -0.0143149 0.00109091 -0.00549288 -0.0144186 0.00109091 -0.00549895 -0.0143149 -0.00109091 -0.00549288 -0.0144186 -0.00981818 -0.00546467 -0.0146243 -0.0100294 -0.00547055 -0.0145919 -0.00981818 -0.00548145 -0.0145218 -0.0100216 -0.00547835 -0.0145435 -0.00763636 -0.00548145 -0.0145218 -0.00545455 -0.00549895 -0.0143149 -0.00327273 -0.00549895 -0.0143149 -0.00763636 -0.00549288 -0.0144186 -0.00545455 -0.00549288 -0.0144186 -0.0100054 -0.00549458 -0.0143971 -0.00981818 -0.00549288 -0.0144186 -0.0100006 
-0.0054994 -0.0142991 -0.00763636 -0.00549895 -0.0143149 -0.00981818 -0.00549895 -0.0143149 0.00327273 -0.00549288 -0.0144186 0.00545454 -0.00549895 -0.0143149 0.00327273 -0.00548145 -0.0145218 0.00545454 -0.00549288 -0.0144186 0.00545454 -0.00548145 -0.0145218 0.0100006 -0.0054994 -0.0142991 0.00763636 -0.00549288 -0.0144186 0.00981818 -0.00549288 -0.0144186 0.0100096 -0.00549037 -0.014446 0.00763636 -0.00548145 -0.0145218 0.00981818 -0.00548145 -0.0145218 0.0100216 -0.00547835 -0.0145435 0.00327273 -0.0054426 -0.0147257 -0.00327273 -0.00534526 -0.0150214 -0.00981818 -0.00520324 -0.0152983 -0.00981818 -0.00525536 -0.0152085 -0.00763636 -0.00525536 -0.0152085 -0.00109091 -0.00541529 -0.0148259 0.0100294 -0.00547055 -0.0145919 0.00981818 -0.0054426 -0.0147257 0.00763636 -0.00546467 -0.0146243 0.00981818 -0.00538309 -0.0149238 0.010134 -0.00536598 -0.0149698 0.00763636 -0.00541529 -0.0148259 0.00981818 -0.00530274 -0.0151161 0.00763636 -0.00525536 -0.0152085 0.00545454 -0.00530274 -0.0151161 0.00763636 -0.00530274 -0.0151161 0.00545454 -0.00514653 -0.0153853 0.00981818 -0.00520324 -0.0152983 0.00763636 -0.00417378 -0.0161331 -0.00109091 -0.00377177 -0.0162314 -0.00327273 -0.00366856 -0.0162429 -0.00545455 -0.0035649 -0.0162489 0.0112346 -0.00426537 -0.0160978 0.00981818 -0.00427136 -0.0160953 0.00981818 -0.00417378 -0.0161331 0.00545454 -0.00397572 -0.0161926 0.00327273 -0.00397572 -0.0161926 0.0113262 -0.00417378 -0.0161331 0.00981818 -0.00377177 -0.0162314 0.00981818 -0.0035649 -0.0162489 0.00763636 -0.00377177 -0.0162314 0.00763636 -0.00397572 -0.0161926 0.00763636 -0.00407591 -0.0161653 0.00981818 -0.00407591 -0.0161653 0.00763636 -0.00387425 -0.0162147 0.00981818 -0.00387425 -0.0162147 0.00545454 -0.00377177 -0.0162314 0.00763636 -0.00366856 -0.0162429 0.011804 -0.00369603 -0.0162404 0.00545454 -0.0035649 -0.0162489 0.00763636 -0.0035649 -0.0162489 0.012 -0.0035 -0.01625 -0.00109091 -0.00366856 -0.0162429 0.00327273 -0.00366856 -0.0162429 0.00327273 -0.0035649 -0.0162489 0.00545454 -0.00366856 -0.0162429 -0.00109091 -0.0035649 -0.0162489 -0.00545455 -0.00377177 -0.0162314 -0.00981818 -0.00366856 -0.0162429 0.00109091 -0.00366856 -0.0162429 0.00981818 -0.00366856 -0.0162429 -0.00981818 -0.00377177 -0.0162314 0.00109091 -0.00377177 -0.0162314 0.00545454 -0.00387425 -0.0162147 0.00327273 -0.00377177 -0.0162314 -0.00327273 -0.00387425 -0.0162147 0.00109091 -0.00387425 -0.0162147 -0.00109091 -0.00387425 -0.0162147 0.00327273 -0.00387425 -0.0162147 0.00981818 -0.00397572 -0.0161926 -0.00545455 -0.00407591 -0.0161653 0.00109091 -0.00397572 -0.0161926 -0.00763636 -0.00417378 -0.0161331 -0.00545455 -0.00417378 -0.0161331 0.00545454 -0.00407591 -0.0161653 -0.00327273 -0.00417378 -0.0161331 0.00327273 -0.00417378 -0.0161331 0.00545454 -0.00427136 -0.0160953 0.00763636 -0.00427136 -0.0160953 -0.00981818 -0.00427136 -0.0160953 -0.00327273 -0.00436609 -0.0160527 0.00109091 -0.00436609 -0.0160527 0.00545454 -0.00436609 -0.0160527 0.00981818 -0.00436609 -0.0160527 -0.00981818 -0.00436609 -0.0160527 -0.00545455 -0.00436609 -0.0160527 -0.00109091 -0.0044585 -0.0160054 0.00109091 -0.0044585 -0.0160054 0.00327273 -0.0044585 -0.0160054 0.00981818 -0.0044585 -0.0160054 0.00763636 -0.0044585 -0.0160054 -0.00981818 -0.0044585 -0.0160054 -0.00545455 -0.00454832 -0.0159532 -0.00327273 -0.00454832 -0.0159532 0.00763636 -0.00454832 -0.0159532 -0.00545455 -0.00463531 -0.0158965 0.00109091 -0.00463531 -0.0158965 0.00327273 -0.00454832 -0.0159532 -0.00981818 -0.00454832 -0.0159532 -0.00763636 -0.00463531 -0.0158965 
-0.00763636 -0.00471924 -0.0158354 -0.00545455 -0.00479988 -0.01577 0.00327273 -0.00501997 -0.0155499 -0.00545455 -0.00471924 -0.0158354 -0.00327273 -0.00463531 -0.0158965 0.00327273 -0.00471924 -0.0158354 0.00763636 -0.00471924 -0.0158354 0.00981818 -0.00471924 -0.0158354 0.00109091 -0.00471924 -0.0158354 -0.00763636 -0.00479988 -0.01577 -0.00545455 -0.00487702 -0.0157005 -0.00545455 -0.00495045 -0.015627 -0.00109091 -0.00508539 -0.0154692 0.00545454 -0.00525536 -0.0152085 -0.00327273 -0.00487702 -0.0157005 0.00109091 -0.00487702 -0.0157005 -0.00327273 -0.00495045 -0.015627 -0.00109091 -0.00495045 -0.015627 0.00545454 -0.00495045 -0.015627 0.00981818 -0.00495045 -0.015627 0.00763636 -0.00487702 -0.0157005 -0.00545455 -0.00501997 -0.0155499 0.00327273 -0.00525536 -0.0152085 -0.00763636 -0.00495045 -0.015627 -0.00327273 -0.00501997 -0.0155499 0.00327273 -0.00495045 -0.015627 0.00763636 -0.00501997 -0.0155499 -0.00763636 -0.00501997 -0.0155499 -0.00545455 -0.00508539 -0.0154692 -0.00109091 -0.00501997 -0.0155499 0.00109091 -0.00508539 -0.0154692 0.00109091 -0.00501997 -0.0155499 0.00763636 -0.00508539 -0.0154692 -0.00763636 -0.00508539 -0.0154692 -0.00763636 -0.00514653 -0.0153853 -0.00327273 -0.00520324 -0.0152983 0.00327273 -0.00538309 -0.0149238 0.00763636 -0.0054426 -0.0147257 0.00981818 -0.00541529 -0.0148259 -0.00545455 -0.00514653 -0.0153853 -0.00327273 -0.00508539 -0.0154692 -0.00109091 -0.00514653 -0.0153853 0.00981818 -0.00514653 -0.0153853 -0.00545455 -0.00520324 -0.0152983 -0.00327273 -0.00514653 -0.0153853 -0.00109091 -0.00520324 -0.0152983 0.00327273 -0.00520324 -0.0152983 0.00109091 -0.00514653 -0.0153853 -0.00327273 -0.00525536 -0.0152085 0.00109091 -0.00525536 -0.0152085 -0.00545455 -0.00530274 -0.0151161 -0.00545455 -0.00525536 -0.0152085 -0.00109091 -0.00530274 -0.0151161 -0.00327273 -0.00530274 -0.0151161 -0.00109091 -0.00525536 -0.0152085 0.00109091 -0.00530274 -0.0151161 0.00109091 -0.00534526 -0.0150214 -0.00109091 -0.00534526 -0.0150214 0.00327273 -0.00534526 -0.0150214 0.00545454 -0.00534526 -0.0150214 -0.00545455 -0.00534526 -0.0150214 -0.00763636 -0.00538309 -0.0149238 -0.00109091 -0.00538309 -0.0149238 0.00109091 -0.00538309 -0.0149238 0.00545454 -0.00538309 -0.0149238 -0.00327273 -0.0054426 -0.0147257 -0.00545455 -0.00541529 -0.0148259 0.00109091 -0.00541529 -0.0148259 -0.00109091 -0.0054426 -0.0147257 0.00545454 -0.0054426 -0.0147257 -0.00981818 -0.0054426 -0.0147257 -0.00763636 -0.00541529 -0.0148259 -0.00109091 -0.00548145 -0.0145218 -0.00763636 -0.0054426 -0.0147257 0.00109091 -0.00546467 -0.0146243 -0.00109091 -0.00546467 -0.0146243 0.00545454 -0.00546467 -0.0146243 -0.00545455 -0.00546467 -0.0146243 0.00109091 -0.00548145 -0.0145218 0.00327273 -0.00549895 -0.0143149 0.0118314 -0.00716667 -0.0162429 0.0117065 -0.00379346 -0.0162284 0.0116257 -0.00533333 -0.0162147 0.0115243 -0.00716667 -0.0161926 0.0116098 -0.00860982 -0.0162116 0.0116257 -0.00716667 -0.0162147 0.0117282 -0.00533333 -0.0162314 0.0117282 -0.00716667 -0.0162314 0.0119019 -0.00359813 -0.0162476 0.0119351 -0.00533333 -0.0162489 0.0119351 -0.00716667 -0.0162489 0.0116098 -0.00389018 -0.0162116 0.011514 -0.00851404 -0.0161901 0.0114241 -0.00716667 -0.0161653 0.0115243 -0.00533333 -0.0161926 0.0114241 -0.00533333 -0.0161653 0.0113262 -0.00716667 -0.0161331 0.0113262 -0.00832622 -0.0161331 0.011514 -0.00398596 -0.0161901 0.0114194 -0.00408057 -0.0161639 0.0113262 -0.00533333 -0.0161331 0.0112346 -0.00823463 -0.0160978 0.0112286 -0.00716667 -0.0160953 0.0111449 -0.00435511 -0.016058 0.0110572 
-0.00444279 -0.0160138 0.01093 -0.00456999 -0.0159397 0.0109517 -0.00533333 -0.0159532 0.0107695 -0.00473046 -0.0158267 0.0107808 -0.00533333 -0.0158354 0.0106937 -0.00480634 -0.0157644 0.0106569 -0.00484312 -0.0157319 0.0106209 -0.00487908 -0.0156985 0.0105858 -0.00491421 -0.0156642 0.0105181 -0.0049819 -0.0155931 0.01048 -0.00716667 -0.0155499 0.0111449 -0.00814489 -0.016058 0.0112286 -0.00533333 -0.0160953 0.0111339 -0.00533333 -0.0160527 0.0110415 -0.00533333 -0.0160054 0.0109517 -0.00716667 -0.0159532 0.0110415 -0.00716667 -0.0160054 0.0109718 -0.0045282 -0.0159655 0.0108647 -0.00716667 -0.0158965 0.0108484 -0.00784838 -0.0158852 0.01093 -0.00793 -0.0159397 0.0107312 -0.00773121 -0.015796 0.0107695 -0.00776954 -0.0158267 0.0107808 -0.00716667 -0.0158354 0.0108647 -0.00533333 -0.0158965 0.0107001 -0.00533333 -0.01577 0.010623 -0.00716667 -0.0157005 0.0103936 -0.00510641 -0.0154414 0.0103648 -0.00736483 -0.0154016 0.0103371 -0.00733706 -0.0153611 0.0103535 -0.00533333 -0.0153853 0.0103535 -0.00716667 -0.0153853 0.01048 -0.00533333 -0.0155499 0.0102968 -0.00716667 -0.0152983 0.0103103 -0.00731029 -0.01532 0.0102968 -0.00533333 -0.0152983 0.0102845 -0.00521546 -0.0152782 0.0102598 -0.00524017 -0.0152358 0.0102446 -0.00533333 -0.0152085 0.0102362 -0.00723616 -0.0151928 0.0102446 -0.00716667 -0.0152085 0.0102136 -0.00721355 -0.0151492 0.0101973 -0.00716667 -0.0151161 0.010192 -0.00530798 -0.0151051 0.010192 -0.00719202 -0.0151051 0.0101716 -0.00532842 -0.0150605 0.0101973 -0.00533333 -0.0151161 0.0101716 -0.00717158 -0.0150605 0.010134 -0.00713401 -0.0149698 0.0101169 -0.00538309 -0.0149238 0.0100861 -0.00541388 -0.0148306 0.0100724 -0.00542755 -0.0147834 0.0100599 -0.00705994 -0.014736 0.0100599 -0.00544006 -0.014736 0.0100486 -0.0070486 -0.0146882 0.0101169 -0.00711691 -0.0149238 0.0100384 -0.00546157 -0.0146402 0.0100384 -0.00703843 -0.0146402 0.010015 -0.00548496 -0.0144948 0.0100054 -0.00549458 -0.0143971 0.0100096 -0.00700963 -0.014446 0.0100024 -0.00549759 -0.0143481 0.0100006 -0.0070006 -0.0142991 0.01 -0.007 -0.01425 0.0104146 -0.00533333 -0.0154692 0.0104146 -0.00716667 -0.0154692 0.0104233 -0.00742331 -0.0154805 0.0105515 -0.00755151 -0.0156291 0.0105858 -0.00758579 -0.0156642 0.010623 -0.00533333 -0.0157005 0.0107001 -0.00716667 -0.01577 0.0106937 -0.00769365 -0.0157644 0.011804 -0.00880396 -0.0162404 0.0118314 -0.00533333 -0.0162429 0.0111339 -0.00716667 -0.0160527 0.0105495 -0.00716667 -0.015627 0.0105495 -0.00533333 -0.015627 -0.01 -0.007 -0.01125 -0.012 -0.009 -0.01625 -0.0126456 -0.0115107 -0.01625 -0.0125 -0.0115 -0.01625 -0.012427 -0.0115027 -0.01625 -0.0120755 -0.0115946 -0.01625 -0.0120106 -0.0116279 -0.01625 -0.0118327 -0.0117552 -0.01625 -0.0116464 -0.0119791 -0.01625 -0.0116107 -0.0120428 -0.01625 -0.0115797 -0.0121089 -0.01625 -0.0115536 -0.0121771 -0.01625 -0.0115166 -0.0123184 -0.01625 -0.011506 -0.0123906 -0.01625 -0.0115007 -0.0125365 -0.01625 -0.0115536 -0.0128229 -0.01625 -0.0065 -0.017 -0.01625 -0.0116464 -0.0130209 -0.01625 -0.0117801 -0.0131941 -0.01625 -0.0118888 -0.0132915 -0.01625 -0.0120106 -0.013372 -0.01625 -0.0120755 -0.0134054 -0.01625 -0.0126456 -0.0134893 -0.01625 -0.0127881 -0.0134576 -0.01625 -0.0129894 -0.013372 -0.01625 -0.0131112 -0.0132915 -0.01625 -0.0131673 -0.0132448 -0.01625 -0.0133133 -0.0130819 -0.01625 -0.0133536 -0.0130209 -0.01625 -0.0133893 -0.0129572 -0.01625 -0.0134203 -0.0128911 -0.01625 -0.0134834 -0.0126816 -0.01625 -0.013494 -0.0126094 -0.01625 -0.0134993 -0.0125365 -0.01625 -0.0134834 -0.0123184 -0.01625 -0.0134675 
-0.0122471 -0.01625 -0.0134203 -0.0121089 -0.01625 -0.0133133 -0.0119181 -0.01625 -0.0132686 -0.0118603 -0.01625 -0.0132199 -0.0118059 -0.01625 -0.0131673 -0.0117552 -0.01625 -0.0131112 -0.0117085 -0.01625 -0.0129894 -0.0116279 -0.01625 -0.0129245 -0.0115946 -0.01625 -0.0128572 -0.011566 -0.01625 0.0065 -0.019 -0.01625 0.0115325 -0.0122471 -0.01625 0.011506 -0.0123906 -0.01625 0.0115166 -0.0123184 -0.01625 0.0115007 -0.0124635 -0.01625 0.0115007 -0.0125365 -0.01625 0.0115166 -0.0126816 -0.01625 0.0065 -0.017 -0.01625 0.0115797 -0.0128911 -0.01625 0.0115536 -0.0128229 -0.01625 0.0116464 -0.0130209 -0.01625 0.0119482 -0.013334 -0.01625 0.0120106 -0.013372 -0.01625 0.0122119 -0.0134576 -0.01625 0.0122826 -0.0134761 -0.01625 0.0123544 -0.0134893 -0.01625 0.012427 -0.0134973 -0.01625 0.0126456 -0.0134893 -0.01625 0.0127174 -0.0134761 -0.01625 -0.017 -0.0065 -0.01625 -0.0130518 0.011666 -0.01625 -0.0128572 0.011566 -0.01625 -0.0126456 0.0115107 -0.01625 -0.0127174 0.0115239 -0.01625 -0.0125 0.0115 -0.01625 -0.0122826 0.0115239 -0.01625 -0.0120755 0.0115946 -0.01625 -0.0120106 0.011628 -0.01625 -0.0119482 0.011666 -0.01625 -0.0116867 0.0119181 -0.01625 0.0116464 0.0119791 -0.01625 0.0123544 -0.0115107 -0.01625 0.0127174 -0.0115239 -0.01625 0.012 -0.009 -0.01625 0.0126456 -0.0115107 -0.01625 0.0129245 -0.0115946 -0.01625 0.0130518 -0.011666 -0.01625 0.0131112 -0.0117085 -0.01625 0.0131673 -0.0117552 -0.01625 0.0132199 -0.0118059 -0.01625 0.0133536 -0.0119791 -0.01625 0.0133893 -0.0120428 -0.01625 0.0134203 -0.0121089 -0.01625 0.0134464 -0.0121771 -0.01625 0.0134834 -0.0123184 -0.01625 0.0134993 -0.0124635 -0.01625 0.013494 -0.0126094 -0.01625 0.0134834 -0.0126816 -0.01625 0.0135 -0.0135 -0.01625 0.0134203 -0.0128911 -0.01625 0.0134464 -0.0128229 -0.01625 0.0132199 -0.0131941 -0.01625 0.0131673 -0.0132448 -0.01625 0.0115536 -0.0121771 -0.01625 0.0115797 -0.0121089 -0.01625 0.0116464 -0.0119791 -0.01625 0.0117314 -0.0118603 -0.01625 0.0119482 -0.011666 -0.01625 0.0122826 -0.0115239 -0.01625 0.0135 0.0155 -0.01625 0.0125 0.0135 -0.01625 0.0126456 0.0134893 -0.01625 0.0127881 0.0134576 -0.01625 0.0128572 0.013434 -0.01625 0.0129245 0.0134054 -0.01625 0.0131112 0.0132915 -0.01625 0.0133133 0.0130819 -0.01625 0.0133536 0.0130209 -0.01625 0.0134203 0.0128911 -0.01625 0.013494 0.0126094 -0.01625 0.0134993 0.0125365 -0.01625 0.0134993 0.0124635 -0.01625 0.0134675 0.0122471 -0.01625 0.0134464 0.0121771 -0.01625 0.0132686 0.0118603 -0.01625 0.0131112 0.0117085 -0.01625 0.0128572 0.011566 -0.01625 0.0126456 0.0115107 -0.01625 0.012573 0.0115027 -0.01625 0.0125 0.0115 -0.01625 0.012427 0.0115027 -0.01625 0.0121428 0.011566 -0.01625 0.0120755 0.0115946 -0.01625 0.0120106 0.011628 -0.01625 0.0119482 0.011666 -0.01625 0.0118888 0.0117085 -0.01625 0.0117801 0.0118059 -0.01625 0.0116107 0.0120428 -0.01625 0.0115797 0.0121089 -0.01625 0.011506 0.0123906 -0.01625 0.0065 0.017 -0.01625 0.0115536 0.0128229 -0.01625 0.0116107 0.0129572 -0.01625 0.0116867 0.0130819 -0.01625 0.0117801 0.0131941 -0.01625 0.0117314 0.0131397 -0.01625 0.0120755 0.0134054 -0.01625 -0.012427 0.0134973 -0.01625 -0.0123544 0.0134893 -0.01625 -0.0122826 0.0134761 -0.01625 -0.0122119 0.0134576 -0.01625 -0.0120106 0.013372 -0.01625 -0.0118888 0.0132915 -0.01625 -0.0116867 0.0130819 -0.01625 -0.0116464 0.0130209 -0.01625 -0.0115797 0.0128911 -0.01625 -0.0115007 0.0124635 -0.01625 -0.0133133 0.0119181 -0.01625 -0.0134675 0.0122471 -0.01625 -0.0134464 0.0121771 -0.01625 -0.0134993 0.0124635 -0.01625 -0.0134993 0.0125365 -0.01625 -0.019 0.0155 -0.01625 
-0.0134464 0.0128229 -0.01625 -0.0133133 0.0130819 -0.01625 -0.0132199 0.0131941 -0.01625 -0.0130518 0.013334 -0.01625 -0.0129894 0.013372 -0.01625 0.01 -0.0055 -0.01425 -0.01 -0.0055 -0.01425 -0.01 -0.0055 -0.01125 0.01 -0.007 -0.01125 0.01 -0.0055 -0.01125 0.0180625 0.0134973 0.013323 0.019 0.0134973 0.013323 0.0180625 0.0134893 0.0133956 0.017125 0.0134576 0.0135381 0.017125 0.0134761 0.0134674 0.0180625 0.0134761 0.0134674 0.017125 0.0134893 0.0133956 0.017125 0.0134973 0.013323 0.0180625 0.0134893 0.0131044 0.017125 0.0134576 0.0129619 0.0180625 0.0134054 0.0128255 0.017125 0.0134054 0.0128255 0.017125 0.013334 0.0126982 0.0180625 0.0130209 0.0123964 0.017125 0.0129572 0.0123607 0.0180625 0.0127529 0.0122825 0.017125 0.0126094 0.012256 0.0180625 0.0125365 0.0122507 0.017125 0.0125365 0.0122507 0.0180625 0.0121089 0.0123297 0.0180625 0.0120428 0.0123607 0.0180625 0.0117085 0.0126388 0.017125 0.011666 0.0126982 0.017125 0.0116279 0.0127606 0.017125 0.0115107 0.0131044 0.017125 0.0115 0.01325 0.0180625 0.0115107 0.0133956 0.0180625 0.0115239 0.0134674 0.0180625 0.0115424 0.0135381 0.0180625 0.011566 0.0136072 0.017125 0.0117552 0.0139173 0.017125 0.0119181 0.0140633 0.017125 0.0120428 0.0141393 0.017125 0.0121089 0.0141703 0.017125 0.0121771 0.0141964 0.017125 0.0123906 0.014244 0.017125 0.0126094 0.014244 0.017125 0.0126816 0.0142334 0.017125 0.0130209 0.0141036 0.017125 0.0131941 0.0139699 0.0180625 0.0132448 0.0139173 0.0180625 0.0132915 0.0138612 0.017125 0.0132915 0.0138612 0.017125 0.013334 0.0138018 0.017125 0.013372 0.0137394 0.0180625 0.0134054 0.0136745 0.017125 0.0134054 0.0136745 0.0180625 0.0134576 0.0135381 0.017125 0.013434 0.0136072 0.019 0.0134761 0.0134674 0.0180625 0.013434 0.0136072 0.019 0.013434 0.0136072 0.0180625 0.013372 0.0137394 0.019 0.013334 0.0138018 0.0180625 0.013334 0.0138018 0.019 0.0132915 0.0138612 0.0180625 0.0131941 0.0139699 0.0180625 0.0131397 0.0140186 0.017125 0.0131397 0.0140186 0.019 0.0131397 0.0140186 0.017125 0.0130819 0.0140633 0.019 0.0130819 0.0140633 0.0180625 0.0130819 0.0140633 0.0180625 0.0130209 0.0141036 0.019 0.0130209 0.0141036 0.017125 0.0129572 0.0141393 0.0180625 0.0129572 0.0141393 0.017125 0.0128911 0.0141703 0.0180625 0.0128911 0.0141703 0.019 0.0129572 0.0141393 0.019 0.0128911 0.0141703 0.0180625 0.0128229 0.0141964 0.017125 0.0127529 0.0142175 0.0180625 0.0127529 0.0142175 0.019 0.0127529 0.0142175 0.0180625 0.0126816 0.0142334 0.019 0.0126816 0.0142334 0.0180625 0.0126094 0.014244 0.017125 0.0125365 0.0142493 0.0180625 0.0125365 0.0142493 0.0180625 0.0124635 0.0142493 0.019 0.0124635 0.0142493 0.0180625 0.0123906 0.014244 0.017125 0.0123184 0.0142334 0.019 0.0123906 0.014244 0.017125 0.0122471 0.0142175 0.0180625 0.0123184 0.0142334 0.019 0.0123184 0.0142334 0.019 0.0122471 0.0142175 0.0180625 0.0122471 0.0142175 0.0180625 0.0121771 0.0141964 0.0180625 0.0121089 0.0141703 0.0180625 0.0120428 0.0141393 0.0180625 0.0119791 0.0141036 0.019 0.0119791 0.0141036 0.0180625 0.0119181 0.0140633 0.0180625 0.0118603 0.0140186 0.019 0.0118603 0.0140186 0.0180625 0.0118059 0.0139699 0.019 0.0118059 0.0139699 0.0180625 0.0117552 0.0139173 0.0180625 0.0117085 0.0138612 0.019 0.011666 0.0138018 0.017125 0.0116279 0.0137394 0.0180625 0.011666 0.0138018 0.0180625 0.0116279 0.0137394 0.019 0.0116279 0.0137394 0.0180625 0.0115946 0.0136745 0.019 0.0115946 0.0136745 0.019 0.011566 0.0136072 0.019 0.0115424 0.0135381 0.019 0.0115239 0.0134674 0.0180625 0.0115027 0.013323 0.019 0.0115027 0.013323 0.0180625 0.0115 0.01325 0.017125 0.0115027 
0.013177 0.0180625 0.0115027 0.013177 0.017125 0.0115239 0.0130326 0.0180625 0.0115107 0.0131044 0.0180625 0.0115239 0.0130326 0.0180625 0.0115424 0.0129619 0.019 0.0115424 0.0129619 0.0180625 0.011566 0.0128928 0.0180625 0.0115946 0.0128255 0.019 0.0115946 0.0128255 0.0180625 0.0116279 0.0127606 0.019 0.0116279 0.0127606 0.0180625 0.011666 0.0126982 0.019 0.011666 0.0126982 0.0180625 0.0117552 0.0125827 0.019 0.0118059 0.0125301 0.0180625 0.0118059 0.0125301 0.0180625 0.0118603 0.0124814 0.019 0.0118603 0.0124814 0.0180625 0.0119181 0.0124367 0.019 0.0119181 0.0124367 0.0180625 0.0119791 0.0123964 0.019 0.0119791 0.0123964 0.019 0.0120428 0.0123607 0.017125 0.0121089 0.0123297 0.0180625 0.0121771 0.0123036 0.0180625 0.0122471 0.0122825 0.017125 0.0122471 0.0122825 0.0180625 0.0123184 0.0122666 0.019 0.0123906 0.012256 0.0180625 0.0123906 0.012256 0.0180625 0.0124635 0.0122507 0.019 0.0125365 0.0122507 0.0180625 0.0126094 0.012256 0.019 0.0126094 0.012256 0.019 0.0126816 0.0122666 0.0180625 0.0126816 0.0122666 0.019 0.0127529 0.0122825 0.0180625 0.0128229 0.0123036 0.0180625 0.0128911 0.0123297 0.019 0.0128229 0.0123036 0.019 0.0128911 0.0123297 0.019 0.0129572 0.0123607 0.0180625 0.0129572 0.0123607 0.019 0.0130209 0.0123964 0.0180625 0.0130819 0.0124367 0.019 0.0130819 0.0124367 0.0180625 0.0131397 0.0124814 0.0180625 0.0131941 0.0125301 0.017125 0.0131941 0.0125301 0.019 0.0131941 0.0125301 0.017125 0.0132915 0.0126388 0.0180625 0.0132448 0.0125827 0.0180625 0.0132915 0.0126388 0.0180625 0.013334 0.0126982 0.019 0.0132915 0.0126388 0.0180625 0.013372 0.0127606 0.017125 0.013372 0.0127606 0.019 0.0134054 0.0128255 0.0180625 0.013434 0.0128928 0.019 0.013434 0.0128928 0.0180625 0.0134761 0.0130326 0.0180625 0.0134576 0.0129619 0.019 0.0134576 0.0129619 0.019 0.0134761 0.0130326 0.019 0.0134893 0.0131044 0.0180625 0.0135 0.01325 0.0180625 0.0134973 0.013177 -0.017125 0.0134973 0.013323 -0.01748 0.0134973 0.013323 -0.019 0.0134761 0.0134674 -0.019 0.0134576 0.0135381 -0.01748 0.0134893 0.0133956 -0.019 0.0135 0.01325 -0.01748 0.0134893 0.0131044 -0.019 0.0134761 0.0130326 -0.01748 0.013434 0.0128928 -0.019 0.0131941 0.0125301 -0.01748 0.0130819 0.0124367 -0.019 0.0130209 0.0123964 -0.01748 0.0128911 0.0123297 -0.019 0.0128911 0.0123297 -0.019 0.0128229 0.0123036 -0.019 0.0127529 0.0122825 -0.019 0.0126816 0.0122666 -0.019 0.0125365 0.0122507 -0.019 0.0124635 0.0122507 -0.019 0.0123906 0.012256 -0.019 0.0123184 0.0122666 -0.01748 0.0121771 0.0123036 -0.019 0.0122471 0.0122825 -0.01748 0.0119791 0.0123964 -0.019 0.0118059 0.0125301 -0.019 0.0117085 0.0126388 -0.019 0.011666 0.0126982 -0.01748 0.0115027 0.013177 -0.019 0.0115107 0.0133956 -0.01748 0.0115239 0.0134674 -0.019 0.0115239 0.0134674 -0.01748 0.0115424 0.0135381 -0.019 0.0116279 0.0137394 -0.01748 0.0117552 0.0139173 -0.01748 0.0118059 0.0139699 -0.019 0.0119181 0.0140633 -0.01748 0.0119791 0.0141036 -0.019 0.0119791 0.0141036 -0.019 0.0120428 0.0141393 -0.019 0.0121771 0.0141964 -0.019 0.0123184 0.0142334 -0.01748 0.0124635 0.0142493 -0.01748 0.0125365 0.0142493 -0.019 0.0127529 0.0142175 -0.019 0.0128229 0.0141964 -0.019 0.0129572 0.0141393 -0.01748 0.0130819 0.0140633 -0.019 0.0130209 0.0141036 -0.019 0.0131397 0.0140186 -0.01748 0.0132915 0.0138612 -0.019 0.013372 0.0137394 -0.01748 0.0134054 0.0136745 -0.01748 0.0134576 0.0135381 -0.01748 0.0134761 0.0134674 -0.017125 0.0134893 0.0133956 -0.017125 0.0134761 0.0134674 -0.019 0.013434 0.0136072 -0.01748 0.013434 0.0136072 -0.01748 0.013372 0.0137394 -0.017125 0.013372 0.0137394 
-0.01748 0.013334 0.0138018 -0.017125 0.013334 0.0138018 -0.017125 0.0132915 0.0138612 -0.019 0.0132448 0.0139173 -0.01748 0.0132448 0.0139173 -0.01748 0.0131941 0.0139699 -0.017125 0.0131941 0.0139699 -0.01748 0.0131397 0.0140186 -0.017125 0.0131397 0.0140186 -0.017125 0.0130819 0.0140633 -0.017125 0.0130209 0.0141036 -0.01748 0.0130209 0.0141036 -0.017125 0.0129572 0.0141393 -0.019 0.0128911 0.0141703 -0.01748 0.0129572 0.0141393 -0.01748 0.0128911 0.0141703 -0.01748 0.0128229 0.0141964 -0.01748 0.0127529 0.0142175 -0.017125 0.0128229 0.0141964 -0.017125 0.0127529 0.0142175 -0.01748 0.0126816 0.0142334 -0.01748 0.0126094 0.014244 -0.019 0.0126094 0.014244 -0.017125 0.0125365 0.0142493 -0.01748 0.0123906 0.014244 -0.01748 0.0123184 0.0142334 -0.01748 0.0122471 0.0142175 -0.017125 0.0123184 0.0142334 -0.01748 0.0121771 0.0141964 -0.017125 0.0122471 0.0142175 -0.017125 0.0121771 0.0141964 -0.017125 0.0121089 0.0141703 -0.01748 0.0121089 0.0141703 -0.01748 0.0120428 0.0141393 -0.01748 0.0119181 0.0140633 -0.01748 0.0118603 0.0140186 -0.019 0.0118603 0.0140186 -0.017125 0.0119181 0.0140633 -0.017125 0.0118603 0.0140186 -0.019 0.0118059 0.0139699 -0.017125 0.0118059 0.0139699 -0.01748 0.0117085 0.0138612 -0.017125 0.0117085 0.0138612 -0.01748 0.011666 0.0138018 -0.019 0.011666 0.0138018 -0.017125 0.011666 0.0138018 -0.01748 0.0116279 0.0137394 -0.019 0.0115946 0.0136745 -0.017125 0.0116279 0.0137394 -0.01748 0.0115946 0.0136745 -0.01748 0.011566 0.0136072 -0.017125 0.011566 0.0136072 -0.019 0.0115424 0.0135381 -0.01748 0.0115107 0.0133956 -0.019 0.0115027 0.013323 -0.01748 0.0115027 0.013323 -0.01748 0.0115 0.01325 -0.01748 0.0115107 0.0131044 -0.01748 0.0115239 0.0130326 -0.019 0.0115239 0.0130326 -0.017125 0.0115107 0.0131044 -0.019 0.0115424 0.0129619 -0.01748 0.0115424 0.0129619 -0.017125 0.0115239 0.0130326 -0.019 0.0115946 0.0128255 -0.01748 0.011566 0.0128928 -0.017125 0.011566 0.0128928 -0.017125 0.0115946 0.0128255 -0.01748 0.0115946 0.0128255 -0.01748 0.0116279 0.0127606 -0.01748 0.011666 0.0126982 -0.017125 0.0117085 0.0126388 -0.01748 0.0117085 0.0126388 -0.01748 0.0117552 0.0125827 -0.01748 0.0118059 0.0125301 -0.017125 0.0118059 0.0125301 -0.017125 0.0118603 0.0124814 -0.01748 0.0118603 0.0124814 -0.01748 0.0119181 0.0124367 -0.019 0.0119181 0.0124367 -0.017125 0.0119181 0.0124367 -0.01748 0.0120428 0.0123607 -0.019 0.0121771 0.0123036 -0.01748 0.0121089 0.0123297 -0.017125 0.0121771 0.0123036 -0.01748 0.0122471 0.0122825 -0.017125 0.0122471 0.0122825 -0.01748 0.0123184 0.0122666 -0.017125 0.0123906 0.012256 -0.01748 0.0123906 0.012256 -0.01748 0.0124635 0.0122507 -0.01748 0.0126094 0.012256 -0.01748 0.0125365 0.0122507 -0.01748 0.0126816 0.0122666 -0.01748 0.0127529 0.0122825 -0.017125 0.0127529 0.0122825 -0.01748 0.0128229 0.0123036 -0.019 0.0129572 0.0123607 -0.017125 0.0129572 0.0123607 -0.01748 0.0129572 0.0123607 -0.01748 0.0130209 0.0123964 -0.01748 0.0131397 0.0124814 -0.01748 0.0131941 0.0125301 -0.017125 0.0131397 0.0124814 -0.017125 0.0132448 0.0125827 -0.01748 0.0132448 0.0125827 -0.01748 0.0132915 0.0126388 -0.017125 0.0132915 0.0126388 -0.01748 0.013334 0.0126982 -0.019 0.013334 0.0126982 -0.017125 0.013334 0.0126982 -0.01748 0.013372 0.0127606 -0.019 0.0134054 0.0128255 -0.01748 0.0134054 0.0128255 -0.01748 0.0134576 0.0129619 -0.017125 0.0134576 0.0129619 -0.01748 0.0134761 0.0130326 -0.019 0.0134893 0.0131044 -0.017125 0.0134761 0.0130326 -0.019 0.0134973 0.013177 -0.017125 0.0134893 0.0131044 -0.01748 0.0134973 0.013177 -0.01748 0.0135 0.01325 -0.017 -0.0115239 
-0.0119674 -0.017 -0.0115107 -0.0118956 -0.01748 -0.0115107 -0.0118956 -0.01748 -0.0115027 -0.011823 -0.017 -0.0115 -0.01175 -0.017 -0.0115239 -0.0115326 -0.017 -0.0115424 -0.0114619 -0.017 -0.0118059 -0.0110301 -0.01748 -0.0119181 -0.0109367 -0.017 -0.0121089 -0.0108297 -0.017 -0.0121771 -0.0108036 -0.01748 -0.0122471 -0.0107825 -0.01748 -0.0124635 -0.0107507 -0.017 -0.0124635 -0.0107507 -0.01748 -0.0127529 -0.0107825 -0.017 -0.0127529 -0.0107825 -0.017 -0.0129572 -0.0108607 -0.017 -0.0131397 -0.0109814 -0.01748 -0.0131941 -0.0110301 -0.01748 -0.0132448 -0.0110827 -0.01748 -0.013334 -0.0111982 -0.01748 -0.013372 -0.0112606 -0.01748 -0.0134893 -0.0116044 -0.017 -0.0134973 -0.011677 -0.01748 -0.0135 -0.01175 -0.01748 -0.0134893 -0.0118956 -0.017 -0.0134973 -0.011823 -0.01748 -0.0134576 -0.0120381 -0.01748 -0.0134054 -0.0121745 -0.017 -0.0134054 -0.0121745 -0.017 -0.013334 -0.0123018 -0.01748 -0.0132915 -0.0123612 -0.017 -0.0132915 -0.0123612 -0.017 -0.0129572 -0.0126393 -0.017 -0.0128229 -0.0126964 -0.017 -0.0127529 -0.0127175 -0.01748 -0.0126816 -0.0127334 -0.017 -0.0126816 -0.0127334 -0.01748 -0.0125365 -0.0127493 -0.017 -0.0126094 -0.012744 -0.01748 -0.0124635 -0.0127493 -0.01748 -0.0122471 -0.0127175 -0.017 -0.0122471 -0.0127175 -0.01748 -0.0121089 -0.0126703 -0.01748 -0.0120428 -0.0126393 -0.017 -0.0119181 -0.0125633 -0.01748 -0.0118059 -0.0124699 -0.017 -0.0118059 -0.0124699 -0.01748 -0.011666 -0.0123018 -0.01748 -0.011566 -0.0121072 -0.01748 -0.0115239 -0.0119674 -0.01748 -0.0115424 -0.0120381 -0.019 -0.0115239 -0.0119674 -0.019 -0.0115424 -0.0120381 -0.01748 -0.0115946 -0.0121745 -0.017 -0.0115946 -0.0121745 -0.019 -0.0116279 -0.0122394 -0.01748 -0.0116279 -0.0122394 -0.01748 -0.0117085 -0.0123612 -0.019 -0.011666 -0.0123018 -0.017 -0.0117552 -0.0124173 -0.019 -0.0117552 -0.0124173 -0.01748 -0.0117552 -0.0124173 -0.019 -0.0118059 -0.0124699 -0.017 -0.0118603 -0.0125186 -0.01748 -0.0118603 -0.0125186 -0.019 -0.0119181 -0.0125633 -0.017 -0.0119791 -0.0126036 -0.01748 -0.0119181 -0.0125633 -0.017 -0.0120428 -0.0126393 -0.01748 -0.0119791 -0.0126036 -0.01748 -0.0121771 -0.0126964 -0.019 -0.0122471 -0.0127175 -0.019 -0.0123184 -0.0127334 -0.01748 -0.0123184 -0.0127334 -0.01748 -0.0123906 -0.012744 -0.019 -0.0123906 -0.012744 -0.01748 -0.0126094 -0.012744 -0.01748 -0.0127529 -0.0127175 -0.01748 -0.0128229 -0.0126964 -0.019 -0.0128229 -0.0126964 -0.01748 -0.0128911 -0.0126703 -0.01748 -0.0129572 -0.0126393 -0.019 -0.0130209 -0.0126036 -0.01748 -0.0130209 -0.0126036 -0.01748 -0.0130819 -0.0125633 -0.01748 -0.0131397 -0.0125186 -0.019 -0.0131397 -0.0125186 -0.019 -0.0131941 -0.0124699 -0.01748 -0.0131941 -0.0124699 -0.017 -0.0132448 -0.0124173 -0.019 -0.0132448 -0.0124173 -0.01748 -0.0132448 -0.0124173 -0.01748 -0.013334 -0.0123018 -0.019 -0.013334 -0.0123018 -0.01748 -0.013372 -0.0122394 -0.019 -0.013372 -0.0122394 -0.01748 -0.013434 -0.0121072 -0.017 -0.0134576 -0.0120381 -0.019 -0.0134576 -0.0120381 -0.019 -0.0134761 -0.0119674 -0.01748 -0.0134761 -0.0119674 -0.019 -0.0134893 -0.0118956 -0.01748 -0.0134973 -0.011823 -0.019 -0.0135 -0.01175 -0.01748 -0.0134973 -0.011677 -0.019 -0.0134973 -0.011677 -0.019 -0.0134893 -0.0116044 -0.01748 -0.0134761 -0.0115326 -0.017 -0.0134761 -0.0115326 -0.019 -0.0134761 -0.0115326 -0.01748 -0.0134576 -0.0114619 -0.019 -0.0134576 -0.0114619 -0.01748 -0.013434 -0.0113928 -0.01748 -0.0134054 -0.0113255 -0.019 -0.013434 -0.0113928 -0.019 -0.0134054 -0.0113255 -0.019 -0.013372 -0.0112606 -0.019 -0.013334 -0.0111982 -0.01748 -0.0132915 -0.0111388 -0.019 
-0.0132915 -0.0111388 -0.019 -0.0132448 -0.0110827 -0.01748 -0.0131397 -0.0109814 -0.019 -0.0131941 -0.0110301 -0.019 -0.0131397 -0.0109814 -0.01748 -0.0130819 -0.0109367 -0.017 -0.0130819 -0.0109367 -0.019 -0.0130819 -0.0109367 -0.01748 -0.0130209 -0.0108964 -0.017 -0.0130209 -0.0108964 -0.01748 -0.0129572 -0.0108607 -0.019 -0.0129572 -0.0108607 -0.01748 -0.0128911 -0.0108297 -0.019 -0.0128911 -0.0108297 -0.01748 -0.0128229 -0.0108036 -0.019 -0.0128229 -0.0108036 -0.01748 -0.0126816 -0.0107666 -0.019 -0.0126816 -0.0107666 -0.01748 -0.0126094 -0.010756 -0.01748 -0.0125365 -0.0107507 -0.019 -0.0125365 -0.0107507 -0.019 -0.0124635 -0.0107507 -0.01748 -0.0123906 -0.010756 -0.019 -0.0123906 -0.010756 -0.01748 -0.0123184 -0.0107666 -0.019 -0.0123184 -0.0107666 -0.017 -0.0122471 -0.0107825 -0.01748 -0.0121771 -0.0108036 -0.019 -0.0121771 -0.0108036 -0.01748 -0.0121089 -0.0108297 -0.01748 -0.0120428 -0.0108607 -0.01748 -0.0119791 -0.0108964 -0.019 -0.0119181 -0.0109367 -0.01748 -0.0118603 -0.0109814 -0.019 -0.0118603 -0.0109814 -0.01748 -0.0118059 -0.0110301 -0.017 -0.0117552 -0.0110827 -0.019 -0.0118059 -0.0110301 -0.01748 -0.0117552 -0.0110827 -0.019 -0.0117085 -0.0111388 -0.01748 -0.0117085 -0.0111388 -0.017 -0.011666 -0.0111982 -0.01748 -0.011666 -0.0111982 -0.019 -0.011666 -0.0111982 -0.01748 -0.0116279 -0.0112606 -0.01748 -0.0115946 -0.0113255 -0.019 -0.0116279 -0.0112606 -0.017 -0.011566 -0.0113928 -0.019 -0.0115946 -0.0113255 -0.01748 -0.0115424 -0.0114619 -0.01748 -0.011566 -0.0113928 -0.019 -0.011566 -0.0113928 -0.019 -0.0115424 -0.0114619 -0.01748 -0.0115239 -0.0115326 -0.017 -0.0115107 -0.0116044 -0.01748 -0.0115107 -0.0116044 -0.01748 -0.0115027 -0.011677 -0.017 -0.0115027 -0.011677 -0.019 -0.0115027 -0.011677 -0.01748 -0.0115 -0.01175 0.0175 -0.0134973 -0.011677 0.0175 -0.0134893 -0.0116044 0.019 -0.0134576 -0.0114619 0.019 -0.0134761 -0.0115326 0.019 -0.0134973 -0.011677 0.019 -0.0134973 -0.011823 0.019 -0.0134761 -0.0119674 0.0175 -0.0134054 -0.0121745 0.019 -0.013434 -0.0121072 0.019 -0.0132915 -0.0123612 0.0175 -0.0131941 -0.0124699 0.019 -0.0132448 -0.0124173 0.019 -0.0131941 -0.0124699 0.019 -0.0129572 -0.0126393 0.019 -0.0128911 -0.0126703 0.019 -0.0128229 -0.0126964 0.019 -0.0127529 -0.0127175 0.019 -0.0124635 -0.0127493 0.019 -0.0123184 -0.0127334 0.019 -0.0122471 -0.0127175 0.019 -0.0121771 -0.0126964 0.0175 -0.0119791 -0.0126036 0.019 -0.0118059 -0.0124699 0.019 -0.0117552 -0.0124173 0.0175 -0.0115027 -0.011677 0.0175 -0.0115107 -0.0116044 0.0175 -0.0115239 -0.0115326 0.019 -0.0115424 -0.0114619 0.019 -0.0115946 -0.0113255 0.019 -0.011666 -0.0111982 0.019 -0.0117552 -0.0110827 0.0175 -0.0119791 -0.0108964 0.019 -0.0121771 -0.0108036 0.0175 -0.0123184 -0.0107666 0.0175 -0.0124635 -0.0107507 0.019 -0.0123906 -0.010756 0.019 -0.0127529 -0.0107825 0.019 -0.0128911 -0.0108297 0.019 -0.0130209 -0.0108964 0.019 -0.0131397 -0.0109814 0.0175 -0.0132915 -0.0111388 0.019 -0.0132448 -0.0110827 0.019 -0.013372 -0.0112606 0.019 -0.013434 -0.0113928 0.0175 -0.0134576 -0.0114619 0.0175 -0.0134761 -0.0115326 0.016 -0.0134761 -0.0115326 0.016 -0.013434 -0.0113928 0.0175 -0.013434 -0.0113928 0.0175 -0.0134054 -0.0113255 0.016 -0.013372 -0.0112606 0.0175 -0.013334 -0.0111982 0.0175 -0.013372 -0.0112606 0.016 -0.013334 -0.0111982 0.019 -0.0132915 -0.0111388 0.0175 -0.0132448 -0.0110827 0.0175 -0.0131941 -0.0110301 0.016 -0.0132448 -0.0110827 0.0175 -0.0131397 -0.0109814 0.019 -0.0130819 -0.0109367 0.0175 -0.0130819 -0.0109367 0.016 -0.0130819 -0.0109367 0.016 -0.0130209 -0.0108964 0.0175 
-0.0130209 -0.0108964 0.0175 -0.0129572 -0.0108607 0.016 -0.0129572 -0.0108607 0.0175 -0.0128911 -0.0108297 0.0175 -0.0128229 -0.0108036 0.016 -0.0128911 -0.0108297 0.0175 -0.0127529 -0.0107825 0.016 -0.0128229 -0.0108036 0.0175 -0.0126816 -0.0107666 0.016 -0.0126816 -0.0107666 0.0175 -0.0126094 -0.010756 0.016 -0.0126094 -0.010756 0.0175 -0.0125365 -0.0107507 0.016 -0.0125365 -0.0107507 0.016 -0.0124635 -0.0107507 0.0175 -0.0123906 -0.010756 0.019 -0.0123184 -0.0107666 0.016 -0.0123184 -0.0107666 0.0175 -0.0122471 -0.0107825 0.0175 -0.0121771 -0.0108036 0.0175 -0.0121089 -0.0108297 0.019 -0.0120428 -0.0108607 0.016 -0.0121089 -0.0108297 0.0175 -0.0120428 -0.0108607 0.016 -0.0120428 -0.0108607 0.019 -0.0119791 -0.0108964 0.016 -0.0119791 -0.0108964 0.0175 -0.0119181 -0.0109367 0.0175 -0.0118603 -0.0109814 0.0175 -0.0118059 -0.0110301 0.016 -0.0118059 -0.0110301 0.0175 -0.0117552 -0.0110827 0.0175 -0.0117085 -0.0111388 0.016 -0.0117552 -0.0110827 0.0175 -0.011666 -0.0111982 0.016 -0.011666 -0.0111982 0.0175 -0.0116279 -0.0112606 0.0175 -0.0115946 -0.0113255 0.0175 -0.011566 -0.0113928 0.019 -0.011566 -0.0113928 0.016 -0.011566 -0.0113928 0.019 -0.0115239 -0.0115326 0.0175 -0.0115424 -0.0114619 0.019 -0.0115027 -0.011677 0.019 -0.0115 -0.01175 0.0175 -0.0115 -0.01175 0.0175 -0.0115027 -0.011823 0.0175 -0.0115107 -0.0118956 0.016 -0.0115027 -0.011823 0.019 -0.0115239 -0.0119674 0.0175 -0.0115239 -0.0119674 0.019 -0.0115424 -0.0120381 0.016 -0.0115239 -0.0119674 0.0175 -0.011566 -0.0121072 0.019 -0.011566 -0.0121072 0.0175 -0.0115424 -0.0120381 0.016 -0.0115946 -0.0121745 0.0175 -0.0115946 -0.0121745 0.0175 -0.0116279 -0.0122394 0.016 -0.0116279 -0.0122394 0.0175 -0.011666 -0.0123018 0.016 -0.0117085 -0.0123612 0.0175 -0.0117552 -0.0124173 0.0175 -0.0117085 -0.0123612 0.0175 -0.0118059 -0.0124699 0.016 -0.0117552 -0.0124173 0.0175 -0.0118603 -0.0125186 0.016 -0.0118603 -0.0125186 0.0175 -0.0119181 -0.0125633 0.019 -0.0119791 -0.0126036 0.016 -0.0119791 -0.0126036 0.016 -0.0120428 -0.0126393 0.0175 -0.0120428 -0.0126393 0.0175 -0.0121089 -0.0126703 0.016 -0.0121089 -0.0126703 0.0175 -0.0121771 -0.0126964 0.0175 -0.0122471 -0.0127175 0.016 -0.0121771 -0.0126964 0.0175 -0.0123184 -0.0127334 0.016 -0.0123184 -0.0127334 0.016 -0.0123906 -0.012744 0.0175 -0.0123906 -0.012744 0.0175 -0.0124635 -0.0127493 0.019 -0.0125365 -0.0127493 0.0175 -0.0125365 -0.0127493 0.019 -0.0126094 -0.012744 0.0175 -0.0126094 -0.012744 0.016 -0.0125365 -0.0127493 0.019 -0.0126816 -0.0127334 0.0175 -0.0126816 -0.0127334 0.016 -0.0126816 -0.0127334 0.0175 -0.0127529 -0.0127175 0.016 -0.0127529 -0.0127175 0.0175 -0.0128229 -0.0126964 0.016 -0.0128911 -0.0126703 0.0175 -0.0128911 -0.0126703 0.0175 -0.0129572 -0.0126393 0.016 -0.0129572 -0.0126393 0.016 -0.0130209 -0.0126036 0.0175 -0.0130209 -0.0126036 0.0175 -0.0130819 -0.0125633 0.0175 -0.0131397 -0.0125186 0.019 -0.0131397 -0.0125186 0.016 -0.0131941 -0.0124699 0.0175 -0.0132448 -0.0124173 0.0175 -0.0132915 -0.0123612 0.019 -0.013334 -0.0123018 0.0175 -0.013334 -0.0123018 0.0175 -0.013372 -0.0122394 0.0175 -0.013434 -0.0121072 0.0175 -0.0134576 -0.0120381 0.019 -0.0134576 -0.0120381 0.016 -0.0134576 -0.0120381 0.016 -0.0134761 -0.0119674 0.0175 -0.0134761 -0.0119674 0.0175 -0.0134893 -0.0118956 0.016 -0.0134893 -0.0118956 0.019 -0.0135 -0.01175 0.0175 -0.0134973 -0.011823 0.0175 -0.0135 -0.01175 0.016 -0.0135 -0.01175 0.016 0.0134893 -0.0118956 0.019 0.0134576 -0.0120381 0.019 0.0134761 -0.0119674 0.0175 0.0134893 -0.0118956 0.0175 0.0134973 -0.011823 0.019 0.0134973 
-0.011823 0.019 0.0134973 -0.011677 0.019 0.0134761 -0.0115326 0.019 0.0134576 -0.0114619 0.0175 0.0134054 -0.0113255 0.019 0.013434 -0.0113928 0.019 0.0134054 -0.0113255 0.019 0.013372 -0.0112606 0.0175 0.0132915 -0.0111388 0.0175 0.0132448 -0.0110827 0.019 0.0132915 -0.0111388 0.019 0.0132448 -0.0110827 0.019 0.0131941 -0.0110301 0.0175 0.0130819 -0.0109367 0.019 0.0130209 -0.0108964 0.0175 0.0129572 -0.0108607 0.0175 0.0128911 -0.0108297 0.0175 0.0128229 -0.0108036 0.0175 0.0126816 -0.0107666 0.0175 0.0126094 -0.010756 0.0175 0.0123906 -0.010756 0.019 0.0124635 -0.0107507 0.0175 0.0119791 -0.0108964 0.0175 0.011628 -0.0112606 0.0175 0.0115946 -0.0113255 0.019 0.011566 -0.0113928 0.0175 0.0115107 -0.0118956 0.019 0.0115027 -0.011823 0.019 0.0115239 -0.0119674 0.0175 0.011566 -0.0121072 0.019 0.0115946 -0.0121745 0.019 0.011666 -0.0123018 0.019 0.0117552 -0.0124173 0.0175 0.0118603 -0.0125186 0.0175 0.0119791 -0.0126036 0.019 0.0120428 -0.0126393 0.019 0.0121089 -0.0126703 0.0175 0.0121771 -0.0126964 0.0175 0.0123184 -0.0127334 0.019 0.0123184 -0.0127334 0.019 0.0125365 -0.0127493 0.019 0.0126094 -0.012744 0.019 0.0127529 -0.0127175 0.0175 0.0128911 -0.0126703 0.019 0.0128229 -0.0126964 0.0175 0.0130209 -0.0126036 0.0175 0.0132448 -0.0124173 0.0175 0.0132915 -0.0123612 0.019 0.0132915 -0.0123612 0.019 0.013334 -0.0123018 0.0175 0.013434 -0.0121072 0.0175 0.0134761 -0.0119674 0.016 0.0134761 -0.0119674 0.016 0.0134576 -0.0120381 0.0175 0.0134576 -0.0120381 0.0175 0.0134054 -0.0121745 0.016 0.013434 -0.0121072 0.019 0.013372 -0.0122394 0.0175 0.013372 -0.0122394 0.0175 0.013334 -0.0123018 0.016 0.0132915 -0.0123612 0.016 0.0132448 -0.0124173 0.0175 0.0131941 -0.0124699 0.0175 0.0131397 -0.0125186 0.016 0.0131941 -0.0124699 0.0175 0.0130819 -0.0125633 0.019 0.0130209 -0.0126036 0.0175 0.0129572 -0.0126393 0.016 0.0129572 -0.0126393 0.019 0.0128911 -0.0126703 0.0175 0.0128229 -0.0126964 0.016 0.0128911 -0.0126703 0.0175 0.0127529 -0.0127175 0.016 0.0127529 -0.0127175 0.0175 0.0126816 -0.0127334 0.0175 0.0126094 -0.012744 0.016 0.0126816 -0.0127334 0.0175 0.0125365 -0.0127493 0.0175 0.0124635 -0.0127493 0.0175 0.0123906 -0.012744 0.016 0.0124635 -0.0127493 0.016 0.0123906 -0.012744 0.0175 0.0122471 -0.0127175 0.019 0.0122471 -0.0127175 0.016 0.0122471 -0.0127175 0.016 0.0121771 -0.0126964 0.0175 0.0121089 -0.0126703 0.0175 0.0120428 -0.0126393 0.0175 0.0119181 -0.0125633 0.016 0.0119791 -0.0126036 0.019 0.0118603 -0.0125186 0.0175 0.0118059 -0.0124699 0.0175 0.0117552 -0.0124173 0.016 0.0117552 -0.0124173 0.019 0.0117085 -0.0123612 0.0175 0.0117085 -0.0123612 0.0175 0.011666 -0.0123018 0.016 0.011666 -0.0123018 0.019 0.011628 -0.0122394 0.016 0.011628 -0.0122394 0.0175 0.011628 -0.0122394 0.0175 0.0115946 -0.0121745 0.016 0.011566 -0.0121072 0.0175 0.0115424 -0.0120381 0.016 0.0115424 -0.0120381 0.019 0.0115107 -0.0118956 0.0175 0.0115239 -0.0119674 0.016 0.0115107 -0.0118956 0.0175 0.0115027 -0.011823 0.016 0.0115027 -0.011823 0.0175 0.0115 -0.01175 0.0175 0.0115027 -0.011677 0.0175 0.0115107 -0.0116044 0.016 0.0115107 -0.0116044 0.0175 0.0115239 -0.0115326 0.0175 0.0115424 -0.0114619 0.0175 0.011566 -0.0113928 0.019 0.0115946 -0.0113255 0.016 0.011628 -0.0112606 0.019 0.011666 -0.0111982 0.019 0.0117085 -0.0111388 0.0175 0.011666 -0.0111982 0.0175 0.0117085 -0.0111388 0.019 0.0117552 -0.0110827 0.0175 0.0117552 -0.0110827 0.019 0.0118059 -0.0110301 0.0175 0.0118059 -0.0110301 0.0175 0.0118603 -0.0109814 0.0175 0.0119181 -0.0109367 0.016 0.0119181 -0.0109367 0.016 0.0119791 -0.0108964 0.016 
0.0120428 -0.0108607 0.0175 0.0120428 -0.0108607 0.0175 0.0121089 -0.0108297 0.016 0.0121089 -0.0108297 0.0175 0.0121771 -0.0108036 0.0175 0.0122471 -0.0107825 0.0175 0.0123184 -0.0107666 0.019 0.0123184 -0.0107666 0.019 0.0123906 -0.010756 0.016 0.0123906 -0.010756 0.0175 0.0124635 -0.0107507 0.0175 0.0125365 -0.0107507 0.016 0.0125365 -0.0107507 0.016 0.0126094 -0.010756 0.0175 0.0127529 -0.0107825 0.019 0.0127529 -0.0107825 0.019 0.0129572 -0.0108607 0.0175 0.0130209 -0.0108964 0.0175 0.0131397 -0.0109814 0.019 0.0131397 -0.0109814 0.016 0.0130819 -0.0109367 0.0175 0.0131941 -0.0110301 0.016 0.0131941 -0.0110301 0.0175 0.013334 -0.0111982 0.019 0.013334 -0.0111982 0.016 0.0132915 -0.0111388 0.0175 0.013372 -0.0112606 0.016 0.0134054 -0.0113255 0.016 0.013434 -0.0113928 0.0175 0.013434 -0.0113928 0.0175 0.0134576 -0.0114619 0.0175 0.0134893 -0.0116044 0.0175 0.0134761 -0.0115326 0.016 0.0134893 -0.0116044 0.0175 0.0134973 -0.011677 0.016 0.0134973 -0.011677 0.0175 0.0135 -0.01175 0.019 0.0135 -0.01175 -0.019 -0.0115107 0.0131044 -0.017125 -0.0115239 0.0130326 -0.017125 -0.0115424 0.0129619 -0.01748 -0.0115107 0.0131044 -0.01748 -0.0115 0.01325 -0.01748 -0.0115027 0.013323 -0.017125 -0.0115107 0.0133956 -0.017125 -0.0115424 0.0135381 -0.017125 -0.011666 0.0138018 -0.017125 -0.0117552 0.0139173 -0.017125 -0.0118603 0.0140186 -0.01748 -0.0120428 0.0141393 -0.017125 -0.0119791 0.0141036 -0.017125 -0.0121771 0.0141964 -0.01748 -0.0122471 0.0142175 -0.017125 -0.0123184 0.0142334 -0.01748 -0.0124635 0.0142493 -0.017125 -0.0123906 0.014244 -0.017125 -0.0125365 0.0142493 -0.017125 -0.0126094 0.014244 -0.017125 -0.0126816 0.0142334 -0.017125 -0.0128229 0.0141964 -0.017125 -0.0131397 0.0140186 -0.01748 -0.0132448 0.0139173 -0.017125 -0.0132448 0.0139173 -0.01748 -0.013372 0.0137394 -0.017125 -0.013334 0.0138018 -0.01748 -0.013434 0.0136072 -0.017125 -0.0134054 0.0136745 -0.01748 -0.0134576 0.0135381 -0.017125 -0.0134576 0.0135381 -0.01748 -0.0134893 0.0133956 -0.017125 -0.0134761 0.0134674 -0.017125 -0.0134973 0.013323 -0.01748 -0.0134893 0.0131044 -0.01748 -0.0134761 0.0130326 -0.01748 -0.0134054 0.0128255 -0.01748 -0.013372 0.0127606 -0.017125 -0.013334 0.0126982 -0.01748 -0.0132448 0.0125827 -0.017125 -0.0131941 0.0125301 -0.01748 -0.0129572 0.0123607 -0.01748 -0.0128911 0.0123297 -0.017125 -0.0126816 0.0122666 -0.017125 -0.0126094 0.012256 -0.017125 -0.0125365 0.0122507 -0.017125 -0.0124635 0.0122507 -0.017125 -0.0123906 0.012256 -0.01748 -0.0121771 0.0123036 -0.017125 -0.0121089 0.0123297 -0.017125 -0.011666 0.0126982 -0.01748 -0.011628 0.0127606 -0.01748 -0.0115239 0.0130326 -0.019 -0.0115239 0.0130326 -0.01748 -0.0115424 0.0129619 -0.019 -0.011566 0.0128928 -0.01748 -0.0115946 0.0128255 -0.01748 -0.011566 0.0128928 -0.019 -0.0115946 0.0128255 -0.017125 -0.0117085 0.0126388 -0.01748 -0.011666 0.0126982 -0.01748 -0.0117085 0.0126388 -0.01748 -0.0117552 0.0125827 -0.019 -0.0117085 0.0126388 -0.017125 -0.0118603 0.0124814 -0.01748 -0.0118059 0.0125301 -0.01748 -0.0118603 0.0124814 -0.01748 -0.0119181 0.0124367 -0.017125 -0.0119791 0.0123964 -0.01748 -0.0119791 0.0123964 -0.017125 -0.0120428 0.0123607 -0.01748 -0.0120428 0.0123607 -0.019 -0.0120428 0.0123607 -0.01748 -0.0121089 0.0123297 -0.01748 -0.0122471 0.0122825 -0.017125 -0.0122471 0.0122825 -0.019 -0.0122471 0.0122825 -0.017125 -0.0123184 0.0122666 -0.01748 -0.0123184 0.0122666 -0.019 -0.0123184 0.0122666 -0.019 -0.0123906 0.012256 -0.01748 -0.0124635 0.0122507 -0.01748 -0.0123906 0.012256 -0.019 -0.0124635 0.0122507 -0.019 -0.0125365 
0.0122507 -0.01748 -0.0125365 0.0122507 -0.01748 -0.0126094 0.012256 -0.01748 -0.0126816 0.0122666 -0.019 -0.0126094 0.012256 -0.019 -0.0126816 0.0122666 -0.017125 -0.0127529 0.0122825 -0.01748 -0.0127529 0.0122825 -0.01748 -0.0128229 0.0123036 -0.019 -0.0128911 0.0123297 -0.01748 -0.0130209 0.0123964 -0.019 -0.0130209 0.0123964 -0.017125 -0.0130819 0.0124367 -0.019 -0.0130819 0.0124367 -0.01748 -0.0131397 0.0124814 -0.01748 -0.0130819 0.0124367 -0.019 -0.0131397 0.0124814 -0.017125 -0.0132448 0.0125827 -0.01748 -0.0131941 0.0125301 -0.019 -0.0131941 0.0125301 -0.019 -0.0132448 0.0125827 -0.01748 -0.0132915 0.0126388 -0.019 -0.0132915 0.0126388 -0.017125 -0.013372 0.0127606 -0.01748 -0.013334 0.0126982 -0.017125 -0.0134054 0.0128255 -0.019 -0.0134054 0.0128255 -0.01748 -0.013434 0.0128928 -0.019 -0.013434 0.0128928 -0.01748 -0.0134576 0.0129619 -0.017125 -0.0134761 0.0130326 -0.019 -0.0134576 0.0129619 -0.019 -0.0134761 0.0130326 -0.019 -0.0134893 0.0131044 -0.017125 -0.0134973 0.013177 -0.01748 -0.0134973 0.013177 -0.017125 -0.0135 0.01325 -0.01748 -0.0135 0.01325 -0.019 -0.0135 0.01325 -0.01748 -0.0134973 0.013323 -0.017125 -0.0134893 0.0133956 -0.019 -0.0134893 0.0133956 -0.01748 -0.0134761 0.0134674 -0.019 -0.0134761 0.0134674 -0.017125 -0.013434 0.0136072 -0.01748 -0.0134054 0.0136745 -0.017125 -0.013372 0.0137394 -0.01748 -0.013334 0.0138018 -0.017125 -0.0132915 0.0138612 -0.01748 -0.0132915 0.0138612 -0.01748 -0.0131941 0.0139699 -0.017125 -0.0131941 0.0139699 -0.019 -0.0131941 0.0139699 -0.01748 -0.0131397 0.0140186 -0.01748 -0.0130819 0.0140633 -0.017125 -0.0130819 0.0140633 -0.019 -0.0130819 0.0140633 -0.01748 -0.0130209 0.0141036 -0.01748 -0.0129572 0.0141393 -0.019 -0.0130209 0.0141036 -0.019 -0.0128911 0.0141703 -0.01748 -0.0128911 0.0141703 -0.019 -0.0128229 0.0141964 -0.01748 -0.0127529 0.0142175 -0.017125 -0.0127529 0.0142175 -0.01748 -0.0128229 0.0141964 -0.019 -0.0127529 0.0142175 -0.01748 -0.0126816 0.0142334 -0.01748 -0.0126094 0.014244 -0.01748 -0.0125365 0.0142493 -0.019 -0.0126094 0.014244 -0.019 -0.0124635 0.0142493 -0.01748 -0.0123906 0.014244 -0.019 -0.0123906 0.014244 -0.01748 -0.0123184 0.0142334 -0.019 -0.0123184 0.0142334 -0.01748 -0.0121771 0.0141964 -0.01748 -0.0121089 0.0141703 -0.019 -0.0120428 0.0141393 -0.01748 -0.0119791 0.0141036 -0.01748 -0.0119181 0.0140633 -0.01748 -0.0118603 0.0140186 -0.01748 -0.0118059 0.0139699 -0.01748 -0.0117085 0.0138612 -0.01748 -0.0117552 0.0139173 -0.01748 -0.011666 0.0138018 -0.019 -0.0117085 0.0138612 -0.019 -0.011666 0.0138018 -0.017125 -0.0115946 0.0136745 -0.01748 -0.011628 0.0137394 -0.01748 -0.0115946 0.0136745 -0.019 -0.0115946 0.0136745 -0.01748 -0.011566 0.0136072 -0.01748 -0.0115424 0.0135381 -0.01748 -0.0115239 0.0134674 -0.017125 -0.0115239 0.0134674 -0.01748 -0.0115107 0.0133956 -0.019 -0.0115239 0.0134674 -0.017125 -0.0115027 0.013323 -0.019 -0.0115 0.01325 -0.01748 -0.0115027 0.013177 0.0180625 -0.0134893 0.0133956 0.017125 -0.0134893 0.0133956 0.0180625 -0.0134761 0.0134674 0.019 -0.0134761 0.0134674 0.0180625 -0.0134973 0.013323 0.019 -0.0135 0.01325 0.0180625 -0.0134761 0.0130326 0.019 -0.0134893 0.0131044 0.019 -0.0134761 0.0130326 0.019 -0.0134054 0.0128255 0.019 -0.013334 0.0126982 0.0180625 -0.0131941 0.0125301 0.019 -0.0131397 0.0124814 0.019 -0.0128911 0.0123297 0.019 -0.0128229 0.0123036 0.019 -0.0127529 0.0122825 0.019 -0.0123906 0.012256 0.019 -0.0123184 0.0122666 0.019 -0.0122471 0.0122825 0.0180625 -0.0120428 0.0123607 0.019 -0.0121089 0.0123297 0.019 -0.0118059 0.0125301 0.019 -0.0117085 
0.0126388 0.0180625 -0.011666 0.0126982 0.019 -0.011666 0.0126982 0.019 -0.011628 0.0127606 0.019 -0.0115946 0.0128255 0.019 -0.0115 0.01325 0.0180625 -0.0115107 0.0133956 0.019 -0.0115027 0.013323 0.019 -0.0115239 0.0134674 0.0180625 -0.0115424 0.0135381 0.0180625 -0.0115946 0.0136745 0.019 -0.011566 0.0136072 0.0180625 -0.011628 0.0137394 0.019 -0.0117085 0.0138612 0.0180625 -0.0122471 0.0142175 0.019 -0.0125365 0.0142493 0.0180625 -0.0126816 0.0142334 0.019 -0.0127529 0.0142175 0.019 -0.0128911 0.0141703 0.019 -0.0129572 0.0141393 0.019 -0.0130819 0.0140633 0.0180625 -0.0131397 0.0140186 0.019 -0.0131397 0.0140186 0.0180625 -0.0132448 0.0139173 0.0180625 -0.013372 0.0137394 0.0180625 -0.0134054 0.0136745 0.019 -0.0134054 0.0136745 0.0180625 -0.0134576 0.0135381 0.0180625 -0.013434 0.0136072 0.017125 -0.013372 0.0137394 0.0180625 -0.013334 0.0138018 0.0180625 -0.0132915 0.0138612 0.019 -0.0132448 0.0139173 0.017125 -0.0132915 0.0138612 0.0180625 -0.0131941 0.0139699 0.017125 -0.0131397 0.0140186 0.0180625 -0.0130819 0.0140633 0.0180625 -0.0130209 0.0141036 0.0180625 -0.0128911 0.0141703 0.0180625 -0.0129572 0.0141393 0.017125 -0.0128911 0.0141703 0.019 -0.0128229 0.0141964 0.0180625 -0.0128229 0.0141964 0.0180625 -0.0127529 0.0142175 0.017125 -0.0128229 0.0141964 0.017125 -0.0127529 0.0142175 0.017125 -0.0126816 0.0142334 0.0180625 -0.0126094 0.014244 0.017125 -0.0125365 0.0142493 0.0180625 -0.0125365 0.0142493 0.0180625 -0.0124635 0.0142493 0.019 -0.0123906 0.014244 0.017125 -0.0123906 0.014244 0.0180625 -0.0123906 0.014244 0.0180625 -0.0123184 0.0142334 0.017125 -0.0123184 0.0142334 0.017125 -0.0122471 0.0142175 0.0180625 -0.0121771 0.0141964 0.019 -0.0121089 0.0141703 0.0180625 -0.0121089 0.0141703 0.0180625 -0.0120428 0.0141393 0.017125 -0.0121089 0.0141703 0.019 -0.0119791 0.0141036 0.0180625 -0.0119791 0.0141036 0.0180625 -0.0119181 0.0140633 0.0180625 -0.0118059 0.0139699 0.0180625 -0.0118603 0.0140186 0.017125 -0.0117552 0.0139173 0.0180625 -0.0117552 0.0139173 0.0180625 -0.0117085 0.0138612 0.017125 -0.0117085 0.0138612 0.0180625 -0.011666 0.0138018 0.017125 -0.011666 0.0138018 0.017125 -0.011628 0.0137394 0.017125 -0.0115946 0.0136745 0.0180625 -0.011566 0.0136072 0.017125 -0.011566 0.0136072 0.017125 -0.0115239 0.0134674 0.0180625 -0.0115239 0.0134674 0.0180625 -0.0115027 0.013323 0.017125 -0.0115027 0.013323 0.017125 -0.0115 0.01325 0.0180625 -0.0115 0.01325 0.019 -0.0115027 0.013177 0.0180625 -0.0115027 0.013177 0.017125 -0.0115027 0.013177 0.019 -0.0115107 0.0131044 0.0180625 -0.0115107 0.0131044 0.0180625 -0.0115239 0.0130326 0.019 -0.0115239 0.0130326 0.017125 -0.0115107 0.0131044 0.017125 -0.0115239 0.0130326 0.0180625 -0.0115424 0.0129619 0.017125 -0.0115424 0.0129619 0.0180625 -0.011566 0.0128928 0.0180625 -0.0115946 0.0128255 0.0180625 -0.011628 0.0127606 0.017125 -0.011666 0.0126982 0.019 -0.0117552 0.0125827 0.0180625 -0.0117085 0.0126388 0.0180625 -0.0117552 0.0125827 0.017125 -0.0117552 0.0125827 0.0180625 -0.0118059 0.0125301 0.017125 -0.0118059 0.0125301 0.019 -0.0118603 0.0124814 0.0180625 -0.0118603 0.0124814 0.017125 -0.0118603 0.0124814 0.0180625 -0.0119181 0.0124367 0.0180625 -0.0119791 0.0123964 0.017125 -0.0119791 0.0123964 0.0180625 -0.0121089 0.0123297 0.0180625 -0.0121771 0.0123036 0.017125 -0.0121771 0.0123036 0.0180625 -0.0122471 0.0122825 0.017125 -0.0123184 0.0122666 0.0180625 -0.0123184 0.0122666 0.0180625 -0.0123906 0.012256 0.017125 -0.0123906 0.012256 0.017125 -0.0124635 0.0122507 0.0180625 -0.0125365 0.0122507 0.0180625 -0.0124635 0.0122507 
0.019 -0.0126094 0.012256 0.017125 -0.0126094 0.012256 0.0180625 -0.0126094 0.012256 0.019 -0.0126816 0.0122666 0.0180625 -0.0126816 0.0122666 0.017125 -0.0126816 0.0122666 0.0180625 -0.0127529 0.0122825 0.0180625 -0.0128229 0.0123036 0.0180625 -0.0128911 0.0123297 0.0180625 -0.0129572 0.0123607 0.0180625 -0.0130209 0.0123964 0.017125 -0.0130209 0.0123964 0.0180625 -0.0131397 0.0124814 0.0180625 -0.0130819 0.0124367 0.017125 -0.0131397 0.0124814 0.019 -0.0131941 0.0125301 0.019 -0.0132448 0.0125827 0.017125 -0.0131941 0.0125301 0.0180625 -0.0132448 0.0125827 0.017125 -0.0132448 0.0125827 0.0180625 -0.0132915 0.0126388 0.0180625 -0.013334 0.0126982 0.017125 -0.013334 0.0126982 0.0180625 -0.013372 0.0127606 0.017125 -0.013372 0.0127606 0.0180625 -0.0134054 0.0128255 0.0180625 -0.013434 0.0128928 0.019 -0.0134576 0.0129619 0.0180625 -0.0134576 0.0129619 0.017125 -0.013434 0.0128928 0.0180625 -0.0134893 0.0131044 0.017125 -0.0134893 0.0131044 0.0180625 -0.0134973 0.013177 0.0180625 -0.0135 0.01325 0.016 -0.0134973 -0.011677 0.016 -0.0134893 -0.0116044 0.016 -0.0134576 -0.0114619 0.016 -0.0134054 -0.0113255 0.016 -0.0132915 -0.0111388 0.016 -0.0131941 -0.0110301 0.016 -0.0131397 -0.0109814 0.016 -0.0127529 -0.0107825 0.016 -0.0123906 -0.010756 0.016 -0.0122471 -0.0107825 0.016 -0.0121771 -0.0108036 0.016 -0.0119181 -0.0109367 0.016 -0.0118603 -0.0109814 0.016 -0.0117085 -0.0111388 0.016 -0.0116279 -0.0112606 0.016 -0.0115424 -0.0114619 0.016 -0.0115946 -0.0113255 0.016 -0.0065 -0.01625 0.016 -0.0118059 -0.0124699 0.016 -0.0135 -0.01625 0.016 -0.0119181 -0.0125633 0.016 -0.0122471 -0.0127175 0.016 -0.0124635 -0.0127493 0.016 -0.0126094 -0.012744 0.016 -0.0128229 -0.0126964 0.016 -0.0130819 -0.0125633 0.016 -0.0131397 -0.0125186 0.016 -0.0132448 -0.0124173 0.016 -0.013334 -0.0123018 0.016 -0.0132915 -0.0123612 0.016 -0.013372 -0.0122394 0.016 -0.013434 -0.0121072 0.016 -0.0134054 -0.0121745 0.016 -0.0134973 -0.011823 0.016 -0.011666 -0.0123018 0.016 -0.011566 -0.0121072 0.016 -0.0115424 -0.0120381 0.016 -0.0115107 -0.0118956 0.016 -0.0115 -0.01175 0.016 -0.0115027 -0.011677 0.016 -0.0115107 -0.0116044 0.016 -0.0115239 -0.0115326 0.016 0.0135 -0.00225 0.016 0.0134973 -0.011823 0.016 0.0135 -0.01175 0.016 0.0134761 -0.0115326 0.016 0.0134576 -0.0114619 0.016 0.0155 -0.00225 0.016 0.013372 -0.0112606 0.016 0.013334 -0.0111982 0.016 0.0132448 -0.0110827 0.016 0.0131397 -0.0109814 0.016 0.0130209 -0.0108964 0.016 0.0129572 -0.0108607 0.016 0.0128229 -0.0108036 0.016 0.0128911 -0.0108297 0.016 0.0127529 -0.0107825 0.016 0.0126816 -0.0107666 0.016 0.0124635 -0.0107507 0.016 0.0123184 -0.0107666 0.016 0.0121771 -0.0108036 0.016 0.0122471 -0.0107825 0.016 0.0118603 -0.0109814 0.016 0.0118059 -0.0110301 0.016 0.0117552 -0.0110827 0.016 0.011666 -0.0111982 0.016 0.0135 -0.00425 0.016 0.0117085 -0.0111388 0.016 0.0115946 -0.0113255 0.016 0.011566 -0.0113928 0.016 0.0115424 -0.0114619 0.016 0.0115239 -0.0115326 0.016 0.0115027 -0.011677 0.016 0.0115 -0.01175 0.016 0.0115239 -0.0119674 0.016 0.0115946 -0.0121745 0.016 0.0117085 -0.0123612 0.016 0.0118059 -0.0124699 0.016 0.0118603 -0.0125186 0.016 0.0119181 -0.0125633 0.016 0.0120428 -0.0126393 0.016 0.0121089 -0.0126703 0.016 0.0065 -0.01625 0.016 0.0155 -0.01625 0.016 0.0123184 -0.0127334 0.016 0.0126094 -0.012744 0.016 0.0125365 -0.0127493 0.016 0.0128229 -0.0126964 0.016 0.0130209 -0.0126036 0.016 0.0130819 -0.0125633 0.016 0.0131397 -0.0125186 0.016 0.013334 -0.0123018 0.016 0.013372 -0.0122394 0.016 0.0134054 -0.0121745 -0.017125 -0.0122471 0.0142175 
-0.017125 -0.0135 0.01775 -0.017125 -0.0124635 0.0142493 -0.017125 -0.0129572 0.0141393 -0.017125 -0.0128911 0.0141703 -0.017125 -0.0130209 0.0141036 -0.017125 -0.0121089 0.0141703 -0.017125 0.0120428 0.0141393 -0.017125 -0.0120428 0.0141393 -0.017125 0.0119791 0.0141036 -0.017125 0.0117552 0.0139173 -0.017125 -0.0118059 0.0139699 -0.017125 -0.0117085 0.0138612 -0.017125 -0.011628 0.0137394 -0.017125 0.0115946 0.0136745 -0.017125 -0.011566 0.0136072 -0.017125 0.0115239 0.0134674 -0.017125 0.0115107 0.0133956 -0.017125 0.0115027 0.013323 -0.017125 -0.0115946 0.0128255 -0.017125 0.011666 0.0126982 -0.017125 -0.011628 0.0127606 -0.017125 0.0117552 0.0125827 -0.017125 -0.0117552 0.0125827 -0.017125 -0.0118059 0.0125301 -0.017125 -0.0119181 0.0124367 -0.017125 -0.0121771 0.0123036 -0.017125 -0.0128229 0.0123036 -0.017125 -0.0128911 0.0123297 -0.017125 -0.0129572 0.0123607 -0.017125 -0.0130209 0.0123964 -0.017125 -0.0131397 0.0124814 -0.017125 -0.0135 0.01075 -0.017125 -0.0132915 0.0126388 -0.017125 -0.013434 0.0128928 -0.017125 -0.0134576 0.0129619 -0.017125 -0.0134893 0.0131044 -0.017125 -0.0119181 0.0140633 -0.017125 0.0115424 0.0135381 -0.017125 0.0115 0.01325 -0.017125 -0.0115 0.01325 -0.017125 0.0115027 0.013177 -0.017125 -0.0115027 0.013177 -0.017125 -0.0115107 0.0131044 -0.017125 0.0115424 0.0129619 -0.017125 -0.011566 0.0128928 -0.017125 0.0116279 0.0127606 -0.017125 0.0119791 0.0123964 -0.017125 0.0120428 0.0123607 -0.017125 0.0121089 0.0123297 -0.017125 0.0123184 0.0122666 -0.017125 0.0135 0.01075 -0.017125 0.0125365 0.0122507 -0.017125 0.0124635 0.0122507 -0.017125 0.0126816 0.0122666 -0.017125 0.0126094 0.012256 -0.017125 0.0128911 0.0123297 -0.017125 0.0128229 0.0123036 -0.017125 0.0130209 0.0123964 -0.017125 0.0130819 0.0124367 -0.017125 0.0131941 0.0125301 -0.017125 0.0134054 0.0128255 -0.017125 0.013372 0.0127606 -0.017125 0.013434 0.0128928 -0.017125 0.0134973 0.013177 -0.017125 0.0123906 0.014244 -0.017125 0.0124635 0.0142493 -0.017125 0.0126094 0.014244 -0.017125 0.0126816 0.0142334 -0.017125 0.0128911 0.0141703 -0.017125 0.0132448 0.0139173 -0.017125 0.0134054 0.0136745 -0.017125 0.013434 0.0136072 -0.017125 0.0134576 0.0135381 0.01525 -0.0135 0.01775 0.016 -0.0135 -0.00425 0.0135 -0.0135 -0.00325 0.0134595 -0.0135 -0.00296827 0.0134096 -0.0135 -0.00283459 0.0133413 -0.0135 -0.00270936 0.0133005 -0.0135 -0.00265072 0.01525 -0.0135 -0.00225 0.0131549 -0.0135 -0.00249425 0.0130993 -0.0135 -0.00244946 0.0130406 -0.0135 -0.00240875 0.0129792 -0.0135 -0.00237232 0.0129154 -0.0135 -0.00234037 0.0127817 -0.0135 -0.00229051 0.017125 0.0124635 0.0142493 0.017125 0.0128229 0.0141964 0.017125 0.0132448 0.0139173 0.017125 0.0135 0.01325 0.017125 -0.0120428 0.0141393 0.017125 -0.0119791 0.0141036 0.017125 -0.0119181 0.0140633 0.017125 0.0119791 0.0141036 0.017125 -0.0118603 0.0140186 0.017125 0.0118603 0.0140186 0.017125 0.0117085 0.0138612 0.017125 -0.0115424 0.0135381 0.017125 0.0115424 0.0135381 0.017125 0.0115107 0.0133956 0.017125 0.0115027 0.013323 0.017125 0.0115424 0.0129619 0.017125 -0.011566 0.0128928 0.017125 0.0115946 0.0128255 0.017125 -0.0117085 0.0126388 0.017125 0.0118059 0.0125301 0.017125 0.0119181 0.0124367 0.017125 0.0119791 0.0123964 0.017125 -0.0120428 0.0123607 0.017125 0.0120428 0.0123607 0.017125 -0.0121089 0.0123297 0.017125 -0.0122471 0.0122825 0.017125 -0.0125365 0.0122507 0.017125 -0.0127529 0.0122825 0.017125 -0.0128911 0.0123297 0.017125 -0.0128229 0.0123036 0.017125 -0.0129572 0.0123607 0.017125 -0.0130819 0.0124367 0.017125 -0.0132915 0.0126388 0.017125 
-0.0134054 0.0128255 0.017125 -0.0134576 0.0129619 0.017125 -0.0135 0.01075 0.017125 -0.0134761 0.0130326 0.017125 -0.0135 0.01325 0.017125 -0.0134973 0.013177 0.017125 -0.0118059 0.0139699 0.017125 0.0118059 0.0139699 0.017125 0.011666 0.0138018 0.017125 0.0115946 0.0136745 0.017125 0.011566 0.0136072 0.017125 0.0115239 0.0134674 0.017125 -0.0115107 0.0133956 0.017125 0.011566 0.0128928 0.017125 -0.0115946 0.0128255 0.017125 -0.011628 0.0127606 0.017125 0.0117085 0.0126388 0.017125 0.0117552 0.0125827 0.017125 0.0118603 0.0124814 0.017125 -0.0119181 0.0124367 0.017125 0.0121771 0.0123036 0.017125 0.0123184 0.0122666 0.017125 0.0124635 0.0122507 0.017125 0.0123906 0.012256 0.017125 0.0126816 0.0122666 0.017125 0.0127529 0.0122825 0.017125 0.0128229 0.0123036 0.017125 0.0128911 0.0123297 0.017125 0.0130209 0.0123964 0.017125 0.0130819 0.0124367 0.017125 0.0131397 0.0124814 0.017125 0.0132448 0.0125827 0.017125 0.0135 0.01075 0.017125 0.013434 0.0128928 0.017125 0.0134761 0.0130326 0.017125 0.0134893 0.0131044 0.017125 0.0134973 0.013177 0.017125 -0.0135 0.01775 0.017125 -0.0121771 0.0141964 0.017125 -0.0124635 0.0142493 0.017125 -0.0126094 0.014244 0.017125 -0.0129572 0.0141393 0.017125 -0.0130209 0.0141036 0.017125 -0.0130819 0.0140633 0.017125 -0.0131941 0.0139699 0.017125 -0.0132448 0.0139173 0.017125 -0.013334 0.0138018 0.017125 -0.0134054 0.0136745 0.017125 -0.013434 0.0136072 0.017125 -0.0134761 0.0134674 0.017125 -0.0134576 0.0135381 0.017125 -0.0134973 0.013323 0.019 -0.0065 -0.01625 -0.0134761 0.019 -0.0119674 -0.0134761 0.018 -0.0119674 -0.0134893 0.018 -0.0118956 -0.0134893 0.019 -0.0118956 -0.0134973 0.018 -0.011677 -0.0134893 0.019 -0.0116044 -0.0134761 0.019 -0.0115326 -0.013434 0.019 -0.0113928 -0.013372 0.018 -0.0112606 -0.0132915 0.018 -0.0111388 -0.0132915 0.019 -0.0111388 -0.0131941 0.018 -0.0110301 -0.0131941 0.019 -0.0110301 -0.0130819 0.019 -0.0109367 -0.0128911 0.019 -0.0108297 -0.0127529 0.018 -0.0107825 -0.0128229 0.019 -0.0108036 -0.0127529 0.019 -0.0107825 -0.0123184 0.018 -0.0107666 -0.0122471 0.018 -0.0107825 -0.0118603 0.018 -0.0109814 -0.0118603 0.019 -0.0109814 -0.0117085 0.018 -0.0111388 -0.0117552 0.019 -0.0110827 -0.0117085 0.019 -0.0111388 -0.011566 0.018 -0.0113928 -0.0115424 0.019 -0.0114619 -0.0115239 0.019 -0.0115326 -0.0115 0.019 -0.01175 -0.0115107 0.019 -0.0118956 -0.0115239 0.019 -0.0119674 -0.011566 0.019 -0.0121072 -0.0115946 0.018 -0.0121745 -0.0115946 0.019 -0.0121745 -0.011628 0.018 -0.0122394 -0.011628 0.019 -0.0122394 -0.0117085 0.019 -0.0123612 -0.0118603 0.018 -0.0125186 -0.0120428 0.019 -0.0126393 -0.0121771 0.019 -0.0126964 -0.0122471 0.018 -0.0127175 -0.0123184 0.019 -0.0127334 -0.0126094 0.019 -0.012744 -0.0128229 0.019 -0.0126964 -0.0128911 0.019 -0.0126703 -0.0130819 0.019 -0.0125633 -0.0131941 0.018 -0.0124699 -0.0131941 0.019 -0.0124699 -0.0132448 0.019 -0.0124173 -0.013372 0.018 -0.0122394 -0.013372 0.019 -0.0122394 -0.013434 0.018 -0.0121072 -0.0134893 0.017 -0.0118956 -0.0134761 0.017 -0.0119674 -0.0134576 0.018 -0.0120381 -0.013434 0.017 -0.0121072 -0.0134054 0.018 -0.0121745 -0.013334 0.017 -0.0123018 -0.013334 0.018 -0.0123018 -0.0132915 0.019 -0.0123612 -0.0132915 0.018 -0.0123612 -0.0132448 0.018 -0.0124173 -0.0132448 0.017 -0.0124173 -0.0131397 0.018 -0.0125186 -0.0131397 0.017 -0.0125186 -0.0130819 0.018 -0.0125633 -0.0130819 0.017 -0.0125633 -0.0130209 0.018 -0.0126036 -0.0129572 0.018 -0.0126393 -0.0128911 0.017 -0.0126703 -0.0128911 0.018 -0.0126703 -0.0128229 0.018 -0.0126964 -0.0127529 0.019 -0.0127175 -0.0127529 
0.018 -0.0127175 -0.0126816 0.019 -0.0127334 -0.0127529 0.017 -0.0127175 -0.0126816 0.018 -0.0127334 -0.0126816 0.017 -0.0127334 -0.0126094 0.018 -0.012744 -0.0125365 0.018 -0.0127493 -0.0126094 0.017 -0.012744 -0.0124635 0.019 -0.0127493 -0.0125365 0.017 -0.0127493 -0.0124635 0.017 -0.0127493 -0.0123906 0.018 -0.012744 -0.0124635 0.018 -0.0127493 -0.0123906 0.017 -0.012744 -0.0123184 0.018 -0.0127334 -0.0123184 0.017 -0.0127334 -0.0122471 0.017 -0.0127175 -0.0121771 0.018 -0.0126964 -0.0121089 0.019 -0.0126703 -0.0121771 0.017 -0.0126964 -0.0121089 0.018 -0.0126703 -0.0121089 0.017 -0.0126703 -0.0120428 0.018 -0.0126393 -0.0119791 0.018 -0.0126036 -0.0119181 0.017 -0.0125633 -0.0119181 0.018 -0.0125633 -0.0118059 0.019 -0.0124699 -0.0118059 0.018 -0.0124699 -0.0117552 0.018 -0.0124173 -0.0117552 0.017 -0.0124173 -0.0117085 0.018 -0.0123612 -0.0117085 0.017 -0.0123612 -0.011666 0.019 -0.0123018 -0.011666 0.018 -0.0123018 -0.011666 0.017 -0.0123018 -0.011628 0.017 -0.0122394 -0.011566 0.018 -0.0121072 -0.0115946 0.017 -0.0121745 -0.0115424 0.018 -0.0120381 -0.0115239 0.018 -0.0119674 -0.0115424 0.017 -0.0120381 -0.0115107 0.018 -0.0118956 -0.0115107 0.017 -0.0118956 -0.0115027 0.018 -0.011823 -0.0115 0.017 -0.01175 -0.0115 0.018 -0.01175 -0.0115027 0.018 -0.011677 -0.0115107 0.019 -0.0116044 -0.0115107 0.018 -0.0116044 -0.0115107 0.017 -0.0116044 -0.0115239 0.018 -0.0115326 -0.0115424 0.018 -0.0114619 -0.011566 0.017 -0.0113928 -0.0115946 0.017 -0.0113255 -0.011628 0.019 -0.0112606 -0.0115946 0.018 -0.0113255 -0.011628 0.018 -0.0112606 -0.011666 0.019 -0.0111982 -0.011666 0.017 -0.0111982 -0.011666 0.018 -0.0111982 -0.0117552 0.018 -0.0110827 -0.0118059 0.017 -0.0110301 -0.0118059 0.018 -0.0110301 -0.0119181 0.019 -0.0109367 -0.0118603 0.017 -0.0109814 -0.0119181 0.018 -0.0109367 -0.0119791 0.017 -0.0108964 -0.0119791 0.018 -0.0108964 -0.0120428 0.018 -0.0108607 -0.0120428 0.017 -0.0108607 -0.0121089 0.019 -0.0108297 -0.0121089 0.018 -0.0108297 -0.0121089 0.017 -0.0108297 -0.0121771 0.018 -0.0108036 -0.0122471 0.017 -0.0107825 -0.0123184 0.017 -0.0107666 -0.0123906 0.018 -0.010756 -0.0123906 0.017 -0.010756 -0.0124635 0.018 -0.0107507 -0.0125365 0.018 -0.0107507 -0.0125365 0.017 -0.0107507 -0.0126094 0.018 -0.010756 -0.0126094 0.019 -0.010756 -0.0126816 0.018 -0.0107666 -0.0128229 0.017 -0.0108036 -0.0128229 0.018 -0.0108036 -0.0128911 0.018 -0.0108297 -0.0128911 0.017 -0.0108297 -0.0129572 0.017 -0.0108607 -0.0129572 0.018 -0.0108607 -0.0130209 0.017 -0.0108964 -0.0130209 0.018 -0.0108964 -0.0130819 0.018 -0.0109367 -0.0130819 0.017 -0.0109367 -0.0131397 0.018 -0.0109814 -0.0131397 0.017 -0.0109814 -0.0131941 0.017 -0.0110301 -0.0132448 0.018 -0.0110827 -0.0132448 0.019 -0.0110827 -0.0132448 0.017 -0.0110827 -0.0132915 0.017 -0.0111388 -0.013334 0.017 -0.0111982 -0.013334 0.018 -0.0111982 -0.013372 0.017 -0.0112606 -0.0134054 0.017 -0.0113255 -0.0134054 0.018 -0.0113255 -0.013434 0.017 -0.0113928 -0.013434 0.018 -0.0113928 -0.0134576 0.018 -0.0114619 -0.0134576 0.017 -0.0114619 -0.0134761 0.018 -0.0115326 -0.0134893 0.018 -0.0116044 -0.0134893 0.017 -0.0116044 -0.0134973 0.019 -0.011677 -0.0134973 0.017 -0.011677 -0.0135 0.019 -0.01175 -0.0135 0.018 -0.01175 -0.0134973 0.018 -0.011823 -0.011628 0.017 -0.0112606 -0.0117085 0.017 -0.0111388 -0.0117552 0.017 -0.0110827 -0.0119181 0.017 -0.0109367 -0.0121771 0.017 -0.0108036 -0.0124635 0.017 -0.0107507 -0.0126094 0.017 -0.010756 -0.0126816 0.017 -0.0107666 -0.0127529 0.017 -0.0107825 -0.0134761 0.017 -0.0115326 -0.0115239 0.017 -0.0115326 
-0.0115424 0.017 -0.0114619 -0.0115027 0.017 -0.011677 -0.0115027 0.017 -0.011823 -0.0065 0.017 -0.01625 -0.0115239 0.017 -0.0119674 -0.011566 0.017 -0.0121072 -0.0118059 0.017 -0.0124699 -0.0118603 0.017 -0.0125186 -0.0119791 0.017 -0.0126036 -0.0120428 0.017 -0.0126393 -0.0128229 0.017 -0.0126964 -0.0129572 0.017 -0.0126393 -0.0130209 0.017 -0.0126036 -0.0135 0.017 -0.01625 -0.0131941 0.017 -0.0124699 -0.0132915 0.017 -0.0123612 -0.013372 0.017 -0.0122394 -0.0134054 0.017 -0.0121745 -0.0134576 0.017 -0.0120381 -0.0135 0.017 -0.01175 -0.0134973 0.017 -0.011823 0.0134973 0.017 -0.011677 0.0134893 0.018 -0.0116044 0.0134761 0.018 -0.0115326 0.0134973 0.018 -0.011677 0.0135 0.018 -0.01175 0.0134973 0.019 -0.011677 0.0135 0.019 -0.01175 0.0134576 0.019 -0.0120381 0.013434 0.019 -0.0121072 0.013372 0.018 -0.0122394 0.0134054 0.019 -0.0121745 0.0131397 0.019 -0.0125186 0.0130209 0.018 -0.0126036 0.0130819 0.019 -0.0125633 0.0130209 0.019 -0.0126036 0.0129572 0.019 -0.0126393 0.0125365 0.019 -0.0127493 0.0120428 0.018 -0.0126393 0.0121089 0.019 -0.0126703 0.0119181 0.018 -0.0125633 0.0119181 0.019 -0.0125633 0.0118059 0.019 -0.0124699 0.0117552 0.019 -0.0124173 0.011666 0.018 -0.0123018 0.011666 0.019 -0.0123018 0.0116279 0.019 -0.0122394 0.011566 0.018 -0.0121072 0.011566 0.019 -0.0121072 0.0115239 0.019 -0.0119674 0.0115027 0.019 -0.011823 0.0115 0.019 -0.01175 0.0115946 0.018 -0.0113255 0.011566 0.019 -0.0113928 0.011666 0.018 -0.0111982 0.0118059 0.019 -0.0110301 0.0118603 0.019 -0.0109814 0.0119181 0.018 -0.0109367 0.0119181 0.019 -0.0109367 0.0119791 0.019 -0.0108964 0.0121089 0.019 -0.0108297 0.0121771 0.019 -0.0108036 0.0123184 0.019 -0.0107666 0.0123906 0.019 -0.010756 0.0128229 0.018 -0.0108036 0.0128229 0.019 -0.0108036 0.0128911 0.019 -0.0108297 0.0130209 0.018 -0.0108964 0.0130209 0.019 -0.0108964 0.0131941 0.019 -0.0110301 0.013434 0.018 -0.0113928 0.0134054 0.019 -0.0113255 0.0134576 0.018 -0.0114619 0.0134576 0.019 -0.0114619 0.0134761 0.017 -0.0115326 0.0134576 0.017 -0.0114619 0.013434 0.017 -0.0113928 0.0134054 0.018 -0.0113255 0.013372 0.019 -0.0112606 0.013372 0.018 -0.0112606 0.013372 0.017 -0.0112606 0.013334 0.017 -0.0111982 0.013334 0.018 -0.0111982 0.0132915 0.018 -0.0111388 0.0132448 0.018 -0.0110827 0.0132448 0.017 -0.0110827 0.0131941 0.018 -0.0110301 0.0131941 0.017 -0.0110301 0.0131397 0.018 -0.0109814 0.0131397 0.019 -0.0109814 0.0130819 0.017 -0.0109367 0.0130819 0.018 -0.0109367 0.0129572 0.018 -0.0108607 0.0129572 0.019 -0.0108607 0.0128911 0.018 -0.0108297 0.0128911 0.017 -0.0108297 0.0128229 0.017 -0.0108036 0.0127529 0.019 -0.0107825 0.0127529 0.018 -0.0107825 0.0127529 0.017 -0.0107825 0.0126816 0.019 -0.0107666 0.0126816 0.018 -0.0107666 0.0126094 0.018 -0.010756 0.0126094 0.017 -0.010756 0.0125365 0.018 -0.0107507 0.0124635 0.017 -0.0107507 0.0124635 0.018 -0.0107507 0.0123906 0.018 -0.010756 0.0123184 0.018 -0.0107666 0.0123184 0.017 -0.0107666 0.0122471 0.018 -0.0107825 0.0122471 0.019 -0.0107825 0.0122471 0.017 -0.0107825 0.0121089 0.018 -0.0108297 0.0121771 0.018 -0.0108036 0.0120428 0.018 -0.0108607 0.0120428 0.017 -0.0108607 0.0119791 0.018 -0.0108964 0.0119181 0.017 -0.0109367 0.0118603 0.018 -0.0109814 0.0118059 0.018 -0.0110301 0.0118059 0.017 -0.0110301 0.0117552 0.018 -0.0110827 0.0117552 0.017 -0.0110827 0.011666 0.019 -0.0111982 0.0117085 0.018 -0.0111388 0.0117085 0.017 -0.0111388 0.0116279 0.019 -0.0112606 0.0116279 0.018 -0.0112606 0.011666 0.017 -0.0111982 0.0116279 0.017 -0.0112606 0.011566 0.017 -0.0113928 0.011566 0.018 -0.0113928 
0.0115424 0.019 -0.0114619 0.0115424 0.018 -0.0114619 0.0115239 0.018 -0.0115326 0.0115107 0.018 -0.0116044 0.0115027 0.019 -0.011677 0.0115027 0.018 -0.011677 0.0115 0.018 -0.01175 0.0115 0.017 -0.01175 0.0115107 0.018 -0.0118956 0.0115027 0.018 -0.011823 0.0115239 0.018 -0.0119674 0.0115424 0.018 -0.0120381 0.0115424 0.019 -0.0120381 0.0115946 0.018 -0.0121745 0.0116279 0.018 -0.0122394 0.0116279 0.017 -0.0122394 0.0117085 0.019 -0.0123612 0.0117085 0.018 -0.0123612 0.0117085 0.017 -0.0123612 0.0117552 0.018 -0.0124173 0.0117552 0.017 -0.0124173 0.0118059 0.018 -0.0124699 0.0118059 0.017 -0.0124699 0.0118603 0.018 -0.0125186 0.0119791 0.018 -0.0126036 0.0119181 0.017 -0.0125633 0.0119791 0.017 -0.0126036 0.0120428 0.019 -0.0126393 0.0121089 0.018 -0.0126703 0.0121089 0.017 -0.0126703 0.0121771 0.018 -0.0126964 0.0121771 0.019 -0.0126964 0.0122471 0.019 -0.0127175 0.0121771 0.017 -0.0126964 0.0122471 0.018 -0.0127175 0.0123184 0.019 -0.0127334 0.0123906 0.018 -0.012744 0.0123184 0.018 -0.0127334 0.0124635 0.018 -0.0127493 0.0123906 0.017 -0.012744 0.0125365 0.018 -0.0127493 0.0126094 0.018 -0.012744 0.0126816 0.018 -0.0127334 0.0126816 0.017 -0.0127334 0.0127529 0.019 -0.0127175 0.0127529 0.018 -0.0127175 0.0128229 0.017 -0.0126964 0.0128229 0.018 -0.0126964 0.0128911 0.018 -0.0126703 0.0129572 0.018 -0.0126393 0.0129572 0.017 -0.0126393 0.0130209 0.017 -0.0126036 0.0130819 0.018 -0.0125633 0.0131397 0.017 -0.0125186 0.0131941 0.018 -0.0124699 0.0131941 0.019 -0.0124699 0.0131397 0.018 -0.0125186 0.0132448 0.018 -0.0124173 0.0132915 0.018 -0.0123612 0.0132915 0.017 -0.0123612 0.013372 0.019 -0.0122394 0.013334 0.018 -0.0123018 0.0134054 0.017 -0.0121745 0.0134054 0.018 -0.0121745 0.013434 0.018 -0.0121072 0.013434 0.017 -0.0121072 0.0134576 0.018 -0.0120381 0.0134761 0.019 -0.0119674 0.0134761 0.018 -0.0119674 0.0134893 0.018 -0.0118956 0.0134973 0.018 -0.011823 0.0134973 0.017 -0.011823 0.0135 0.017 -0.01175 0.0118603 0.017 -0.0125186 0.0120428 0.017 -0.0126393 0.0123184 0.017 -0.0127334 0.0122471 0.017 -0.0127175 0.0135 0.017 -0.01625 0.0124635 0.017 -0.0127493 0.0126094 0.017 -0.012744 0.0125365 0.017 -0.0127493 0.0127529 0.017 -0.0127175 0.0128911 0.017 -0.0126703 0.0130819 0.017 -0.0125633 0.0131941 0.017 -0.0124699 0.0132448 0.017 -0.0124173 0.013372 0.017 -0.0122394 0.013334 0.017 -0.0123018 0.0134576 0.017 -0.0120381 0.0134761 0.017 -0.0119674 0.0134893 0.017 -0.0118956 0.011666 0.017 -0.0123018 0.0115946 0.017 -0.0121745 0.011566 0.017 -0.0121072 0.0115424 0.017 -0.0120381 0.0115107 0.017 -0.0118956 0.0115239 0.017 -0.0119674 0.0115027 0.017 -0.011823 0.0115027 0.017 -0.011677 0.0115107 0.017 -0.0116044 0.0115239 0.017 -0.0115326 0.0115424 0.017 -0.0114619 0.0115946 0.017 -0.0113255 0.0118603 0.017 -0.0109814 0.0119791 0.017 -0.0108964 0.0121089 0.017 -0.0108297 0.0121771 0.017 -0.0108036 0.0123906 0.017 -0.010756 0.0125365 0.017 -0.0107507 0.0126816 0.017 -0.0107666 0.0129572 0.017 -0.0108607 0.0130209 0.017 -0.0108964 0.0131397 0.017 -0.0109814 0.0135 0.017 -0.00425 0.0132915 0.017 -0.0111388 0.0134054 0.017 -0.0113255 0.0134893 0.017 -0.0116044 0.0135 0.01525 -0.001 0.0135 0.017125 0.01325 -0.0115107 0.01748 0.0131044 -0.0115239 0.01748 0.0130326 -0.0115 0.01748 0.01325 -0.0115107 0.01748 0.0133956 -0.0115107 0.017125 0.0133956 -0.0115424 0.01748 0.0135381 -0.0115239 0.017125 0.0134674 -0.011566 0.01748 0.0136072 -0.0115424 0.017125 0.0135381 -0.0115946 0.01748 0.0136745 -0.0117552 0.01748 0.0139173 -0.0119791 0.01748 0.0141036 -0.0119181 0.017125 0.0140633 -0.0119791 0.017125 
0.0141036 -0.0121089 0.01748 0.0141703 -0.0122471 0.017125 0.0142175 -0.0126816 0.01748 0.0142334 -0.0127529 0.01748 0.0142175 -0.0126816 0.017125 0.0142334 -0.0127529 0.017125 0.0142175 -0.0128911 0.017125 0.0141703 -0.0129572 0.017125 0.0141393 -0.0130209 0.01748 0.0141036 -0.0130819 0.017125 0.0140633 -0.0131941 0.01748 0.0139699 -0.0131397 0.017125 0.0140186 -0.0131941 0.017125 0.0139699 -0.0132448 0.017125 0.0139173 -0.013434 0.01748 0.0136072 -0.0134761 0.017125 0.0134674 -0.0134893 0.017125 0.0133956 -0.0134893 0.01748 0.0131044 -0.0134893 0.017125 0.0131044 -0.0134761 0.017125 0.0130326 -0.0134576 0.01748 0.0129619 -0.0134576 0.017125 0.0129619 -0.0131941 0.01748 0.0125301 -0.0131397 0.01748 0.0124814 -0.0130819 0.017125 0.0124367 -0.0129572 0.017125 0.0123607 -0.0128911 0.017125 0.0123297 -0.0123906 0.01748 0.012256 -0.0121771 0.017125 0.0123036 -0.0121089 0.017125 0.0123297 -0.0119791 0.01748 0.0123964 -0.0119181 0.01748 0.0124367 -0.0117085 0.01748 0.0126388 -0.0117085 0.017125 0.0126388 -0.0115424 0.01748 0.0129619 -0.011566 0.01748 0.0128928 -0.0115946 0.01748 0.0128255 -0.011566 0.019 0.0128928 -0.0116279 0.01748 0.0127606 -0.0116279 0.019 0.0127606 -0.011666 0.01748 0.0126982 -0.0117085 0.019 0.0126388 -0.0117552 0.017125 0.0125827 -0.0118059 0.017125 0.0125301 -0.0117552 0.01748 0.0125827 -0.0118059 0.01748 0.0125301 -0.0118603 0.01748 0.0124814 -0.0118603 0.019 0.0124814 -0.0119791 0.019 0.0123964 -0.0120428 0.017125 0.0123607 -0.0120428 0.01748 0.0123607 -0.0121089 0.01748 0.0123297 -0.0120428 0.019 0.0123607 -0.0121771 0.01748 0.0123036 -0.0121771 0.019 0.0123036 -0.0122471 0.01748 0.0122825 -0.0122471 0.017125 0.0122825 -0.0122471 0.019 0.0122825 -0.0123184 0.01748 0.0122666 -0.0123184 0.019 0.0122666 -0.0124635 0.01748 0.0122507 -0.0124635 0.019 0.0122507 -0.0125365 0.01748 0.0122507 -0.0125365 0.019 0.0122507 -0.0126094 0.01748 0.012256 -0.0126816 0.01748 0.0122666 -0.0126816 0.019 0.0122666 -0.0127529 0.01748 0.0122825 -0.0128229 0.01748 0.0123036 -0.0128911 0.01748 0.0123297 -0.0129572 0.01748 0.0123607 -0.0128911 0.019 0.0123297 -0.0129572 0.019 0.0123607 -0.0130209 0.01748 0.0123964 -0.0130209 0.017125 0.0123964 -0.0130819 0.019 0.0124367 -0.0130819 0.01748 0.0124367 -0.0131397 0.017125 0.0124814 -0.0131397 0.019 0.0124814 -0.0132448 0.01748 0.0125827 -0.0132448 0.019 0.0125827 -0.0132915 0.01748 0.0126388 -0.013334 0.01748 0.0126982 -0.013334 0.019 0.0126982 -0.013372 0.017125 0.0127606 -0.013372 0.01748 0.0127606 -0.0134054 0.01748 0.0128255 -0.013372 0.019 0.0127606 -0.0134054 0.019 0.0128255 -0.013434 0.01748 0.0128928 -0.0134576 0.019 0.0129619 -0.0134761 0.01748 0.0130326 -0.0134973 0.017125 0.013177 -0.0134973 0.01748 0.013177 -0.0134973 0.019 0.013177 -0.0135 0.017125 0.01325 -0.0135 0.01748 0.01325 -0.0134973 0.01748 0.013323 -0.0135 0.019 0.01325 -0.0134973 0.019 0.013323 -0.0134893 0.01748 0.0133956 -0.0134761 0.019 0.0134674 -0.0134576 0.017125 0.0135381 -0.0134761 0.01748 0.0134674 -0.013434 0.017125 0.0136072 -0.0134576 0.01748 0.0135381 -0.0134576 0.019 0.0135381 -0.0134054 0.01748 0.0136745 -0.013434 0.019 0.0136072 -0.013372 0.01748 0.0137394 -0.0134054 0.019 0.0136745 -0.013334 0.01748 0.0138018 -0.0132915 0.019 0.0138612 -0.0132915 0.01748 0.0138612 -0.0132448 0.019 0.0139173 -0.0132448 0.01748 0.0139173 -0.0131397 0.01748 0.0140186 -0.0131941 0.019 0.0139699 -0.0131397 0.019 0.0140186 -0.0130819 0.01748 0.0140633 -0.0129572 0.01748 0.0141393 -0.0128911 0.01748 0.0141703 -0.0128911 0.019 0.0141703 -0.0128229 0.019 0.0141964 -0.0128229 0.01748 
0.0141964 -0.0127529 0.019 0.0142175 -0.0126816 0.019 0.0142334 -0.0126094 0.017125 0.014244 -0.0126094 0.01748 0.014244 -0.0125365 0.017125 0.0142493 -0.0124635 0.01748 0.0142493 -0.0124635 0.017125 0.0142493 -0.0125365 0.01748 0.0142493 -0.0124635 0.019 0.0142493 -0.0123906 0.019 0.014244 -0.0123184 0.017125 0.0142334 -0.0123906 0.01748 0.014244 -0.0123184 0.01748 0.0142334 -0.0123184 0.019 0.0142334 -0.0122471 0.019 0.0142175 -0.0122471 0.01748 0.0142175 -0.0121771 0.01748 0.0141964 -0.0121089 0.017125 0.0141703 -0.0121771 0.019 0.0141964 -0.0120428 0.017125 0.0141393 -0.0120428 0.01748 0.0141393 -0.0120428 0.019 0.0141393 -0.0119791 0.019 0.0141036 -0.0118603 0.01748 0.0140186 -0.0118603 0.017125 0.0140186 -0.0119181 0.01748 0.0140633 -0.0119181 0.019 0.0140633 -0.0118059 0.01748 0.0139699 -0.0117085 0.017125 0.0138612 -0.0117085 0.01748 0.0138612 -0.0117552 0.019 0.0139173 -0.011666 0.01748 0.0138018 -0.0117085 0.019 0.0138612 -0.011666 0.019 0.0138018 -0.0116279 0.01748 0.0137394 -0.0115946 0.019 0.0136745 -0.011566 0.019 0.0136072 -0.0115239 0.01748 0.0134674 -0.0115239 0.019 0.0134674 -0.0115107 0.019 0.0133956 -0.0115027 0.01748 0.013323 -0.0115 0.019 0.01325 -0.0115027 0.01748 0.013177 -0.0115027 -0.019 0.013323 -0.0115107 -0.0180625 0.0133956 -0.0115239 -0.0180625 0.0134674 -0.0115107 -0.017125 0.0133956 -0.0115027 -0.0180625 0.013323 -0.0115 -0.0180625 0.01325 -0.0115 -0.017125 0.01325 -0.0115107 -0.017125 0.0131044 -0.0115424 -0.017125 0.0129619 -0.0120428 -0.017125 0.0123607 -0.0121771 -0.017125 0.0123036 -0.0123184 -0.017125 0.0122666 -0.0130209 -0.017125 0.0123964 -0.0130819 -0.017125 0.0124367 -0.0131397 -0.0180625 0.0124814 -0.0132915 -0.017125 0.0126388 -0.013334 -0.017125 0.0126982 -0.013372 -0.017125 0.0127606 -0.0134054 -0.0180625 0.0128255 -0.0134973 -0.017125 0.013177 -0.0135 -0.017125 0.01325 -0.0134761 -0.0180625 0.0134674 -0.0134761 -0.017125 0.0134674 -0.0134576 -0.017125 0.0135381 -0.013372 -0.017125 0.0137394 -0.0132915 -0.017125 0.0138612 -0.0132448 -0.017125 0.0139173 -0.0131397 -0.017125 0.0140186 -0.0130819 -0.017125 0.0140633 -0.0130209 -0.017125 0.0141036 -0.0124635 -0.0180625 0.0142493 -0.0123906 -0.017125 0.014244 -0.0122471 -0.017125 0.0142175 -0.0119181 -0.0180625 0.0140633 -0.011666 -0.017125 0.0138018 -0.011566 -0.017125 0.0136072 -0.0115239 -0.019 0.0134674 -0.0115107 -0.019 0.0133956 -0.0115424 -0.0180625 0.0135381 -0.011566 -0.0180625 0.0136072 -0.0115946 -0.019 0.0136745 -0.0115946 -0.0180625 0.0136745 -0.0116279 -0.0180625 0.0137394 -0.011666 -0.0180625 0.0138018 -0.011666 -0.019 0.0138018 -0.0117085 -0.019 0.0138612 -0.0117552 -0.017125 0.0139173 -0.0117085 -0.0180625 0.0138612 -0.0117552 -0.0180625 0.0139173 -0.0118059 -0.019 0.0139699 -0.0118059 -0.0180625 0.0139699 -0.0118603 -0.017125 0.0140186 -0.0118603 -0.0180625 0.0140186 -0.0118603 -0.019 0.0140186 -0.0119791 -0.017125 0.0141036 -0.0119791 -0.0180625 0.0141036 -0.0120428 -0.017125 0.0141393 -0.0120428 -0.0180625 0.0141393 -0.0121089 -0.0180625 0.0141703 -0.0121089 -0.019 0.0141703 -0.0121771 -0.0180625 0.0141964 -0.0122471 -0.0180625 0.0142175 -0.0123184 -0.0180625 0.0142334 -0.0123906 -0.0180625 0.014244 -0.0123906 -0.019 0.014244 -0.0124635 -0.019 0.0142493 -0.0125365 -0.017125 0.0142493 -0.0125365 -0.0180625 0.0142493 -0.0126094 -0.0180625 0.014244 -0.0126094 -0.019 0.014244 -0.0126816 -0.0180625 0.0142334 -0.0127529 -0.0180625 0.0142175 -0.0127529 -0.019 0.0142175 -0.0128229 -0.017125 0.0141964 -0.0128229 -0.0180625 0.0141964 -0.0128229 -0.019 0.0141964 -0.0128911 -0.019 
0.0141703 -0.0129572 -0.017125 0.0141393 -0.0128911 -0.0180625 0.0141703 -0.0129572 -0.0180625 0.0141393 -0.0129572 -0.019 0.0141393 -0.0130209 -0.0180625 0.0141036 -0.0130819 -0.0180625 0.0140633 -0.0130819 -0.019 0.0140633 -0.0131397 -0.0180625 0.0140186 -0.0131941 -0.019 0.0139699 -0.0131941 -0.0180625 0.0139699 -0.0132448 -0.0180625 0.0139173 -0.0132915 -0.0180625 0.0138612 -0.0132915 -0.019 0.0138612 -0.013334 -0.0180625 0.0138018 -0.013334 -0.019 0.0138018 -0.013372 -0.019 0.0137394 -0.013372 -0.0180625 0.0137394 -0.0134054 -0.0180625 0.0136745 -0.0134054 -0.017125 0.0136745 -0.013434 -0.0180625 0.0136072 -0.0134576 -0.0180625 0.0135381 -0.0134761 -0.019 0.0134674 -0.0134893 -0.017125 0.0133956 -0.0134893 -0.019 0.0133956 -0.0134893 -0.0180625 0.0133956 -0.0134973 -0.0180625 0.013323 -0.0134973 -0.019 0.013323 -0.0134973 -0.0180625 0.013177 -0.0135 -0.0180625 0.01325 -0.0135 -0.019 0.01325 -0.0134893 -0.0180625 0.0131044 -0.0134973 -0.019 0.013177 -0.0134761 -0.017125 0.0130326 -0.0134893 -0.019 0.0131044 -0.0134761 -0.019 0.0130326 -0.0134761 -0.0180625 0.0130326 -0.0134576 -0.0180625 0.0129619 -0.013434 -0.017125 0.0128928 -0.0134054 -0.017125 0.0128255 -0.013434 -0.0180625 0.0128928 -0.013434 -0.019 0.0128928 -0.0134054 -0.019 0.0128255 -0.013372 -0.0180625 0.0127606 -0.013334 -0.0180625 0.0126982 -0.013334 -0.019 0.0126982 -0.0132448 -0.0180625 0.0125827 -0.0132448 -0.017125 0.0125827 -0.0132915 -0.0180625 0.0126388 -0.0132915 -0.019 0.0126388 -0.0132448 -0.019 0.0125827 -0.0131941 -0.0180625 0.0125301 -0.0131941 -0.017125 0.0125301 -0.0131397 -0.019 0.0124814 -0.0130209 -0.0180625 0.0123964 -0.0130819 -0.0180625 0.0124367 -0.0130209 -0.019 0.0123964 -0.0129572 -0.0180625 0.0123607 -0.0129572 -0.019 0.0123607 -0.0128911 -0.019 0.0123297 -0.0128911 -0.0180625 0.0123297 -0.0128229 -0.017125 0.0123036 -0.0128229 -0.0180625 0.0123036 -0.0127529 -0.0180625 0.0122825 -0.0126816 -0.0180625 0.0122666 -0.0126094 -0.017125 0.012256 -0.0126816 -0.019 0.0122666 -0.0126094 -0.019 0.012256 -0.0126094 -0.0180625 0.012256 -0.0125365 -0.017125 0.0122507 -0.0125365 -0.0180625 0.0122507 -0.0124635 -0.017125 0.0122507 -0.0123906 -0.0180625 0.012256 -0.0124635 -0.0180625 0.0122507 -0.0123184 -0.0180625 0.0122666 -0.0123906 -0.019 0.012256 -0.0123184 -0.019 0.0122666 -0.0122471 -0.019 0.0122825 -0.0122471 -0.0180625 0.0122825 -0.0121771 -0.0180625 0.0123036 -0.0121771 -0.019 0.0123036 -0.0121089 -0.0180625 0.0123297 -0.0121089 -0.019 0.0123297 -0.0120428 -0.019 0.0123607 -0.0119791 -0.017125 0.0123964 -0.0120428 -0.0180625 0.0123607 -0.0119791 -0.019 0.0123964 -0.0119791 -0.0180625 0.0123964 -0.0119181 -0.0180625 0.0124367 -0.0118603 -0.017125 0.0124814 -0.0118603 -0.0180625 0.0124814 -0.0118603 -0.019 0.0124814 -0.0118059 -0.017125 0.0125301 -0.0118059 -0.019 0.0125301 -0.0118059 -0.0180625 0.0125301 -0.0117552 -0.0180625 0.0125827 -0.0117552 -0.019 0.0125827 -0.0117085 -0.0180625 0.0126388 -0.011666 -0.019 0.0126982 -0.011666 -0.0180625 0.0126982 -0.0116279 -0.019 0.0127606 -0.0116279 -0.0180625 0.0127606 -0.0115946 -0.0180625 0.0128255 -0.0115946 -0.019 0.0128255 -0.011566 -0.0180625 0.0128928 -0.011566 -0.019 0.0128928 -0.0115424 -0.019 0.0129619 -0.0115424 -0.0180625 0.0129619 -0.0115239 -0.017125 0.0130326 -0.0115239 -0.0180625 0.0130326 -0.0115239 -0.019 0.0130326 -0.0115107 -0.019 0.0131044 -0.0115027 -0.0180625 0.013177 -0.0115107 -0.0180625 0.0131044 -0.0115027 -0.019 0.013177 0.0134973 -0.017 -0.011823 0.0134973 -0.018 -0.011823 0.0134893 -0.017 -0.0118956 0.0134761 -0.019 -0.0119674 
0.0134893 -0.018 -0.0118956 0.0134893 -0.019 -0.0118956 0.0134973 -0.019 -0.011823 0.0134893 -0.018 -0.0116044 0.0134576 -0.019 -0.0114619 0.0132915 -0.018 -0.0111388 0.013334 -0.019 -0.0111982 0.0132448 -0.019 -0.0110827 0.0131397 -0.018 -0.0109814 0.0130819 -0.018 -0.0109367 0.0131397 -0.019 -0.0109814 0.0130819 -0.019 -0.0109367 0.0129572 -0.019 -0.0108607 0.0128229 -0.019 -0.0108036 0.0127529 -0.019 -0.0107825 0.0126816 -0.018 -0.0107666 0.0124635 -0.019 -0.0107507 0.0123184 -0.019 -0.0107666 0.0120428 -0.019 -0.0108607 0.0119181 -0.019 -0.0109367 0.0118603 -0.018 -0.0109814 0.0118603 -0.019 -0.0109814 0.0117552 -0.018 -0.0110827 0.0118059 -0.019 -0.0110301 0.0117085 -0.018 -0.0111388 0.011666 -0.019 -0.0111982 0.011566 -0.018 -0.0113928 0.0115424 -0.018 -0.0114619 0.0115239 -0.019 -0.0115326 0.0115107 -0.018 -0.0116044 0.0115027 -0.018 -0.011677 0.0115027 -0.019 -0.011677 0.0115 -0.018 -0.01175 0.0115 -0.019 -0.01175 0.0115107 -0.018 -0.0118956 0.0115107 -0.019 -0.0118956 0.0115239 -0.019 -0.0119674 0.0115424 -0.018 -0.0120381 0.0115424 -0.019 -0.0120381 0.011566 -0.019 -0.0121072 0.011666 -0.019 -0.0123018 0.0118059 -0.019 -0.0124699 0.0119181 -0.019 -0.0125633 0.0120428 -0.018 -0.0126393 0.0120428 -0.019 -0.0126393 0.0121089 -0.019 -0.0126703 0.0126816 -0.018 -0.0127334 0.0126816 -0.019 -0.0127334 0.0128911 -0.018 -0.0126703 0.0129572 -0.019 -0.0126393 0.0130209 -0.019 -0.0126036 0.0130819 -0.019 -0.0125633 0.0131397 -0.019 -0.0125186 0.0132915 -0.018 -0.0123612 0.0132915 -0.019 -0.0123612 0.013334 -0.019 -0.0123018 0.0134054 -0.018 -0.0121745 0.013434 -0.018 -0.0121072 0.0134054 -0.019 -0.0121745 0.013434 -0.019 -0.0121072 0.0134576 -0.018 -0.0120381 0.0134576 -0.019 -0.0120381 0.0134761 -0.018 -0.0119674 0.0134761 -0.017 -0.0119674 0.0134576 -0.017 -0.0120381 0.013434 -0.017 -0.0121072 0.0134054 -0.017 -0.0121745 0.013372 -0.017 -0.0122394 0.013372 -0.018 -0.0122394 0.013334 -0.018 -0.0123018 0.0132448 -0.019 -0.0124173 0.0132448 -0.018 -0.0124173 0.0131941 -0.019 -0.0124699 0.0131941 -0.018 -0.0124699 0.0132448 -0.017 -0.0124173 0.0131941 -0.017 -0.0124699 0.0131397 -0.018 -0.0125186 0.0130819 -0.018 -0.0125633 0.0130819 -0.017 -0.0125633 0.0130209 -0.018 -0.0126036 0.0130209 -0.017 -0.0126036 0.0129572 -0.018 -0.0126393 0.0128229 -0.018 -0.0126964 0.0128911 -0.017 -0.0126703 0.0128229 -0.017 -0.0126964 0.0127529 -0.017 -0.0127175 0.0127529 -0.018 -0.0127175 0.0126816 -0.017 -0.0127334 0.0126094 -0.019 -0.012744 0.0126094 -0.018 -0.012744 0.0126094 -0.017 -0.012744 0.0125365 -0.018 -0.0127493 0.0125365 -0.019 -0.0127493 0.0125365 -0.017 -0.0127493 0.0124635 -0.018 -0.0127493 0.0123906 -0.018 -0.012744 0.0123906 -0.019 -0.012744 0.0124635 -0.017 -0.0127493 0.0123184 -0.018 -0.0127334 0.0123184 -0.017 -0.0127334 0.0122471 -0.018 -0.0127175 0.0121771 -0.018 -0.0126964 0.0121089 -0.018 -0.0126703 0.0120428 -0.017 -0.0126393 0.0119791 -0.019 -0.0126036 0.0119791 -0.018 -0.0126036 0.0119791 -0.017 -0.0126036 0.0119181 -0.017 -0.0125633 0.0119181 -0.018 -0.0125633 0.0118603 -0.018 -0.0125186 0.0118059 -0.017 -0.0124699 0.0118059 -0.018 -0.0124699 0.0117552 -0.017 -0.0124173 0.0117552 -0.018 -0.0124173 0.0117085 -0.018 -0.0123612 0.011666 -0.018 -0.0123018 0.0116279 -0.018 -0.0122394 0.0116279 -0.017 -0.0122394 0.0115946 -0.019 -0.0121745 0.0115946 -0.018 -0.0121745 0.0115946 -0.017 -0.0121745 0.011566 -0.017 -0.0121072 0.011566 -0.018 -0.0121072 0.0115424 -0.017 -0.0120381 0.0115239 -0.018 -0.0119674 0.0115239 -0.017 -0.0119674 0.0115107 -0.017 -0.0118956 0.0115027 -0.019 -0.011823 
0.0115027 -0.018 -0.011823 0.0115027 -0.017 -0.011823 0.0115107 -0.019 -0.0116044 0.0115107 -0.017 -0.0116044 0.0115239 -0.017 -0.0115326 0.0115239 -0.018 -0.0115326 0.011566 -0.017 -0.0113928 0.0115946 -0.018 -0.0113255 0.0116279 -0.018 -0.0112606 0.0116279 -0.017 -0.0112606 0.011666 -0.018 -0.0111982 0.0117085 -0.019 -0.0111388 0.0117552 -0.019 -0.0110827 0.0118059 -0.018 -0.0110301 0.0118059 -0.017 -0.0110301 0.0119181 -0.018 -0.0109367 0.0118603 -0.017 -0.0109814 0.0119181 -0.017 -0.0109367 0.0119791 -0.019 -0.0108964 0.0119791 -0.018 -0.0108964 0.0119791 -0.017 -0.0108964 0.0120428 -0.018 -0.0108607 0.0120428 -0.017 -0.0108607 0.0121089 -0.018 -0.0108297 0.0121089 -0.017 -0.0108297 0.0121771 -0.017 -0.0108036 0.0122471 -0.018 -0.0107825 0.0122471 -0.019 -0.0107825 0.0121771 -0.018 -0.0108036 0.0123184 -0.018 -0.0107666 0.0123906 -0.019 -0.010756 0.0123906 -0.018 -0.010756 0.0123184 -0.017 -0.0107666 0.0124635 -0.018 -0.0107507 0.0124635 -0.017 -0.0107507 0.0125365 -0.018 -0.0107507 0.0125365 -0.017 -0.0107507 0.0126094 -0.019 -0.010756 0.0126094 -0.018 -0.010756 0.0126094 -0.017 -0.010756 0.0127529 -0.018 -0.0107825 0.0127529 -0.017 -0.0107825 0.0128229 -0.017 -0.0108036 0.0128229 -0.018 -0.0108036 0.0128911 -0.018 -0.0108297 0.0128911 -0.017 -0.0108297 0.0130209 -0.018 -0.0108964 0.0129572 -0.018 -0.0108607 0.0130209 -0.017 -0.0108964 0.0131941 -0.018 -0.0110301 0.0131941 -0.019 -0.0110301 0.0131941 -0.017 -0.0110301 0.0132448 -0.018 -0.0110827 0.0132915 -0.019 -0.0111388 0.013334 -0.018 -0.0111982 0.013334 -0.017 -0.0111982 0.013372 -0.018 -0.0112606 0.0134054 -0.019 -0.0113255 0.013372 -0.017 -0.0112606 0.013434 -0.018 -0.0113928 0.0134054 -0.018 -0.0113255 0.013434 -0.017 -0.0113928 0.0134576 -0.018 -0.0114619 0.0134761 -0.019 -0.0115326 0.0134761 -0.018 -0.0115326 0.0134893 -0.019 -0.0116044 0.0134761 -0.017 -0.0115326 0.0134893 -0.017 -0.0116044 0.0134973 -0.019 -0.011677 0.0134973 -0.018 -0.011677 0.0134973 -0.017 -0.011677 0.0135 -0.019 -0.01175 0.0135 -0.018 -0.01175 -0.0134973 -0.018 -0.011677 -0.0135 -0.017 -0.01175 -0.0134973 -0.017 -0.011677 -0.0134761 -0.018 -0.0115326 -0.0134893 -0.018 -0.0116044 -0.0134973 -0.018 -0.011823 -0.0134973 -0.019 -0.011823 -0.0134893 -0.019 -0.0118956 -0.013334 -0.018 -0.0123018 -0.0132915 -0.018 -0.0123612 -0.0130209 -0.018 -0.0126036 -0.0128911 -0.019 -0.0126703 -0.0125365 -0.018 -0.0127493 -0.0125365 -0.019 -0.0127493 -0.0123184 -0.018 -0.0127334 -0.0123184 -0.019 -0.0127334 -0.0122471 -0.018 -0.0127175 -0.0121771 -0.018 -0.0126964 -0.0121771 -0.019 -0.0126964 -0.0120428 -0.018 -0.0126393 -0.0119791 -0.019 -0.0126036 -0.0119181 -0.019 -0.0125633 -0.0118603 -0.019 -0.0125186 -0.0118059 -0.019 -0.0124699 -0.011566 -0.018 -0.0121072 -0.0115424 -0.018 -0.0120381 -0.0115107 -0.019 -0.0118956 -0.0115027 -0.018 -0.011823 -0.0115027 -0.019 -0.011823 -0.0115424 -0.019 -0.0114619 -0.0115946 -0.018 -0.0113255 -0.011666 -0.019 -0.0111982 -0.0117085 -0.018 -0.0111388 -0.0117085 -0.019 -0.0111388 -0.0118603 -0.019 -0.0109814 -0.0119181 -0.019 -0.0109367 -0.0119791 -0.018 -0.0108964 -0.0119791 -0.019 -0.0108964 -0.0121089 -0.019 -0.0108297 -0.0122471 -0.018 -0.0107825 -0.0121771 -0.019 -0.0108036 -0.0122471 -0.019 -0.0107825 -0.0123906 -0.018 -0.010756 -0.0124635 -0.018 -0.0107507 -0.0125365 -0.018 -0.0107507 -0.0126816 -0.019 -0.0107666 -0.0127529 -0.018 -0.0107825 -0.0127529 -0.019 -0.0107825 -0.0128229 -0.018 -0.0108036 -0.0129572 -0.019 -0.0108607 -0.0130819 -0.019 -0.0109367 -0.0132448 -0.018 -0.0110827 -0.013334 -0.019 -0.0111982 -0.013434 -0.019 
[new mesh asset added in this changeset: the remainder of the file is machine-generated geometry data — float arrays of vertex coordinates followed by per-vertex unit surface normals — omitted here for readability]
-0.625534 0.757443 -0.000540425 -0.652901 0.732455 0.00103091 -0.680814 0.724232 2.65199e-07 -0.689556 0.689558 2.80227e-07 -0.72423 0.680813 0.000919733 -0.732457 0.653432 -0.000455157 -0.756985 0.625536 0.000888759 -0.780195 0.625536 -8.00936e-05 -0.780195 0.163643 -0.000302303 -0.98652 0.235221 -0.000276435 -0.971942 0.26695 -0.000281067 -0.96371 0.235221 0.000593587 -0.971942 0.305537 -0.00024813 -0.95218 0.313595 9.32013e-05 -0.949557 0.305537 0.000528489 -0.95218 0.374236 -0.000218484 -0.927334 0.405532 -0.000328152 -0.914081 0.374236 0.0006732 -0.927333 0.359973 -7.9712e-05 -0.932963 0.440919 -0.000186895 -0.897547 0.449551 6.96591e-05 -0.893255 0.440919 0.000629175 -0.897547 0.505263 -0.000153275 -0.862965 0.535276 -0.000385688 -0.844677 0.505263 0.000770102 -0.862965 0.492935 -6.38199e-05 -0.870066 0.566904 -0.000117628 -0.823784 0.575785 3.96043e-05 -0.817601 0.566904 0.000757606 -0.823784 0.680813 -4.15861e-05 -0.732457 0.821422 7.91176e-05 -0.57032 0.823784 -0.000117545 -0.566904 0.845032 -0.000638908 -0.534716 0.823783 0.00119161 -0.566903 0.817566 5.54902e-05 -0.575834 0.860011 0.000115408 -0.510276 0.862961 -0.000153032 -0.50527 0.870125 -0.000125771 -0.49283 0.862961 0.00135145 -0.505269 0.894228 0.000150052 -0.447613 0.897549 -0.000186717 -0.440914 0.914323 -0.000743176 -0.404984 0.897548 0.00136214 -0.440914 0.893199 0.00013372 -0.449662 0.923875 0.000182326 -0.382694 0.927334 -0.000218829 -0.374234 0.933021 -0.000209544 -0.359822 0.927333 0.00161065 -0.374233 0.948829 0.000212724 -0.315791 0.952179 -0.000248065 -0.305542 0.963842 -0.000837715 -0.266472 0.952178 0.00151864 -0.305541 0.949502 0.000242116 -0.313761 0.968936 0.000241903 -0.24731 0.967792 0.000432495 -0.251749 0.975426 -0.00013496 -0.220326 0.985267 0.000369675 -0.171024 0.975713 -0.000304474 -0.219054 0.984112 0.00026837 -0.177551 0.986519 -0.000302001 -0.163647 0.992493 -0.000896637 -0.122296 0.986518 0.0016111 -0.163647 0.994271 0.000293527 -0.106891 0.994059 0.000370485 -0.108839 0.997155 -0.000142195 -0.0753781 0.999699 0.000497767 -0.0245466 0.997292 -0.000394669 -0.0735362 0.999363 0.000316892 -0.0356951 0.999625 0 -0.0273848 0.615248 -3.86193e-05 -0.788333 0.122643 -0.000242428 -0.992451 0.0736624 -9.80416e-05 -0.997283 0 1 0 0 1 0 -4.08008e-07 1 5.32184e-08 0 1 7.92615e-08 -1.85437e-07 1 4.96705e-08 3.07575e-15 1 7.92615e-08 0 1 0 0 1 0 0 1 0 -1.86264e-07 1 1.86264e-07 9.21089e-08 1 7.16402e-08 0 1 0 1 0 -7.16402e-08 1 -1.11387e-07 -8.66347e-08 1 0 0 1 7.0958e-08 -1.24176e-07 1 0 0 1 1.85437e-07 -4.96705e-08 1 0 0 1 0 0 1 1.13549e-07 -6.11419e-08 1 0 -6.20882e-08 1 0 -6.20882e-08 -0.999625 0 -0.0273848 -0.998007 0 -0.0630976 -0.994271 0.000266492 -0.106891 -0.985269 0 -0.171013 -0.97854 0 -0.206056 -0.968936 0.000219623 -0.24731 -0.950389 0 -0.311064 -0.938689 0 -0.344766 -0.923875 0.000165533 -0.382694 -0.894228 0.000136231 -0.447613 -0.860011 0.000104779 -0.510276 -0.821424 7.18382e-05 -0.570317 -0.778645 3.63303e-05 -0.627464 -0.731887 0 -0.681426 -0.681426 0 -0.731887 -0.680813 -3.77558e-05 -0.732457 -0.627465 3.62598e-05 -0.778645 -0.625536 -7.27171e-05 -0.780195 -0.570322 -0.000682352 -0.821421 -0.625536 0.000690803 -0.780195 -0.627465 -0.000344466 -0.778645 -0.680813 0.000358678 -0.732457 -0.681426 0 -0.731887 -0.731887 0 -0.681426 -0.732456 0.000357823 -0.680815 -0.778645 -0.000345136 -0.627464 -0.780196 0.00069077 -0.625534 -0.780197 -7.27124e-05 -0.625534 -0.995042 0 -0.0994555 -0.990856 0 -0.134921 -0.994268 -0.00253165 -0.10689 -0.984112 0.000243652 -0.177551 -0.970359 0 -0.241668 
-0.961104 0 -0.276186 -0.968934 -0.0020864 -0.247309 -0.948829 0.000193131 -0.315791 -0.925457 0 -0.378853 -0.897548 0.00161044 -0.440914 -0.862961 0.00131988 -0.505269 -0.823786 0.00101371 -0.5669 -0.821424 -0.000682459 -0.570317 -0.897549 -0.000169521 -0.440914 -0.862961 -0.000138936 -0.50527 -0.823786 -0.000106706 -0.566901 -0.732456 -3.76659e-05 -0.680815 -0.570322 7.18269e-05 -0.821421 -0.566904 -0.000106793 -0.823784 -0.510272 -0.000994547 -0.860013 -0.566903 0.00101453 -0.823784 -0.510272 0.00010469 -0.860013 -0.505263 -0.000139159 -0.862965 -0.447615 -0.00129157 -0.894225 -0.505263 0.00132199 -0.862965 -0.447615 0.000135956 -0.894226 -0.440919 -0.000169681 -0.897547 -0.382687 -0.00157184 -0.923877 -0.440919 0.00161196 -0.897546 -0.382688 0.000165458 -0.923878 -0.378848 0 -0.925459 -0.344815 0 -0.938671 -0.311061 0 -0.95039 -0.247315 0.000219476 -0.968935 -0.20604 0 -0.978544 -0.171016 0 -0.985268 -0.106891 0.000266181 -0.994271 -0.0630557 0 -0.99801 -0.0273972 0 -0.999625 -0.0356947 -0.00272901 -0.999359 -0.315795 0.000193431 -0.948827 -0.276173 0 -0.961108 -0.241678 0 -0.970357 -0.247315 -0.002085 -0.968933 -0.17755 0.000243512 -0.984112 -0.134888 0 -0.990861 -0.0994645 0 -0.995041 -0.106891 -0.00252869 -0.994268 -0.0356948 0.000287267 -0.999363 -0.17755 -0.00231334 -0.984109 -0.315795 -0.00183757 -0.948826 -0.86001 -0.000995391 -0.510276 -0.894227 -0.00129419 -0.447612 -0.923874 -0.00157255 -0.382694 -0.948827 -0.00183473 -0.315791 -0.984109 -0.00231467 -0.17755 -0.999359 -0.00273317 -0.035695 -0.999363 0.000287705 -0.0356951 2.90929e-08 -0.0273974 0.999625 -0.000312163 -0.091184 0.995834 -0.000289451 -0.163647 0.986519 -0.000263792 -0.235244 0.971936 -0.000238973 -0.305539 0.952179 -0.000209245 -0.374219 0.92734 -0.000178864 -0.44093 0.897542 -0.000147825 -0.50525 0.862973 -0.000112593 -0.566922 0.823771 -7.72002e-05 -0.625527 0.780203 -3.93072e-05 -0.680799 0.73247 3.30532e-08 -0.731894 0.681419 3.55022e-08 -0.681405 0.731907 -0.000843953 -0.106878 0.994272 0.000868464 -0.163647 0.986519 -0.000774334 -0.177575 0.984107 0.000791543 -0.235244 0.971936 -0.000691944 -0.247288 0.968942 -0.000611874 -0.315821 0.948819 -0.000524821 -0.382676 0.923883 -0.000427465 -0.447601 0.894233 -0.00033206 -0.510281 0.860008 -0.000225772 -0.570328 0.821417 -0.000116331 -0.627472 0.778639 2.13012e-08 -0.681405 0.731907 1.86631e-08 -0.731894 0.681419 0.000120656 -0.732469 0.6808 -0.000113742 -0.778651 0.627458 0.000229135 -0.780191 0.625541 7.62093e-05 -0.821421 0.570322 -7.63404e-05 -0.780191 0.625541 3.79505e-05 -0.778651 0.627458 -4.01791e-05 -0.732469 0.6808 0.000717084 -0.305539 0.952179 0.000627897 -0.374219 0.92734 0.000536748 -0.44093 0.897541 0.000443623 -0.50525 0.862973 0.000337922 -0.566922 0.823771 0.000231737 -0.625527 0.780203 0.000118049 -0.6808 0.73247 -0.000228528 -0.821421 0.570322 0.000338452 -0.823789 0.566896 0.000110415 -0.860017 0.510265 -0.000112786 -0.823789 0.566896 -0.000331155 -0.860017 0.510265 0.000438117 -0.862958 0.505275 0.000144367 -0.894225 0.447618 -0.00014601 -0.862958 0.505275 -0.00043302 -0.894225 0.447618 0.000535198 -0.897546 0.44092 0.000175338 -0.923875 0.382695 -0.000178374 -0.897546 0.44092 -0.000525946 -0.923875 0.382695 0.00062896 -0.927339 0.374222 0.000203858 -0.948829 0.315791 -0.000209632 -0.927339 0.374222 -0.000611519 -0.948829 0.315791 0.000713147 -0.952178 0.305542 0.000231778 -0.968934 0.247319 -0.000237699 -0.952178 0.305542 -0.000695288 -0.968934 0.247319 0.00079596 -0.971944 0.235213 0.000256553 -0.984111 0.177553 -0.000265307 -0.971944 
0.235213 -0.000769626 -0.984111 0.177552 0.000869884 -0.986518 0.163652 0.000280771 -0.994271 0.106889 -0.000289953 -0.986518 0.163652 -0.000842293 -0.994271 0.106889 0.000937698 -0.995832 0.0912007 0.000303086 -0.999363 0.0356956 -0.000312561 -0.995833 0.0912007 -0.000909249 -0.999362 0.0356955 7.59896e-10 -0.999625 0.0274017 3.88222e-05 -0.627472 0.778639 7.53054e-05 -0.570328 0.821417 0.000110737 -0.510281 0.860008 0.000142541 -0.447601 0.894234 0.000174995 -0.382676 0.923883 0.000204014 -0.315821 0.948819 0.000230705 -0.247288 0.968942 0.000258165 -0.177576 0.984107 0.000281357 -0.106878 0.994272 0.00030332 -0.0356969 0.999363 0.000936604 -0.0911839 0.995834 -0.00090984 -0.0356969 0.999362 2.90929e-08 -0.0273982 0.999625 -0.000468388 -0.091184 0.995834 -0.000434374 -0.163646 0.986519 -0.000395935 -0.235244 0.971936 -0.000358496 -0.30554 0.952179 -0.000314294 -0.374219 0.92734 -0.000268345 -0.44093 0.897542 -0.000221737 -0.50525 0.862973 -0.000168881 -0.566917 0.823775 -0.000115867 -0.625527 0.780203 -5.8362e-05 -0.680806 0.732464 0 -0.731894 0.681419 0 -0.681404 0.731907 -0.00042182 -0.106878 0.994272 0.000434431 -0.163646 0.986519 -0.000387002 -0.177575 0.984107 0.000395991 -0.235244 0.971936 -0.000345935 -0.247291 0.968941 -0.00030563 -0.315817 0.94882 -0.000262343 -0.38268 0.923881 -0.000213756 -0.447601 0.894233 -0.000166032 -0.510281 0.860008 -0.000112863 -0.570322 0.821421 -5.87357e-05 -0.627479 0.778634 4.26026e-08 -0.681404 0.731907 4.04369e-08 -0.731894 0.681419 5.973e-05 -0.732463 0.680807 -5.7516e-05 -0.778645 0.627465 0.000115268 -0.780197 0.625534 0.00011356 -0.821427 0.570313 -0.000115232 -0.780197 0.625534 5.75535e-05 -0.778645 0.627465 -5.96904e-05 -0.732463 0.680807 0.000358552 -0.30554 0.952179 0.000314348 -0.374219 0.92734 0.000268398 -0.44093 0.897542 0.000221787 -0.50525 0.862973 0.000168929 -0.566917 0.823775 0.000115913 -0.625527 0.780203 5.84046e-05 -0.680806 0.732464 -0.000113527 -0.821427 0.570313 0.000168544 -0.823784 0.566904 0.000166324 -0.860012 0.510274 -0.000168511 -0.823784 0.566904 -0.000166292 -0.860011 0.510275 0.000219814 -0.862964 0.505266 0.000215709 -0.894225 0.447619 -0.000219755 -0.862964 0.505266 -0.000215656 -0.894225 0.447619 0.000268508 -0.897547 0.440919 0.000262193 -0.92388 0.382683 -0.000268457 -0.897547 0.440919 -0.000262149 -0.92388 0.382683 0.000313784 -0.927335 0.374233 0.000306515 -0.948825 0.315803 -0.00031374 -0.927335 0.374233 -0.000306478 -0.948825 0.315803 0.00035735 -0.952182 0.305531 0.0003469 -0.968937 0.247307 -0.000357315 -0.952182 0.305531 -0.000346871 -0.968937 0.247307 0.000397235 -0.971941 0.235225 0.000385755 -0.984111 0.177553 -0.000397208 -0.971941 0.235225 -0.000385734 -0.984111 0.177553 0.000434036 -0.986518 0.163651 0.000422076 -0.994271 0.106889 -0.000434017 -0.986518 0.163651 -0.000422063 -0.994271 0.106889 0.000468646 -0.995834 0.0911881 0.000454627 -0.999363 0.0356956 -0.000468635 -0.995834 0.0911881 -0.000454623 -0.999363 0.0356956 1.58312e-09 -0.999625 0.0274018 5.8781e-05 -0.627479 0.778634 0.000112911 -0.570322 0.821421 0.000166082 -0.510281 0.860008 0.000213808 -0.447601 0.894233 0.000262397 -0.38268 0.923881 0.000305685 -0.315817 0.94882 0.000345992 -0.247291 0.968941 0.000387059 -0.177575 0.984107 0.000421878 -0.106878 0.994272 0.000454896 -0.0356964 0.999363 -0.000454837 -0.0356964 0.999363 0.000468446 -0.091184 0.995834 0 -1 0 0 -1 0 0 -0.999625 -0.0274019 -0.000468625 -0.995834 -0.0911875 -0.000433937 -0.986518 -0.163653 -0.000397193 -0.971941 -0.235224 -0.000357291 -0.952182 -0.305532 -0.00031394 
-0.927335 -0.374231 -0.000268251 -0.897547 -0.440919 -0.000219825 -0.862963 -0.505267 -0.000168722 -0.823782 -0.566906 -0.000115272 -0.780197 -0.625534 -5.98813e-05 -0.732466 -0.680804 0 -0.681404 -0.731908 0 -0.731895 -0.681418 -0.000422135 -0.994271 -0.10689 0.000433937 -0.986518 -0.163653 -0.000385759 -0.984111 -0.177552 0.000397193 -0.971941 -0.235224 -0.000346938 -0.968937 -0.247306 -0.000306351 -0.948825 -0.315801 -0.000262341 -0.923878 -0.382687 -0.000215681 -0.894226 -0.447615 -0.000166145 -0.860012 -0.510274 -0.00011351 -0.821424 -0.570317 -5.73717e-05 -0.778645 -0.627464 0 -0.731895 -0.681418 0 -0.681404 -0.731908 5.8611e-05 -0.680803 -0.732467 -5.85133e-05 -0.627477 -0.778635 0.000115826 -0.625528 -0.780202 0.000112943 -0.570326 -0.821418 -0.000115828 -0.625528 -0.780202 5.85158e-05 -0.627477 -0.778635 -5.86111e-05 -0.680803 -0.732467 0.000357291 -0.952182 -0.305532 0.000313941 -0.927335 -0.374231 0.000268251 -0.897547 -0.440919 0.000219825 -0.862963 -0.505267 0.000168722 -0.823782 -0.566907 0.000115272 -0.780197 -0.625534 5.98813e-05 -0.732466 -0.680804 -0.000112943 -0.570326 -0.821418 0.0001693 -0.566915 -0.823776 0.000165614 -0.510284 -0.860006 -0.000169302 -0.566915 -0.823776 -0.000165614 -0.510284 -0.860006 0.000222258 -0.505252 -0.862972 0.000213298 -0.4476 -0.894234 -0.000222257 -0.505252 -0.862972 -0.000213298 -0.4476 -0.894234 0.000268391 -0.440935 -0.897539 0.000262441 -0.382676 -0.923883 -0.000268393 -0.440935 -0.897539 -0.000262441 -0.382676 -0.923883 0.000313823 -0.37422 -0.92734 0.000306228 -0.315813 -0.948821 -0.000313823 -0.37422 -0.92734 -0.000306228 -0.315813 -0.948821 0.000358178 -0.305532 -0.952182 0.000346107 -0.2473 -0.968939 -0.000358178 -0.305532 -0.952182 -0.000346107 -0.2473 -0.968939 0.00039597 -0.235251 -0.971935 0.000387116 -0.177575 -0.984107 -0.000395971 -0.235251 -0.971935 -0.000387116 -0.177575 -0.984107 0.00043491 -0.163636 -0.986521 0.000421183 -0.106878 -0.994272 -0.00043491 -0.163636 -0.986521 -0.000421183 -0.106878 -0.994272 0.000468814 -0.0911887 -0.995834 0.000454574 -0.0356948 -0.999363 -0.000468815 -0.0911887 -0.995834 -0.000454574 -0.0356948 -0.999363 0 -0.0274019 -0.999625 5.73718e-05 -0.778645 -0.627464 0.00011351 -0.821424 -0.570317 0.000166145 -0.860012 -0.510274 0.000215681 -0.894226 -0.447615 0.000262341 -0.923878 -0.382687 0.000306351 -0.948825 -0.315801 0.000346938 -0.968937 -0.247306 0.000385759 -0.984111 -0.177552 0.000422135 -0.994271 -0.10689 0.000454625 -0.999363 -0.0356957 -0.000454625 -0.999363 -0.0356957 0.000468625 -0.995834 -0.0911875 0 -0.999625 -0.0274014 -0.000468855 -0.995832 -0.0912013 -0.000434977 -0.986518 -0.16365 -0.000397974 -0.971943 -0.235215 -0.0003565 -0.952179 -0.30554 -0.000314385 -0.927338 -0.374225 -0.000267826 -0.897547 -0.440919 -0.000219094 -0.862957 -0.505277 -0.000169401 -0.823792 -0.566892 -0.000114578 -0.78019 -0.625542 -5.99284e-05 -0.732466 -0.680803 0 -0.681404 -0.731908 0 -0.731895 -0.681418 -0.00042111 -0.994271 -0.10689 0.000434977 -0.986518 -0.16365 -0.000384842 -0.984111 -0.177552 0.000397974 -0.971943 -0.235215 -0.000347656 -0.968933 -0.247321 -0.000305885 -0.948829 -0.315789 -0.000262786 -0.923875 -0.382693 -0.000216475 -0.894224 -0.447621 -0.0001654 -0.860017 -0.510265 -0.000114264 -0.821423 -0.57032 -5.72342e-05 -0.778647 -0.627463 0 -0.731895 -0.681418 0 -0.681404 -0.731908 5.85639e-05 -0.680803 -0.732466 -5.86508e-05 -0.627479 -0.778634 0.000115826 -0.625528 -0.780202 0.000112954 -0.570323 -0.82142 -0.000115828 -0.625528 -0.780202 5.86484e-05 -0.627479 -0.778634 -5.85639e-05 
-0.680803 -0.732466 0.0003565 -0.952179 -0.30554 0.000314385 -0.927338 -0.374225 0.000267826 -0.897547 -0.440919 0.000219094 -0.862957 -0.505277 0.000169401 -0.823792 -0.566892 0.000114578 -0.78019 -0.625542 5.99283e-05 -0.732466 -0.680803 -0.000112954 -0.570323 -0.82142 0.000168573 -0.566921 -0.823772 0.000166415 -0.510284 -0.860006 -0.000168571 -0.566921 -0.823772 -0.000166415 -0.510284 -0.860006 0.000222184 -0.505243 -0.862977 0.000213298 -0.4476 -0.894234 -0.000222184 -0.505243 -0.862977 -0.000213298 -0.4476 -0.894234 0.000268391 -0.440935 -0.897539 0.000262482 -0.382671 -0.923885 -0.000268393 -0.440935 -0.897539 -0.000262482 -0.382671 -0.923885 0.000313475 -0.37422 -0.92734 0.000306358 -0.315829 -0.948816 -0.000313474 -0.37422 -0.92734 -0.000306358 -0.315829 -0.948816 0.000358319 -0.305544 -0.952178 0.000346291 -0.247285 -0.968943 -0.000358319 -0.305544 -0.952178 -0.000346291 -0.247285 -0.968943 0.000395587 -0.235239 -0.971938 0.000387282 -0.177575 -0.984107 -0.000395587 -0.235239 -0.971938 -0.000387282 -0.177575 -0.984107 0.000434027 -0.163648 -0.986519 0.000422209 -0.106878 -0.994272 -0.000434027 -0.163648 -0.986519 -0.000422209 -0.106878 -0.994272 0.000468519 -0.091176 -0.995835 0.000454599 -0.0356953 -0.999363 -0.000468519 -0.091176 -0.995835 -0.000454599 -0.0356953 -0.999363 0 -0.0274019 -0.999625 5.72342e-05 -0.778647 -0.627463 0.000114264 -0.821423 -0.57032 0.0001654 -0.860017 -0.510265 0.000216475 -0.894224 -0.447621 0.000262786 -0.923875 -0.382693 0.000305885 -0.948829 -0.315789 0.000347656 -0.968933 -0.247321 0.000384842 -0.984111 -0.177552 0.00042111 -0.994271 -0.10689 0.000454633 -0.999363 -0.0356953 -0.000454633 -0.999363 -0.0356953 0.000468855 -0.995832 -0.0912013 0 -1 -3.7253e-06 0 -1 -3.7253e-06 -3.58604e-07 -1 -3.54963e-07 2.47858e-07 -1 -1.03274e-07 0 -1 0 0 -1 0 7.45057e-06 -1 0 0 -0.505216 0.862993 0 -0.535081 0.844801 -0.00224079 -0.587784 0.809015 0 -0.622964 0.78225 0 -0.649929 0.759995 -0.00280006 -0.69464 0.719352 0 -0.728052 0.685522 0 -0.751503 0.65973 -0.00314001 -0.788009 0.615655 0 -0.818211 0.574918 0 -0.837777 0.546012 -0.00324683 -0.866021 0.499997 0 -0.891755 0.452518 0 -0.906978 0.421177 -0.00313364 -0.92718 0.374603 0 -0.947091 0.320966 0 -0.95772 0.287703 -0.00279806 -0.970293 0.241919 0 -0.983143 0.182837 0 -0.988929 0.148389 -0.00223476 -0.994519 0.104529 0 -0.99916 0.0409831 0 -0.999981 0.00611972 0.000224318 -0.999391 0.0349055 0 -0.565529 0.824728 0 -0.594018 0.804452 0.000268895 -0.587786 0.809017 -0.00255026 -0.642792 0.766036 0 -0.677213 0.735787 0 -0.702488 0.711696 0.000336007 -0.694643 0.719355 -0.00299204 -0.74315 0.669118 0 -0.775085 0.631856 0 -0.796689 0.604389 0.000376803 -0.788013 0.615658 -0.00322084 -0.829025 0.559202 0 -0.857173 0.515028 0 -0.874614 0.484819 0.00038962 -0.866026 0.499999 -0.00321498 -0.89879 0.438369 0 -0.921775 0.387726 0 -0.934734 0.355349 0.000376038 -0.927185 0.374605 -0.00299725 -0.951052 0.309017 0 -0.967588 0.252534 0 -0.975809 0.218624 0.000335767 -0.970296 0.241919 -0.00254803 -0.984805 0.173643 0 -0.993687 0.112189 0 -0.996994 0.0774722 0.000268171 -0.994522 0.10453 -0.00186932 -0.999389 0.0349055 0.000305764 -0.984809 0.173644 0.000359671 -0.951056 0.309019 0.000385799 -0.898794 0.438371 0.000386501 -0.82903 0.559205 0.000359044 -0.743153 0.669121 0.000306031 -0.642794 0.766039 0.000224943 -0.529919 0.848049 -0.00187454 -0.529918 0.848047 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0.00132653 -0.529931 0.84804 0.000474366 -0.566917 0.823775 -0.000417989 -0.587768 0.809029 0.000448702 -0.625526 0.780203 -0.000386853 -0.642792 0.76604 0.000413071 -0.680806 0.732464 0.00104538 -0.694665 0.719333 -0.00123921 -0.680806 0.732463 0.00116055 -0.642792 0.76604 -0.0013461 -0.625526 0.780203 0.00125396 -0.587768 0.809029 -0.0014231 -0.566917 0.823774 0 -0.518334 0.855179 -0.000442177 -0.529932 0.84804 -0.000348459 -0.694665 0.719333 0.000369256 -0.732463 0.680807 0.000902712 -0.743141 0.669134 -0.00110777 -0.732463 0.680806 -0.000300904 -0.743141 0.669135 0.000318275 -0.78019 0.625542 0.000738881 -0.788005 0.615668 -0.000954824 -0.78019 0.625542 -0.000246294 -0.788005 0.615668 0.000258806 -0.82379 0.566895 0.000548214 -0.829045 0.559181 -0.000776417 -0.82379 0.566895 -0.000182738 -0.829045 0.559181 0.000191498 -0.862958 0.505275 0.000336663 -0.866025 0.500001 -0.000574494 -0.862958 0.505275 -0.000112221 -0.866025 0.500001 0.000115636 -0.897552 0.440909 9.61055e-05 -0.898794 0.438371 -0.000346909 -0.897552 0.440909 -3.20352e-05 -0.898794 0.438371 3.26972e-05 -0.927185 0.374603 -5.30151e-05 -0.951056 0.30902 -0.000130269 -0.970295 0.241925 -0.000199014 -0.984808 0.173644 -0.00026064 -0.994521 0.10454 -0.000312819 -0.999391 0.0348922 7.29138e-09 -0.999625 0.0274017 0.000938494 -0.999391 0.0348922 -0.000997683 -0.995833 0.0911884 0.000332594 -0.995834 0.0911884 5.42783e-05 -0.927334 0.374234 0.000159045 -0.951056 0.30902 -0.000162835 -0.927334 0.374234 -9.80939e-05 -0.927185 0.374603 0.000135642 -0.952178 0.305543 0.000390807 -0.970295 0.241925 -0.000406927 -0.952178 0.305543 0.000209537 -0.971944 0.235212 0.000597041 -0.984808 0.173644 -0.000628613 -0.971944 0.235212 0.000274315 -0.986518 0.163651 0.000781938 -0.99452 0.10454 -0.000822944 -0.986518 0.163651 0 -0.505231 -0.862984 0.00101486 -0.541574 -0.840653 0.00115792 -0.601486 -0.798883 0.00127822 -0.658206 -0.752837 0.00137908 -0.711403 -0.702783 0.00144967 -0.760796 -0.648989 0.0014958 -0.806143 -0.59172 0.00152285 -0.847187 -0.531293 0.00152383 -0.883711 -0.468031 0.00149963 -0.915512 -0.402287 0.0014502 -0.942434 -0.334388 0.00137746 -0.964333 -0.26469 0.00128115 -0.98108 -0.1936 0.00116007 -0.992596 -0.121458 0.00101651 -0.998813 -0.0486924 0 -0.999981 -0.00611937 -0.000801183 -0.99939 -0.0349053 0.000319889 -0.58779 -0.809014 -0.000385975 -0.601486 -0.798883 0.000364694 -0.642792 -0.76604 -0.000426074 -0.658207 -0.752837 0.000400264 -0.694643 -0.719355 0.000427467 -0.74315 -0.669125 0.00044838 -0.788017 -0.615654 0.000460374 -0.829029 -0.559206 0.000463701 -0.866025 -0.500001 0.000459327 -0.898795 -0.438369 0.000447652 -0.927184 -0.374607 0.000428051 
-0.951056 -0.309018 0.000399694 -0.970296 -0.24192 0.000363912 -0.984809 -0.173644 0.000319247 -0.994522 -0.104529 0.000267061 -0.999391 -0.0349053 -0.000338836 -0.998814 -0.0486925 -0.000459694 -0.711403 -0.702784 -0.000483223 -0.760797 -0.64899 -0.000498603 -0.806143 -0.59172 -0.000507618 -0.847188 -0.531293 -0.000507947 -0.883712 -0.468032 -0.000499877 -0.915513 -0.402288 -0.000483402 -0.942435 -0.334388 -0.000459155 -0.964333 -0.26469 -0.00042705 -0.981081 -0.1936 -0.00038669 -0.992597 -0.121458 -0.00095774 -0.994521 -0.104529 -0.00109173 -0.984808 -0.173644 -0.00119908 -0.970295 -0.24192 -0.00128415 -0.951056 -0.309017 -0.00134295 -0.927183 -0.374607 -0.00137798 -0.898794 -0.438369 -0.0013911 -0.866024 -0.5 -0.00138112 -0.829028 -0.559206 -0.00134514 -0.788016 -0.615653 -0.0012824 -0.74315 -0.669124 -0.00120079 -0.694643 -0.719354 -0.00109408 -0.642792 -0.76604 -0.000959666 -0.58779 -0.809013 -0.000802996 -0.529921 -0.848047 -0.000338287 -0.541574 -0.840653 0.000267666 -0.529921 -0.848047 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 -5.29819e-07 -4.96706e-08 1 -8.27607e-07 -2.88946e-08 1 -4.9668e-07 -5.22088e-08 1 -3.54026e-07 -6.24228e-08 1 -2.73892e-07 -6.82899e-08 1 -2.22162e-07 -7.21853e-08 1 -1.85693e-07 -7.50245e-08 1 -1.58356e-07 -7.72353e-08 1 -1.36908e-07 -7.90442e-08 1 -1.19473e-07 -8.05829e-08 1 -1.0487e-07 -8.19351e-08 1 0 -9.21444e-08 1 -9.60808e-08 -9.94931e-08 1 -1.11741e-07 -1.00613e-07 1 -3.6016e-07 -1.03274e-07 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 -1.48102e-07 -9.98995e-08 1 -1.2734e-07 -9.94867e-08 1 -1.10027e-07 -9.90674e-08 1 -9.52519e-08 -9.86405e-08 1 -8.24048e-08 -9.8205e-08 1 -7.1027e-08 -9.7759e-08 1 -6.08009e-08 -9.73013e-08 1 -6.08014e-08 -9.73013e-08 1 -7.11152e-08 -9.78826e-08 1 -8.2737e-08 -9.86009e-08 1 0 0 1 0 0 -0.000442353 -0.529925 -0.848045 -0.00142358 -0.566923 -0.823769 0.00125381 -0.587776 -0.809022 -0.00134583 -0.625528 -0.780201 0.00116056 -0.642792 -0.76604 -0.00123921 -0.680806 -0.732463 -0.000348618 -0.694664 -0.719335 0.000412811 -0.680806 -0.732464 -0.000387124 -0.642792 -0.76604 0.000448333 -0.625528 -0.780202 -0.000418124 -0.587781 -0.80902 0.000474529 -0.566924 -0.82377 0 -0.518322 -0.855186 0.00132706 -0.529924 -0.848044 0.00104509 -0.694663 -0.719334 -0.00110813 -0.732463 -0.680806 -0.000301023 -0.743141 -0.669135 0.000369135 -0.732463 -0.680807 0.000902356 -0.743141 -0.669134 -0.000955157 -0.78019 -0.625542 -0.000246403 -0.788005 -0.615668 0.000318164 -0.78019 -0.625542 0.000738554 -0.788005 -0.615668 -0.000776719 -0.82379 -0.566895 -0.000182837 -0.829045 -0.559181 0.000258705 -0.82379 -0.566895 0.000547916 -0.829045 -0.559181 -0.000574734 -0.862957 -0.505277 -0.0001124 -0.866025 -0.500001 0.000191399 -0.862957 -0.505277 0.000336663 -0.866025 -0.500001 -0.000346909 -0.897552 -0.440909 -3.22678e-05 -0.898795 -0.438369 0.00011548 -0.897552 -0.440909 9.63379e-05 -0.898795 -0.43837 -9.78886e-05 -0.927185 -0.374605 0.000159045 -0.951056 -0.30902 0.000390678 -0.970295 -0.241925 0.000597053 -0.984808 -0.173644 0.000781994 -0.99452 -0.10454 0.000938495 -0.999391 -0.0348922 -7.29135e-09 -0.999625 -0.0274016 -0.000312844 -0.999391 -0.0348922 0.000332526 -0.995834 -0.0911881 -0.000997673 -0.995833 -0.0911881 -0.000162835 -0.927334 -0.374234 -5.31247e-05 -0.951056 -0.30902 5.41454e-05 -0.927334 -0.374234 3.24969e-05 -0.927185 -0.374605 -0.000407079 -0.952179 -0.305542 -0.000130312 -0.970295 -0.241925 
0.000135584 -0.952179 -0.305542 -0.000628627 -0.971944 -0.235213 -0.000199079 -0.984809 -0.173644 0.000209459 -0.971944 -0.235213 -0.000822856 -0.986518 -0.163651 -0.000260702 -0.994521 -0.10454 0.000274228 -0.986518 -0.163651 0 -1 0 0 -1 0 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0162428 0.999868 0 -0.0162429 0.999868 0 -0.0162429 0.999868 -0.000254849 -0.0248079 0.999692 0.000281676 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0.000133008 -0.524696 0.85129 -0.000409543 -0.501867 0.864945 0.000292038 -0.493126 0.869958 -0.000310547 -0.456317 0.889817 0 -0.409489 0.912315 0 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 -0.000241126 -0.0737851 0.997274 0.00026573 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0.000138869 -0.565831 0.824521 -0.0005092 -0.546086 0.837729 -0.000419642 -0.545009 0.83843 -0.000218591 -0.122641 0.992451 0.000237527 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0.000129583 -0.605591 0.795776 -0.000606039 -0.588841 0.808249 -0.000327847 -0.585607 0.810595 -0.000184031 -0.17114 0.985247 0.000198329 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 9.83033e-05 -0.643868 0.765136 -0.000701456 -0.629992 0.776602 -0.000232642 -0.624724 0.780845 -0.000134041 -0.219209 0.975678 0.000146459 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 4.15528e-05 -0.680624 0.732633 -0.0007934 -0.669452 0.742855 -0.000139132 -0.662345 0.749199 -7.09058e-05 -0.266791 0.963755 7.44434e-05 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 -4.99833e-05 -0.715719 0.698389 -0.000878296 -0.707113 0.7071 -4.50523e-05 -0.69835 0.715756 0 -0.313691 0.949525 0 -0.359892 0.932994 -0.000105203 -0.405311 0.914179 -0.000198372 -0.44978 0.893139 -0.000292038 -0.493126 0.869958 -0.000133009 -0.524695 0.85129 0.000419642 -0.545009 0.83843 0.0005092 -0.546086 0.837729 -0.000138869 -0.565831 0.824521 0.000327847 -0.585607 0.810595 -0.000129582 -0.605591 0.795776 0.000232642 -0.624724 0.780845 0.000701456 -0.629992 0.776602 -9.83033e-05 -0.643868 0.765136 0.000139131 -0.662345 0.749199 -4.1554e-05 -0.680624 0.732633 4.50523e-05 -0.69835 0.715756 0.000878296 -0.707113 0.7071 4.99833e-05 -0.715719 0.698389 -4.5234e-05 -0.732671 0.680584 0.000186407 -0.749044 0.66252 -0.000129803 -0.765225 0.643764 0.00100973 -0.776599 0.629994 0.000376452 -0.780592 0.625041 -0.000207736 -0.795911 0.605414 0.000639381 -0.810243 0.586094 -0.000271618 -0.824671 0.565613 0.00105028 -0.837725 0.546092 0.00099623 
-0.83793 0.545777 -0.000314662 -0.851439 0.524453 0.000893469 -0.863752 0.503916 0.00128336 -0.864944 0.501866 -0.000326136 -0.876141 0.482055 0.000702414 -0.887496 0.460815 0 -0.89733 0.441361 0.000397768 -0.908854 0.417114 -0.000205767 -0.919148 0.393912 0.000267586 -0.928471 0.371406 0 -0.927905 0.372817 0.000273014 -0.932409 0.361405 0 -0.93734 0.348417 0 -0.93734 0.348417 0 -0.945606 0.325315 0.000268753 -0.949909 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999868 0.0162285 -0.00010852 -0.999925 0.012277 0.000718674 -0.999323 0.0367729 -0.000248172 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.912308 0.409504 0 -0.912308 0.409504 0.000112646 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 -0.000186407 -0.749044 0.66252 -0.000951462 -0.742853 0.669454 4.5233e-05 -0.732671 0.680584 0.000210962 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 -0.000376452 -0.780592 0.625041 -0.00100973 -0.776599 0.629994 0.000129803 -0.765225 0.643764 0.000310548 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 -0.000639383 -0.810243 0.586094 -0.00104538 -0.808253 0.588835 0.000207736 -0.795911 0.605414 0.000409544 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 -0.00099623 -0.83793 0.545777 -0.00105028 -0.837725 0.546092 0.000271618 -0.824671 0.565613 0.000606039 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.885576 0.464494 -0.000702416 -0.887496 0.460815 0.000326135 -0.876141 0.482055 -0.000516244 -0.870338 0.492455 0 -0.864945 0.501867 0.000793399 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.918474 0.395481 -0.000267588 -0.928471 0.371406 0.000205766 -0.919148 0.393912 0.000951462 -0.742853 0.669454 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.953675 0.30084 0.000200882 -0.960446 0.278465 -0.00024741 -0.953284 0.302076 0.00104538 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.979149 0.203145 0.000612013 -0.983126 0.182926 -0.00122448 -0.978261 0.207373 -0.000296666 -0.898727 0.438508 0.000491551 -0.909079 0.416623 0 -0.918474 0.395481 0 -0.953675 0.30084 -0.000200886 -0.960446 0.278465 0 -0.960767 0.277356 0.000270603 -0.964795 0.263002 0 -0.967644 0.25232 0.000655973 -0.966934 0.255026 -0.000426386 -0.972964 0.230959 0.000260681 -0.977147 0.212563 0 -0.986863 0.161561 
0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999868 0.0162285 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0.000247412 -0.953284 0.302076 0 -0.979149 0.203145 -0.000612013 -0.983126 0.182926 0 -0.983839 0.179056 0.00024378 -0.986863 0.161561 0.000163147 -0.987298 0.158878 0 -0.989965 0.141314 -0.000741998 -0.990915 0.134488 0 -0.991624 0.129161 0.000217774 -0.993905 0.110239 0.000221441 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.999594 0.0284909 0 -0.999925 0.0122757 1.94771e-05 -0.999796 0.0202108 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0.00122448 -0.978261 0.207373 0 -0.995835 0.0911704 -0.000787166 -0.996318 0.0857288 0 -0.996883 0.0788973 0.000159734 -0.998117 0.0613403 0.000248172 -0.998294 0.0583811 0 -0.999594 0.0284909 0 -0.998294 0.0583811 0 -0.999169 0.0407501 -0.000718674 -0.999323 0.0367729 0 -0.999594 0.0284909 0 -0.999868 0.0162285 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.999594 0.0284909 0 -0.999169 0.0407501 -0.000159734 -0.998117 0.0613403 0 -0.996883 0.0788973 0.000787166 -0.996318 0.0857288 0 -0.995835 0.0911704 -0.000221441 -0.993918 0.110125 -0.000217774 -0.993905 0.110239 0 -0.991624 0.129161 0.000741998 -0.990915 0.134488 0 -0.989965 0.141314 -0.000163146 -0.987298 0.158878 -0.00024378 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.912308 0.409504 0 -0.889823 0.456306 0 -0.864945 0.501867 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.983839 0.179056 0.000426386 -0.972964 0.230959 -0.000655977 -0.966934 0.255026 0 -0.967644 0.25232 -0.000260681 -0.977147 0.212563 0 -0.973549 0.228478 0 -0.945606 0.325315 0 -0.93734 0.348417 0 -0.93734 0.348417 -0.000268752 -0.949909 0.312525 0 -0.945606 0.325315 -0.000491552 -0.909079 0.416623 0.000296664 -0.898727 0.438508 -0.000527228 -0.894823 0.446421 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.861001 0.508603 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 -0.000893469 -0.863752 0.503916 0.000314662 -0.851439 0.524453 0.000198373 -0.44978 0.893139 -0.000210962 -0.409489 0.912315 0 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0.000105203 -0.405311 0.914179 -0.000112646 -0.361409 0.932408 0 -0.312537 0.949905 0 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.359892 0.932994 0 -0.313691 0.949525 7.09053e-05 -0.266791 0.963755 0.000134041 -0.219209 0.975678 0.000184031 -0.17114 0.985247 0.000218591 -0.122641 0.992451 0.000241126 -0.0737851 0.997274 0.000254849 -0.0248079 0.999692 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.312537 0.949905 -7.44434e-05 -0.312537 0.949905 -0.000146459 -0.262981 0.964801 0 -0.212573 0.977145 0 -0.212573 0.977145 -0.00019833 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.161561 0.986863 -0.000237527 -0.161561 0.986863 0 -0.110116 0.993919 0 
-0.110116 0.993919 -0.00026573 -0.110116 0.993919 0 -0.0583855 0.998294 0 -0.0583855 0.998294 -0.000281677 -0.0583855 0.998294 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.0162429 0.999868 0 -0.0162429 0.999868 0 -0.0583855 0.998294 0 -0.0583855 0.998294 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.110116 0.993919 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.161561 0.986863 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.212573 0.977145 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.262981 0.964801 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.312537 0.949905 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.361409 0.932408 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.409489 0.912315 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.456317 0.889817 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.501867 0.864945 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.546086 0.837729 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.588841 0.808249 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.629992 0.776602 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.669452 0.742855 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.707114 0.7071 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 
-0.912308 0.409504 0 -0.932409 0.361405 -0.000273014 -0.932409 0.361405 0 -0.927905 0.372817 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.742853 0.669454 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.7766 0.629994 0 -0.7766 0.629994 0 -0.7766 0.629994 0 -0.7766 0.629995 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.964795 0.263002 -0.000270603 -0.964795 0.263002 0 -0.960767 0.277356 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.808253 0.588835 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.837725 0.546092 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.864945 0.501867 0 -0.889823 0.456306 0 -0.889823 0.456306 0.000342653 -0.886515 0.4627 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 0 -0.889823 0.456306 -0.000257376 -0.912308 0.409504 0 -0.907894 0.419199 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.912308 0.409504 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.932409 0.361405 0 -0.945606 0.325315 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.94991 0.312525 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.964795 0.263002 0 -0.973549 0.228478 0 -0.964795 0.263002 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.977147 0.212563 0 -0.977147 0.212563 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.986863 0.161561 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.993918 0.110125 0 -0.998294 0.0583811 0 -0.998294 0.0583811 0 -0.998294 0.0583811 -0.999925 0 0.0122756 -0.999925 0 0.0122756 -0.999323 -9.19591e-05 0.0367865 -0.998118 0 0.0613211 -0.996878 0 0.0789619 -0.99584 0 0.0911202 -0.993906 0 0.110234 -0.993906 0 0.110234 -0.991626 0 0.129144 -0.990902 -0.000102665 0.134583 -0.989959 0 0.141356 -0.990932 0.00184117 0.134351 -0.999596 0 0.0284321 -0.999169 0 0.0407641 -0.999324 0.00207743 0.0367134 -0.996313 -0.000102056 0.0857874 -0.987304 0 0.158843 -0.987304 0 0.158843 -0.983841 0 0.179043 -0.981568 0 0.191114 -0.978316 0 0.207116 -0.978316 0 0.207116 -0.973549 0 0.228478 -0.972944 -6.82555e-05 0.231039 -0.97066 0 0.240457 -0.972986 0.000850346 0.230863 -0.983109 -9.05891e-05 0.18302 -0.966977 0 0.254864 -0.966977 0 0.254864 -0.960764 0 0.277366 -0.953268 -0.000447729 0.302125 -0.960456 0.000363344 0.278431 -0.960431 -3.51498e-05 0.278519 -0.953305 4.32827e-05 0.302009 -0.945606 0 0.325315 -0.937343 0 0.348409 -0.945606 0 0.325315 -0.945606 0 0.325315 -0.937343 0 0.348409 -0.927899 0 0.372832 -0.919162 0.000317044 0.39388 -0.928447 -0.000416348 0.371466 -0.928496 5.67846e-05 0.371342 -0.919119 -4.32664e-05 0.393979 -0.9079 0 0.419186 
-0.898754 0.000434492 0.438453 -0.909044 -0.000716262 0.4167 -0.909149 0.00011375 0.416472 -0.898684 -6.9062e-05 0.438598 -0.887641 0 0.460536 -0.879819 0 0.47531 -0.873915 0 0.486078 -0.863963 0 0.503556 -0.863963 0 0.503556 -0.854711 0 0.519104 -0.851377 -8.73063e-05 0.524555 -0.84823 0 0.529629 -0.851469 0.000421338 0.524406 -0.897325 0 0.441371 -0.876097 -8.29207e-05 0.482136 -0.838235 0 0.545308 -0.838235 0 0.545308 -0.827391 0 0.561627 -0.810166 -0.000824916 0.586201 -0.8247 0.000348958 0.56557 -0.824614 -8.10661e-05 0.565695 -0.813837 0.000126218 0.581093 -0.810449 0 0.585809 -0.795859 -6.6904e-05 0.605482 -0.780692 0.000121204 0.624916 -0.782951 0 0.622084 -0.76655 0 0.642185 -0.765194 -4.52089e-05 0.6438 -0.749103 6.39389e-05 0.662454 -0.733131 0 0.680087 -0.715704 -6.03341e-05 0.698404 -0.732674 5.34947e-05 0.680579 -0.797981 0 0.602683 -0.780555 -0.000470635 0.625086 -0.79593 0.000259678 0.605389 -0.732657 -1.64773e-05 0.680598 -0.715725 1.85836e-05 0.698383 -0.697873 0 0.716221 -0.680632 4.87601e-05 0.732625 -0.698346 -5.28972e-05 0.71576 -0.698364 1.75965e-05 0.715743 -0.680615 -1.62219e-05 0.732642 -0.6624 5.77381e-05 0.74915 -0.644481 -0.00114541 0.76462 -0.629992 0.000295585 0.776602 -0.624695 -0.000265363 0.780869 -0.629992 -0.000807646 0.776602 -0.643877 0.000112897 0.765129 -0.662342 -0.000161851 0.749202 -0.669454 -0.000925235 0.742853 -0.669454 0.000317801 0.742853 -0.651511 0 0.758639 -0.642185 0 0.76655 -0.624795 0.000100866 0.780788 -0.612477 0 0.790488 -0.60602 -0.000799945 0.795449 -0.602666 0 0.797993 -0.588841 0.000267355 0.808249 -0.585712 0.000148634 0.81052 -0.571773 0 0.820412 -0.566151 -0.000608349 0.824302 -0.561612 0 0.827401 -0.546081 0.000234196 0.837732 -0.545164 0.000199067 0.838329 -0.529612 0 0.84824 -0.501867 0 0.864945 -0.45632 0 0.889816 -0.409486 0 0.912316 -0.361412 0 0.932406 -0.312537 0 0.949905 -0.262979 0 0.964802 -0.212573 0 0.977145 -0.161561 0 0.986863 -0.110117 0 0.993919 -0.0583855 0 0.998294 -0.024965 -0.000411806 0.999688 -0.0162429 0 0.999868 -0.0162429 0 0.999868 -0.0248233 0.000270079 0.999692 -0.0738002 0.000256353 0.997273 -0.122654 0.000233106 0.99245 -0.171153 0.000196943 0.985245 -0.219221 0.000144106 0.975675 -0.266796 7.67635e-05 0.963753 -0.313688 0 0.949526 -0.359892 0 0.932994 -0.361411 -0.000123251 0.932406 -0.405322 0.000114651 0.914174 -0.409486 -0.000231532 0.912316 -0.409486 0 0.912316 -0.524941 -0.000460647 0.851138 -0.501867 0.0013637 0.864944 -0.493553 -0.000856412 0.869715 -0.450028 -0.00051352 0.893015 -0.405431 -0.000245217 0.914126 -0.359892 0 0.932994 -0.313688 0 0.949526 -0.312537 0.000149681 0.949905 -0.266857 -0.000142804 0.963736 -0.262979 0.000279344 0.964802 -0.219326 -0.000254269 0.975652 -0.171284 -0.000332232 0.985222 -0.122797 -0.000378364 0.992432 -0.0739455 -0.000402434 0.997262 -0.0583855 0.000459618 0.998294 -0.45632 0.000891072 0.889815 -0.409486 0.000537705 0.912316 -0.361412 0.000262246 0.932406 -0.212573 0.000361012 0.977145 -0.161561 0.00041486 0.986863 -0.110117 0.000447518 0.993919 -0.0583855 -0.000298758 0.998294 -0.0583855 0 0.998294 -0.110117 -0.000282678 0.993919 -0.110117 0 0.993919 -0.161561 -0.000253633 0.986863 -0.161561 0 0.986863 -0.212573 -0.000212598 0.977145 -0.212573 0 0.977145 -0.262979 -0.000157528 0.964802 -0.262979 0 0.964802 -0.312537 -8.02409e-05 0.949905 -0.312537 0 0.949905 -0.449792 0.00021846 0.893133 -0.45632 -0.000343955 0.889816 -0.45632 0 0.889816 -0.493149 0.00032318 0.869945 -0.501867 -0.000457241 0.864945 -0.501867 0 0.864945 -0.524715 0.000148405 0.851278 
-0.544974 -0.000469024 0.838453 -0.565844 0.000157307 0.824512 -0.585567 -0.000369102 0.810624 -0.588841 -0.000689365 0.808249 -0.605607 0.000147819 0.795764 -0.546081 -0.000572678 0.837732 -0.71623 0 0.697864 -0.750539 0 0.660826 -0.749032 -0.000226015 0.662534 -0.765242 0.000159773 0.643743 -0.876183 0.000453989 0.481979 -0.887641 0 0.460536 -0.918474 0 0.395481 -0.937343 0 0.348408 -0.953677 0 0.300832 -0.983151 0.00136215 0.182791 -0.996326 0.00211351 0.0856153 -0.998118 0 0.0613211 -0.361412 0 0.932406 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0162428 0.999868 0.000254852 0.0248079 0.999692 -0.00028167 0.0583855 0.998294 0 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 -0.000132896 0.5247 0.851287 0.000409838 0.501863 0.864947 -0.000291783 0.493122 0.86996 0.000310718 0.456317 0.889817 0 0.409491 0.912314 0 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0.000241153 0.0737856 0.997274 -0.000265692 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 -0.000138869 0.565831 0.824521 0.0005092 0.546086 0.837729 0.000419642 0.545009 0.83843 0.000218544 0.12264 0.992451 -0.000237598 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 -0.000129791 0.605596 0.795772 0.000606225 0.588837 0.808252 0.000327637 0.585598 0.810601 0.000183987 0.17114 0.985247 -0.000198389 0.212574 0.977145 0 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 -9.83033e-05 0.643868 0.765136 0.000701456 0.629992 0.776602 0.000232252 0.62472 0.780849 0.000134031 0.219211 0.975678 -0.000146444 0.262979 0.964802 0 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 -4.15528e-05 0.680624 0.732633 0.0007934 0.669452 0.742855 0.000139132 0.662345 0.749199 7.1133e-05 0.266792 0.963754 -7.41751e-05 0.312539 0.949905 0 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 4.99833e-05 0.715719 0.698389 0.000878296 0.707113 0.7071 4.50523e-05 0.69835 0.715756 0 0.313688 0.949526 0 0.359892 0.932994 0.000105087 0.405314 0.914177 0.000198352 0.449778 0.89314 0.000291783 0.493122 0.86996 0.000132896 0.5247 0.851287 -0.000419642 0.545009 0.83843 -0.0005092 0.546086 0.837729 0.000138869 0.565831 0.824521 -0.000327637 0.585598 0.810601 0.000129791 0.605596 0.795772 -0.000232252 0.62472 0.780849 -0.000701456 0.629992 0.776602 9.83033e-05 0.643868 0.765136 -0.000139131 0.662345 0.749199 4.1554e-05 0.680624 0.732633 -4.50523e-05 0.69835 0.715756 -0.000878296 0.707113 0.7071 -4.99833e-05 0.715719 0.698389 4.5234e-05 0.732671 0.680584 -0.000185789 0.749049 0.662514 0.000130249 0.76522 0.643769 -0.00100973 0.776599 0.629994 -0.000376452 0.780592 0.625041 0.000207736 
0.795911 0.605414 -0.000639381 0.810243 0.586094 0.000271618 0.824671 0.565613 -0.00105028 0.837725 0.546092 -0.00099623 0.83793 0.545777 0.000314662 0.851439 0.524453 -0.000893469 0.863752 0.503916 -0.00128336 0.864944 0.501866 0.000326745 0.876145 0.482047 -0.000701093 0.887492 0.460822 0 0.89733 0.441361 -0.000397768 0.908854 0.417114 0.000205767 0.919148 0.393912 -0.000267755 0.928474 0.371398 0 0.927908 0.37281 -0.000272841 0.932409 0.361405 0 0.937337 0.348425 0 0.937337 0.348425 0 0.945606 0.325315 -0.000268752 0.949909 0.312525 0 0.964797 0.262998 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.998294 0.0583855 0 0.999868 0.0162285 0.00010852 0.999925 0.012277 -0.000719494 0.999323 0.0367824 0.000248039 0.998294 0.0583855 0 0.993918 0.110121 0 0.986862 0.161565 0 0.977147 0.212563 0 0.964797 0.262997 0 0.94991 0.312525 0 0.932409 0.361405 0 0.912308 0.409504 0 0.912308 0.409504 -0.000112758 0.36141 0.932407 0 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0.00018579 0.749049 0.662514 0.000951462 0.742853 0.669454 -4.5233e-05 0.732671 0.680584 -0.000210945 0.409491 0.912314 0 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0.000376452 0.780592 0.625041 0.00100973 0.776599 0.629994 -0.000130249 0.76522 0.643769 -0.000310719 0.456317 0.889817 0 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0.000639383 0.810243 0.586094 0.00104538 0.808253 0.588835 -0.000207736 0.795911 0.605414 -0.000409839 0.501863 0.864947 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0.00099623 0.83793 0.545777 0.00105028 0.837725 0.546092 -0.000271618 0.824671 0.565613 -0.000606225 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.885576 0.464494 0.000701094 0.887492 0.460822 -0.000326745 0.876145 0.482047 0.000516244 0.870338 0.492455 0 0.864945 0.501867 -0.000793399 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629995 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.918474 0.395481 0.000267758 0.928474 0.371398 -0.000205766 0.919148 0.393912 -0.000951462 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.953675 0.30084 -0.000200882 0.960446 0.278465 0.00024741 0.953284 0.302076 -0.00104538 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262998 0 0.977147 0.212563 0 0.97915 0.203138 -0.000611315 0.983125 0.182935 0.00122379 0.978263 0.207364 0.000296666 0.898727 0.438508 -0.000491551 0.909079 0.416623 0 0.918474 0.395481 0 0.953675 0.30084 0.000200886 0.960446 0.278465 0 0.960767 0.277356 -0.000270682 0.964796 0.262997 0 0.967646 0.252313 -0.000657861 0.966934 0.255026 0.000424493 0.972963 0.230959 -0.000260869 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 
0 0.998294 0.0583855 0 0.999868 0.0162285 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 -0.000247412 0.953284 0.302076 0 0.97915 0.203138 0.000611317 0.983125 0.182935 0 0.983836 0.17907 -0.000243912 0.986862 0.161565 -0.000163146 0.987298 0.158878 0 0.989965 0.141314 0.000741998 0.990915 0.134488 0 0.991624 0.129161 -0.000217774 0.993905 0.110239 -0.000221585 0.993918 0.110121 0 0.998294 0.0583855 0 0.999594 0.0284909 0 0.999925 0.0122757 -1.94771e-05 0.999796 0.0202108 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.993918 0.110121 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 -0.00122379 0.978263 0.207364 0 0.995837 0.0911537 0.000784712 0.996318 0.085729 0 0.996881 0.0789188 -0.000160016 0.998118 0.0613309 -0.000248039 0.998294 0.0583855 0 0.999594 0.0284909 0 0.998294 0.0583855 0 0.999169 0.0407641 0.000719494 0.999323 0.0367824 0 0.999594 0.0284909 0 0.999868 0.0162285 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.993918 0.110121 0 0.993918 0.110121 0 0.999594 0.0284909 0 0.999169 0.0407641 0.000160016 0.998118 0.0613309 0 0.996881 0.0789189 -0.000784712 0.996318 0.085729 0 0.995837 0.0911537 0.000221585 0.993918 0.110121 0.000217774 0.993905 0.110239 0 0.991624 0.129161 -0.000741998 0.990915 0.134488 0 0.989965 0.141314 0.000163147 0.987298 0.158878 0.000243912 0.986862 0.161565 0 0.977147 0.212563 0 0.964797 0.262998 0 0.94991 0.312525 0 0.932409 0.361405 0 0.912308 0.409504 0 0.889823 0.456306 0 0.864945 0.501867 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262997 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.986862 0.161565 0 0.986862 0.161565 0 0.983836 0.17907 -0.000424493 0.972963 0.230959 0.000657861 0.966934 0.255026 0 0.967646 0.252313 0.000260869 0.977147 0.212563 0 0.973547 0.228489 0 0.945606 0.325315 0 0.937337 0.348425 0 0.937337 0.348425 0.000268753 0.949909 0.312525 0 0.945606 0.325315 0.000491552 0.909079 0.416623 -0.000296664 0.898727 0.438508 0.000527228 0.894823 0.446421 0 0.889823 0.456306 0 0.889823 0.456306 0 0.861001 0.508603 0 0.837725 0.546092 0 0.837725 0.546092 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0.000893469 0.863752 0.503916 -0.000314662 0.851439 0.524453 -0.000198351 0.449778 0.89314 0.000210945 0.409491 0.912314 0 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 -0.000105087 0.405314 0.914177 0.000112758 0.36141 0.932407 0 0.312539 0.949905 0 0.262979 0.964802 0 0.212574 0.977145 0 0.16156 0.986863 0 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.359892 0.932994 0 0.313688 0.949526 -7.11336e-05 0.266792 0.963754 -0.000134031 0.219211 0.975678 -0.000183987 0.17114 0.985247 -0.000218544 0.12264 0.992451 -0.000241153 0.0737856 0.997274 -0.000254852 0.0248079 0.999692 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.110117 0.993919 0 0.16156 0.986863 0 0.212574 0.977145 0 0.262979 0.964802 0 0.262979 0.964802 0 0.312539 0.949905 7.41751e-05 0.312539 0.949905 0.000146444 0.262979 0.964802 0 0.212574 0.977145 0 0.212574 0.977145 0.000198388 0.212574 0.977145 0 0.16156 0.986863 0 0.16156 0.986863 0.000237598 0.16156 0.986863 0 0.110117 0.993919 0 0.110117 0.993919 0.000265692 0.110117 0.993919 0 0.0583855 0.998294 0 0.0583855 0.998294 0.00028167 0.0583855 0.998294 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.0583855 
0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.0162428 0.999868 0 0.0162428 0.999868 0 0.0583855 0.998294 0 0.0583855 0.998294 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.110117 0.993919 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.16156 0.986863 0 0.212574 0.977145 0 0.16156 0.986863 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.212574 0.977145 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.262979 0.964802 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.312539 0.949905 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.36141 0.932407 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.409491 0.912314 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.456317 0.889817 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.501863 0.864947 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.546086 0.837729 0 0.588837 0.808252 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.7766 0.629995 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.588837 0.808252 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.629992 0.776602 0 0.669452 0.742855 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.669452 0.742855 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.707114 0.7071 0 0.742853 0.669454 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0.000272841 0.932409 0.361405 0 0.927908 0.37281 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.742853 0.669454 0 0.7766 0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.7766 
0.629994 0 0.7766 0.629995 0 0.7766 0.629994 0 0.808253 0.588835 0 0.837725 0.546092 0 0.864945 0.501867 0 0.889823 0.456306 0 0.912308 0.409504 0 0.932409 0.361405 0 0.94991 0.312525 0 0.964797 0.262998 0.000270682 0.964796 0.262998 0 0.960767 0.277356 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.808253 0.588835 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.837725 0.546092 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.864945 0.501867 0 0.889823 0.456306 0 0.889823 0.456306 -0.000342653 0.886515 0.4627 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0 0.889823 0.456306 0.000257376 0.912308 0.409504 0 0.907894 0.419199 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.912308 0.409504 0 0.932409 0.361405 0 0.932409 0.361405 0 0.94991 0.312525 0 0.94991 0.312525 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.932409 0.361405 0 0.945606 0.325315 0 0.94991 0.312525 0 0.94991 0.312525 0 0.964797 0.262997 0 0.964797 0.262997 0 0.94991 0.312525 0 0.94991 0.312525 0 0.94991 0.312525 0 0.94991 0.312525 0 0.964797 0.262998 0 0.964797 0.262998 0 0.977147 0.212563 0 0.977147 0.212563 0 0.964797 0.262998 0 0.964797 0.262998 0 0.964797 0.262998 0 0.973546 0.228489 0 0.964797 0.262998 0 0.977147 0.212563 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 0 0.977147 0.212563 0 0.977147 0.212563 0 0.977147 0.212563 0 0.977147 0.212563 0 0.986862 0.161565 0 0.986862 0.161565 0 0.977147 0.212563 0 0.977147 0.212563 0 0.986862 0.161565 0 0.986862 0.161565 0 0.986862 0.161565 0 0.986862 0.161565 0 0.993918 0.110121 0 0.993918 0.110121 0 0.993918 0.110121 0 0.998294 0.0583855 0 0.998294 0.0583855 0 0.998294 0.0583855 0.0248583 0.000305077 0.999691 0.0583855 0 0.998294 0.0583855 0.000338215 0.998294 0.0738353 -0.000291641 0.99727 0.110117 0.000322243 0.993919 0.122688 -0.000267314 0.992445 0.161561 0.000291379 0.986863 0.212573 0 0.977145 0.219244 0.00016843 0.97567 0.212573 -0.000246357 0.977145 0.171183 0.000227874 0.985239 0.161561 -0.00029138 0.986863 0.110117 0 0.993919 0.161561 0 0.986863 0.161561 0 0.986863 0.0248583 -0.000305077 0.999691 0.0162429 0 0.999868 0.0162429 0 0.999868 0.171183 -0.000227874 0.985239 0.212573 0.000246357 0.977145 0.262979 0 0.964802 0.266809 9.07525e-05 0.963749 0.262979 -0.000184321 0.964802 0.219244 -0.00016843 0.97567 0.262979 0.000184321 0.964802 0.312537 0 0.949905 0.313688 0 0.949526 0.312537 -9.49193e-05 0.949905 0.266809 -9.07524e-05 0.963749 0.312537 9.49192e-05 0.949905 0.361412 0 0.932406 0.405343 0.000139391 0.914165 0.361412 -0.000149699 0.932406 0.359893 0 0.932994 0.313688 0 0.949526 0.359892 0 0.932994 0.405343 -0.000139391 0.914165 0.449834 -0.000270214 0.893112 0.493214 -0.000407821 0.869908 0.524746 -0.000191724 0.851259 0.544878 0.000605991 0.838515 0.546081 0.000754401 0.837732 0.565879 -0.000208945 0.824488 0.585487 0.000490314 0.810682 0.605642 -0.000202975 0.795737 0.624635 0.000364413 0.780917 0.629991 0.00113549 0.776601 0.643905 -0.000161391 0.765105 0.662303 0.000231392 0.749236 0.680645 -7.32473e-05 0.732613 0.698332 7.94608e-05 0.715774 0.700784 0.000504569 0.713373 0.71623 0 0.697864 0.715687 9.64467e-05 0.698422 0.732689 -8.55107e-05 0.680563 0.750539 0 0.660826 0.76529 0.000276594 0.643686 0.748959 -0.000391349 0.662616 
0.361412 0.000149699 0.932406 0.409486 0 0.912316 0.449834 0.000270214 0.893112 0.409486 -0.000285771 0.912316 0.409486 0.000285772 0.912316 0.45632 0 0.889816 0.493214 0.000407821 0.869908 0.45632 -0.000432464 0.889816 0.45632 0.000432466 0.889816 0.501867 0 0.864945 0.524747 0.000191723 0.851259 0.501867 -0.000587388 0.864945 0.501867 0.000587387 0.864945 0.546081 0 0.837732 0.565879 0.000208946 0.824488 0.546081 -0.000754402 0.837732 0.544878 -0.000605992 0.838515 0.588841 0.000935464 0.808248 0.629992 0 0.776602 0.643905 0.000161391 0.765105 0.629991 -0.00113549 0.776601 0.624635 -0.000364413 0.780917 0.605642 0.000202976 0.795737 0.585487 -0.000490315 0.810682 0.588841 -0.000935464 0.808248 0.588841 0 0.808249 0.546081 0 0.837732 0.669454 0.00135776 0.742853 0.697873 0 0.716221 0.698332 -7.94645e-05 0.715774 0.680645 7.32469e-05 0.732613 0.676108 -0.00050939 0.736803 0.669454 0 0.742853 0.750539 0 0.660826 0.76655 0 0.642185 0.782951 0 0.622084 0.797981 0 0.602683 0.810449 0 0.585809 0.810449 0 0.585809 0.820434 0 0.571741 0.824837 -0.000776788 0.56537 0.827391 0 0.561627 0.824837 0.000776788 0.56537 0.820434 0 0.571741 0.827391 0 0.561627 0.748959 0.000391351 0.662616 0.76529 -0.000276594 0.643686 0.780385 0.000905929 0.625299 0.796018 -0.000499591 0.605273 0.838235 0 0.545308 0.838235 0 0.545309 0.84823 0 0.529629 0.854711 0 0.519104 0.851672 0.00116283 0.524074 0.84823 0 0.529629 0.851672 -0.00116283 0.524074 0.854711 0 0.519104 0.863963 0 0.503556 0.873915 0 0.486078 0.879819 0 0.47531 0.876491 0.00178615 0.481415 0.873915 0 0.486078 0.876491 -0.00178614 0.481415 0.879819 0 0.47531 0.887641 0 0.460536 0.897325 0 0.441371 0.902701 0 0.430269 0.899371 0.00367857 0.437171 0.897325 0 0.441371 0.899371 -0.00367857 0.437171 0.908089 0.00611156 0.418733 0.909168 0 0.416429 0.907898 -8.01186e-05 0.419192 0.919113 0 0.393995 0.919113 0 0.393995 0.928504 0 0.371322 0.928504 0 0.371322 0.937343 0 0.348409 0.945606 0 0.325315 0.953309 0 0.301997 0.960428 0 0.278528 0.960428 0 0.278528 0.966977 0 0.254864 0.966977 0 0.254864 0.972941 0 0.231054 0.978316 0 0.207116 0.972941 0 0.231054 0.937343 0 0.348408 0.945606 0 0.325315 0.953309 0 0.301997 0.978316 0 0.207116 0.983106 0 0.183036 0.987304 0 0.158843 0.983106 0 0.183036 0.987304 0 0.158843 0.9909 0 0.134597 0.993906 0 0.110234 0.9909 0 0.134597 0.993906 0 0.110234 0.996313 0 0.0857961 0.998118 0 0.0613211 0.996313 0 0.0857961 0.998118 0 0.0613211 0.999323 0 0.0367899 0.999925 0 0.0122756 0.999323 0 0.0367899 0.999925 0 0.0122756 0.887641 0 0.460536 0.863963 0 0.503556 0.797981 0 0.602683 0.796018 0.000499591 0.605273 0.780385 -0.000905929 0.625299 0.782951 0 0.622084 0.76655 0 0.642185 0.732689 8.55107e-05 0.680563 0.715687 -9.64482e-05 0.698422 0.713382 -0.000504565 0.700775 0.660826 0 0.750539 0.629992 0 0.776602 0.662303 -0.000231395 0.749236 0.122688 0.000267314 0.992445 0.110117 -0.000322243 0.993919 0.0583855 0 0.998294 0.110117 0 0.993919 0.0738353 0.000291641 0.99727 0.0583855 -0.000338215 0.998294 0.212573 0 0.977145 0.262979 0 0.964802 0.312537 0 0.949905 0.361412 0 0.932406 0.409486 0 0.912316 0.45632 0 0.889816 0.501867 0 0.864945 0.588841 0 0.808249 0.733131 0 0.680087 0.733131 0 0.680087 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 
0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 1 0 0 1 0 1 0 0 1 0 0 0 0 1 0 0 1 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.66731 0.74478 0 -0.611173 0.791497 0 -0.551787 0.833985 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 0 1 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.217429 0.976076 0 0.288102 0.9576 0 0.357246 0.93401 0 0.424457 0.905448 0 0.489392 0.872064 0 0.551781 0.833989 0 0.611179 0.791492 0 0.667303 0.744786 0 0.719917 0.69406 0 0.768644 0.639676 0 0.813288 0.581862 0 0.853594 0.520939 0 0.889339 0.457248 0 0.920346 0.391106 0 0.946443 0.322872 0 0.967483 0.252938 0 0.983365 0.181638 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.719923 -0.694053 0 0.667296 -0.744792 0 0.611187 -0.791486 0 0.551772 -0.833995 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.217441 -0.976074 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0 -1 0 -0.0729821 
-0.997333 0 -0.145608 -0.989342 0 -0.217438 -0.976074 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.551778 -0.833991 0 -0.611181 -0.791491 0 -0.667303 -0.744787 0 -0.719917 -0.69406 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.719917 0.69406 0 0.719917 0.69406 0 
0.719917 0.69406 0 0.667303 0.744786 0 0.667303 0.744786 0 0.667303 0.744786 0 0.611179 0.791492 0 0.611179 0.791492 0 0.611179 0.791492 0 0.551781 0.833989 0 0.551781 0.833989 0 0.551781 0.833989 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.66731 0.74478 0 -0.611173 0.791497 0 -0.551787 0.833985 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 0 1 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.217429 0.976076 0 0.288102 0.9576 0 0.357246 0.93401 0 0.424457 0.905448 0 0.489392 0.872064 0 0.551781 0.833989 0 0.611179 0.791492 0 0.667303 0.744786 0 0.719917 0.69406 0 0.768644 0.639676 0 0.813288 0.581862 0 0.853594 0.520939 0 0.889339 0.457248 0 0.920346 0.391106 0 0.946443 0.322872 0 0.967483 0.252938 0 0.983365 0.181638 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.719923 -0.694053 0 0.667296 -0.744792 0 0.611187 -0.791486 0 0.551772 -0.833995 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.217441 -0.976074 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0 -1 0 -0.0729821 -0.997333 0 -0.145608 
-0.989342 0 -0.217438 -0.976074 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.551778 -0.833991 0 -0.611181 -0.791491 0 -0.667303 -0.744787 0 -0.719917 -0.69406 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.667303 -0.744787 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.611181 -0.791491 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.551778 -0.833991 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.217438 -0.976074 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.217441 -0.976074 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.551772 -0.833995 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.611187 -0.791486 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.667296 -0.744792 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.719923 -0.694053 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.719917 0.69406 0 0.719917 0.69406 0 0.719917 0.69406 0 
0.667303 0.744786 0 0.667303 0.744786 0 0.667303 0.744786 0 0.611179 0.791492 0 0.611179 0.791492 0 0.611179 0.791492 0 0.551781 0.833989 0 0.551781 0.833989 0 0.551781 0.833989 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.551787 0.833985 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.611173 0.791497 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.66731 0.74478 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719917 -0.69406 0 -0.667311 -0.74478 0 -0.611171 -0.791498 0 -0.551763 -0.834001 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217429 -0.976076 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217426 -0.976077 0 0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551769 -0.833997 0 0.611165 -0.791503 0 0.667317 -0.744774 0 0.719911 -0.694066 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.667317 0.744774 0 0.611165 0.791503 0 0.551769 0.833997 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 
0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551763 0.834001 0 -0.611171 0.791498 0 -0.667311 0.74478 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.66731 0.74478 0 -0.667311 0.74478 0 -0.66731 0.74478 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551769 0.833997 0 0.551769 0.833997 0 0.551769 0.833997 0 0.611165 0.791503 0 0.611165 0.791503 0 0.611165 0.791503 0 0.667317 0.744774 0 0.667317 0.744774 0 0.667317 0.744774 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.611165 -0.791503 
0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.66731 -0.74478 0 -0.667311 -0.74478 0 -0.66731 -0.74478 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.667317 0.744774 0 0.611165 0.791503 0 0.551769 0.833997 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551763 0.834001 0 -0.611171 0.791498 0 -0.667311 0.74478 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719917 -0.69406 0 -0.667311 -0.74478 0 -0.611171 -0.791498 0 -0.551763 -0.834001 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217429 -0.976076 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217426 -0.976077 0 
0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551769 -0.833997 0 0.611165 -0.791503 0 0.667317 -0.744774 0 0.719911 -0.694066 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.719911 -0.694066 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.667317 -0.744774 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.611165 -0.791503 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.551769 -0.833997 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.217426 -0.976077 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.217429 -0.976076 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.551763 -0.834001 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.611171 -0.791498 0 -0.667311 -0.74478 0 -0.667311 -0.74478 0 -0.667311 -0.74478 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.719917 -0.69406 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667311 
0.74478 0 -0.667311 0.74478 0 -0.667311 0.74478 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.611171 0.791498 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.551763 0.834001 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551769 0.833997 0 0.551769 0.833997 0 0.551769 0.833997 0 0.611165 0.791503 0 0.611165 0.791503 0 0.611165 0.791503 0 0.667317 0.744774 0 0.667317 0.744774 0 0.667317 0.744774 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109371 0 -0.994001 0.109371 0 -0.999333 0.0365165 0 -0.999333 -0.0365165 0 -0.994001 -0.109371 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920344 -0.391111 0 -0.889342 -0.457243 0 -0.853594 -0.520939 0 -0.813288 -0.581862 0 -0.768644 -0.639676 0 -0.719911 -0.694066 0 -0.667317 -0.744774 0 -0.611165 -0.791503 0 -0.551769 -0.833997 0 -0.489421 -0.872048 0 -0.424457 -0.905448 0 -0.357235 -0.934015 0 -0.288102 -0.9576 0 -0.217426 -0.976077 0 -0.145608 -0.989342 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145606 -0.989343 0 0.217429 -0.976076 0 0.288102 -0.9576 0 0.357235 -0.934015 0 0.424457 -0.905448 0 0.489421 -0.872048 0 0.551763 -0.834001 0 0.611171 -0.791498 0 0.667311 -0.74478 0 0.719917 -0.69406 0 0.768644 -0.639676 0 0.813288 -0.581862 0 0.853594 -0.520939 0 0.889342 -0.457243 0 0.920344 -0.391111 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109371 0 0.999333 -0.0365165 0 0.999333 0.0365165 0 0.994001 0.109371 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920344 0.391111 0 0.889342 0.457243 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719917 0.69406 0 0.667311 0.74478 0 0.611171 0.791498 0 0.551763 0.834001 0 0.489421 0.872048 0 0.424457 0.905448 0 0.357235 0.934015 0 0.288102 0.9576 0 0.217429 0.976076 0 0.145606 0.989343 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145608 0.989342 0 -0.217426 0.976077 0 -0.288102 
0.9576 0 -0.357235 0.934015 0 -0.424457 0.905448 0 -0.489421 0.872048 0 -0.551769 0.833997 0 -0.611165 0.791503 0 -0.667317 0.744774 0 -0.719911 0.694066 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889342 0.457243 0 -0.920344 0.391111 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.920344 0.391111 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.889342 0.457243 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.719911 0.694066 0 -0.667317 0.744774 0 -0.667317 0.744774 0 -0.667317 0.744774 0 -0.611165 0.791503 0 -0.611165 0.791503 0 -0.611165 0.791503 0 -0.551769 0.833997 0 -0.551769 0.833997 0 -0.551769 0.833997 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.489421 0.872048 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.357235 0.934015 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.217426 0.976077 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.145608 0.989342 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145606 0.989343 0 0.145606 0.989343 0 0.145606 0.989343 0 0.217429 0.976076 0 0.217429 0.976076 0 0.217429 0.976076 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357235 0.934015 0 0.357235 0.934015 0 0.357235 0.934015 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489421 0.872048 0 0.489421 0.872048 0 0.489421 0.872048 0 0.551763 0.834001 0 0.551763 0.834001 0 0.551763 0.834001 0 0.611171 0.791498 0 0.611171 0.791498 0 0.611171 0.791498 0 0.667311 0.74478 0 0.667311 0.74478 0 0.667311 0.74478 0 0.719917 0.69406 0 0.719917 0.69406 0 0.719917 0.69406 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889342 0.457243 0 0.889342 0.457243 0 0.889342 0.457243 0 0.920344 0.391111 0 0.920344 0.391111 0 0.920344 0.391111 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109371 0 0.994001 0.109371 0 0.994001 0.109371 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.999333 -0.0365165 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.994001 -0.109371 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.920344 -0.391111 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.889342 -0.457243 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.853594 -0.520939 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.768644 -0.639676 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667311 -0.74478 0 0.667311 -0.74478 0 0.667311 -0.74478 0 0.611171 -0.791498 0 0.611171 -0.791498 0 0.611171 -0.791498 0 0.551763 -0.834001 0 0.551763 
-0.834001 0 0.551763 -0.834001 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.489421 -0.872048 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.357235 -0.934015 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.288102 -0.9576 0 0.217429 -0.976076 0 0.217429 -0.976076 0 0.217429 -0.976076 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.145606 -0.989343 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.145608 -0.989342 0 -0.217426 -0.976077 0 -0.217426 -0.976077 0 -0.217426 -0.976077 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.288102 -0.9576 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.357235 -0.934015 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.489421 -0.872048 0 -0.551769 -0.833997 0 -0.551769 -0.833997 0 -0.551769 -0.833997 0 -0.611165 -0.791503 0 -0.611165 -0.791503 0 -0.611165 -0.791503 0 -0.667317 -0.744774 0 -0.667317 -0.744774 0 -0.667317 -0.744774 0 -0.719911 -0.694066 0 -0.719911 -0.694066 0 -0.719911 -0.694066 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.768644 -0.639676 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.853594 -0.520939 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.889342 -0.457243 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.920344 -0.391111 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.994001 -0.109371 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 -0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.0365165 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.719923 -0.694053 0 -0.667296 -0.744792 0 -0.611187 -0.791486 0 -0.551772 -0.833995 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.217441 -0.976074 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217438 -0.976074 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.424457 -0.905448 0 0.489401 -0.872059 0 0.551778 -0.833991 0 0.611181 -0.791491 0 0.667303 -0.744787 0 0.719917 -0.69406 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920346 0.391106 0 0.889339 0.457248 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.66731 0.74478 0 0.611173 0.791497 0 0.551787 0.833985 0 0.489392 0.872064 0 0.424457 0.905448 0 0.357246 0.93401 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 
-0.357246 0.93401 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.551781 0.833989 0 -0.611179 0.791492 0 -0.667303 0.744786 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.551787 0.833985 0 0.551787 0.833985 0 0.551787 0.833985 0 0.611173 0.791497 0 0.611173 0.791497 0 0.611173 0.791497 0 0.66731 0.74478 0 0.66731 0.74478 0 0.66731 0.74478 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.551778 
-0.833991 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 0.999333 -0.036516 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.983365 -0.181638 0 0.994001 -0.109372 0 0.994001 -0.109372 0 0.999333 -0.036516 0 0.999333 0.036516 0 0.994001 0.109372 0 0.983365 0.181638 0 0.967483 0.252938 0 0.946443 0.322872 0 0.920346 0.391106 0 0.889339 0.457248 0 0.853594 0.520939 0 0.813288 0.581862 0 0.768644 0.639676 0 0.719911 0.694066 0 0.66731 0.74478 0 0.611173 0.791497 0 0.551787 0.833985 0 0.489392 0.872064 0 0.424457 0.905448 0 0.357246 0.93401 0 0.288102 0.9576 0 0.217426 0.976077 0 0.145608 0.989342 0 0.0729821 0.997333 0 0 1 0 -0.0729821 0.997333 0 -0.145606 0.989343 0 -0.217429 0.976076 0 -0.288102 0.9576 0 -0.357246 0.93401 0 -0.424457 0.905448 0 -0.489392 0.872064 0 -0.551781 0.833989 0 -0.611179 0.791492 0 -0.667303 0.744786 0 -0.719917 0.69406 0 -0.768644 0.639676 0 -0.813288 0.581862 0 -0.853594 0.520939 0 -0.889339 0.457248 0 -0.920346 0.391106 0 -0.946443 0.322872 0 -0.967483 0.252938 0 -0.983365 0.181638 0 -0.994001 0.109372 0 -0.999333 0.036516 0 -0.999333 -0.036516 0 -0.994001 -0.109372 0 -0.983365 -0.181638 0 -0.967483 -0.252938 0 -0.946443 -0.322872 0 -0.920346 -0.391106 0 -0.889337 -0.457253 0 -0.853597 -0.520933 0 -0.813288 -0.581862 0 -0.768639 -0.639683 0 -0.719923 -0.694053 0 -0.667296 -0.744792 0 -0.611187 -0.791486 0 -0.551772 -0.833995 0 -0.489401 -0.872059 0 -0.424457 -0.905448 0 -0.357246 -0.93401 0 -0.288091 -0.957603 0 -0.217441 -0.976074 0 -0.145606 -0.989343 0 -0.0729821 -0.997333 0 0 -1 0 0.0729821 -0.997333 0 0.145608 -0.989342 0 0.217438 -0.976074 0 0.288091 -0.957603 0 0.357246 -0.93401 0 0.424457 
-0.905448 0 0.489401 -0.872059 0 0.551778 -0.833991 0 0.611181 -0.791491 0 0.667303 -0.744787 0 0.719917 -0.69406 0 0.768639 -0.639683 0 0.813288 -0.581862 0 0.853597 -0.520933 0 0.889337 -0.457253 0 0.920346 -0.391106 0 0.946443 -0.322872 0 0.967483 -0.252938 0 0.967483 -0.252938 0 0.983365 -0.181638 0 0.967483 -0.252938 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.946443 -0.322872 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.920346 -0.391106 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.889337 -0.457253 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.853597 -0.520933 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.813288 -0.581862 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.768639 -0.639683 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.719917 -0.69406 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.667303 -0.744787 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.611181 -0.791491 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.551778 -0.833991 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.489401 -0.872059 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.424457 -0.905448 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.357246 -0.93401 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.288091 -0.957603 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.217438 -0.976074 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.145608 -0.989342 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0.0729821 -0.997333 0 0 -1 0 0 -1 0 0 -1 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.0729821 -0.997333 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.145606 -0.989343 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.217441 -0.976074 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.288091 -0.957603 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.357246 -0.93401 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.424457 -0.905448 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.489401 -0.872059 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.551772 -0.833995 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.611187 -0.791486 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.667296 -0.744792 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.719923 -0.694053 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.768639 -0.639683 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.813288 -0.581862 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.853597 -0.520933 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.889337 -0.457253 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.920346 -0.391106 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.946443 -0.322872 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.967483 -0.252938 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.983365 -0.181638 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.994001 -0.109372 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 -0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.999333 0.036516 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.994001 0.109372 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.983365 0.181638 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.967483 0.252938 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.946443 0.322872 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.920346 0.391106 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.889339 0.457248 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.853594 0.520939 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.813288 0.581862 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.768644 0.639676 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.719917 0.69406 0 -0.667303 0.744786 0 -0.667303 0.744786 0 -0.667303 0.744786 0 
-0.611179 0.791492 0 -0.611179 0.791492 0 -0.611179 0.791492 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.551781 0.833989 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.489392 0.872064 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.424457 0.905448 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.357246 0.93401 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.288102 0.9576 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.217429 0.976076 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.145606 0.989343 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 -0.0729821 0.997333 0 0 1 0 0 1 0 0 1 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.0729821 0.997333 0 0.145608 0.989342 0 0.145608 0.989342 0 0.145608 0.989342 0 0.217426 0.976077 0 0.217426 0.976077 0 0.217426 0.976077 0 0.288102 0.9576 0 0.288102 0.9576 0 0.288102 0.9576 0 0.357246 0.93401 0 0.357246 0.93401 0 0.357246 0.93401 0 0.424457 0.905448 0 0.424457 0.905448 0 0.424457 0.905448 0 0.489392 0.872064 0 0.489392 0.872064 0 0.489392 0.872064 0 0.551787 0.833985 0 0.551787 0.833985 0 0.551787 0.833985 0 0.611173 0.791497 0 0.611173 0.791497 0 0.611173 0.791497 0 0.66731 0.74478 0 0.66731 0.74478 0 0.66731 0.74478 0 0.719911 0.694066 0 0.719911 0.694066 0 0.719911 0.694066 0 0.768644 0.639676 0 0.768644 0.639676 0 0.768644 0.639676 0 0.813288 0.581862 0 0.813288 0.581862 0 0.813288 0.581862 0 0.853594 0.520939 0 0.853594 0.520939 0 0.853594 0.520939 0 0.889339 0.457248 0 0.889339 0.457248 0 0.889339 0.457248 0 0.920346 0.391106 0 0.920346 0.391106 0 0.920346 0.391106 0 0.946443 0.322872 0 0.946443 0.322872 0 0.946443 0.322872 0 0.967483 0.252938 0 0.967483 0.252938 0 0.967483 0.252938 0 0.983365 0.181638 0 0.983365 0.181638 0 0.983365 0.181638 0 0.994001 0.109372 0 0.994001 0.109372 0 0.994001 0.109372 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 0.036516 0 0.999333 -0.036516 0 0.999333 -0.036516 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 -4.5155e-08 -1 -6.77326e-08 -1.85437e-07 -1 -4.96705e-08 -1.0435e-07 -1 -6.26099e-08 0 -1 0 0 -1 -6.20882e-08 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 
0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1.85437e-07 1 4.96705e-08 0 1 0 0 1 0 0 1 6.20882e-08 0 1 6.20882e-08 0 1 0 2.03056e-07 1 4.6859e-08 3.31137e-07 1 1.24176e-07 -1.54505e-07 1 8.58362e-08 -4.09372e-08 1 7.16402e-08 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -0.863779 0.503871 0 -0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0 0.863779 0.503871 0.999333 0 0.0365165 0.994001 0 0.109371 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920344 0 -0.391111 0.889342 0 -0.457243 0.853594 0 -0.520939 0.813288 0 -0.581862 0.768644 0 -0.639676 0.719911 0 -0.694066 0.667317 0 -0.744774 0.611165 0 -0.791503 0.551769 0 -0.833997 0.489421 0 -0.872048 0.424457 0 -0.905448 0.357235 0 -0.934015 0.288102 0 -0.9576 0.217426 0 -0.976077 0.145608 0 -0.989342 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.217429 0 -0.976076 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.551763 0 -0.834001 -0.611171 0 -0.791498 -0.667311 0 -0.74478 -0.719917 0 -0.69406 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 
0.0365165 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.667311 0 0.74478 -0.611171 0 0.791498 -0.551763 0 0.834001 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145608 0 0.989342 0.217426 0 0.976077 0.288102 0 0.9576 0.357235 0 0.934015 0.424457 0 0.905448 0.489421 0 0.872048 0.551769 0 0.833997 0.611165 0 0.791503 0.667317 0 0.744774 0.719911 0 0.694066 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889342 0 0.457243 0.920344 0 0.391111 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.667317 0 0.744774 0.667317 0 0.744774 0.667317 0 0.744774 0.611165 0 0.791503 0.611165 0 0.791503 0.611165 0 0.791503 0.551769 0 0.833997 0.551769 0 0.833997 0.551769 0 0.833997 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920344 0 
-0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.217429 0 -0.976076 -0.217429 0 -0.976076 -0.217429 0 -0.976076 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.217426 0 -0.976077 0.217426 0 -0.976077 0.217426 0 -0.976077 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.551769 0 -0.833997 0.551769 0 -0.833997 0.551769 0 -0.833997 0.611165 0 -0.791503 0.611165 0 -0.791503 0.611165 0 -0.791503 0.667317 0 -0.744774 0.667317 0 -0.744774 0.667317 0 -0.744774 0.719911 0 -0.694066 0.719911 0 -0.694066 0.719911 0 -0.694066 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0.863779 0 0.503871 0.863779 0 0.503871 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.667317 0 0.744774 
-0.611165 0 0.791503 -0.551769 0 0.833997 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145606 0 0.989343 0.217429 0 0.976076 0.288102 0 0.9576 0.357235 0 0.934015 0.424457 0 0.905448 0.489421 0 0.872048 0.551763 0 0.834001 0.611171 0 0.791498 0.667311 0 0.74478 0.719917 0 0.69406 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889342 0 0.457243 0.920344 0 0.391111 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920344 0 -0.391111 0.889342 0 -0.457243 0.853594 0 -0.520939 0.813288 0 -0.581862 0.768644 0 -0.639676 0.719917 0 -0.69406 0.667311 0 -0.74478 0.611171 0 -0.791498 0.551763 0 -0.834001 0.489421 0 -0.872048 0.424457 0 -0.905448 0.357235 0 -0.934015 0.288102 0 -0.9576 0.217429 0 -0.976076 0.145606 0 -0.989343 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.217426 0 -0.976077 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.551769 0 -0.833997 -0.611165 0 -0.791503 -0.667317 0 -0.744774 -0.719911 0 -0.694066 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 0.217429 0 -0.976076 0.217429 0 -0.976076 0.217429 0 -0.976076 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.551763 0 -0.834001 0.551763 0 -0.834001 0.551763 0 -0.834001 0.611171 0 -0.791498 0.611171 0 -0.791498 0.611171 0 -0.791498 0.667311 0 -0.74478 0.667311 0 -0.74478 0.667311 0 -0.74478 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.889342 0 -0.457243 
0.889342 0 -0.457243 0.889342 0 -0.457243 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.667311 0 0.74478 0.667311 0 0.74478 0.667311 0 0.74478 0.611171 0 0.791498 0.611171 0 0.791498 0.611171 0 0.791498 0.551763 0 0.834001 0.551763 0 0.834001 0.551763 0 0.834001 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217429 0 0.976076 0.217429 0 0.976076 0.217429 0 0.976076 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 
0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -0.863779 0 0.503871 -0.863779 0 0.503871 -1 0 0 -1 2.20432e-07 4.40862e-08 -1 0 6.20882e-08 -1 8.18745e-08 6.14059e-08 -1 1.65568e-07 1.24176e-07 -1 -1.99569e-07 6.6523e-08 -1 0 0 -1 1.85437e-07 4.96705e-08 -1 0 0 -1 0 0 -1 0 6.20882e-08 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.719923 0 -0.694053 -0.667296 0 -0.744792 -0.611187 0 -0.791486 -0.551772 0 -0.833995 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.217441 0 -0.976074 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.217438 0 -0.976074 0.288091 0 -0.957603 0.357246 0 -0.93401 0.424457 0 -0.905448 0.489401 0 -0.872059 0.551778 0 -0.833991 0.611181 0 -0.791491 0.667303 0 -0.744787 0.719917 0 -0.69406 0.768639 0 -0.639683 0.813288 0 -0.581862 0.853597 0 -0.520933 0.889337 0 -0.457253 0.920346 0 -0.391106 0.946443 0 -0.322872 0.967483 0 -0.252938 0.983365 0 -0.181638 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920346 0 0.391106 0.889339 0 0.457248 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719911 0 0.694066 0.66731 0 0.74478 0.611173 0 0.791497 0.551787 0 0.833985 0.489392 0 0.872064 0.424457 0 0.905448 0.357246 0 0.93401 0.288102 0 0.9576 0.217426 0 0.976077 0.145608 0 0.989342 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.551781 0 0.833989 -0.611179 0 0.791492 -0.667303 0 0.744786 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 
0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.551787 0 0.833985 0.551787 0 0.833985 0.551787 0 0.833985 0.611173 0 0.791497 0.611173 0 0.791497 0.611173 0 0.791497 0.66731 0 0.74478 0.66731 0 0.74478 0.66731 0 0.74478 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.667303 0 -0.744787 0.667303 0 -0.744787 0.667303 0 -0.744787 0.611181 0 -0.791491 0.611181 0 -0.791491 0.611181 0 -0.791491 0.551778 0 -0.833991 0.551778 0 -0.833991 0.551778 0 -0.833991 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.217438 0 -0.976074 0.217438 0 -0.976074 0.217438 0 -0.976074 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.946443 
0 -0.322872 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.667303 0 0.744786 -0.611179 0 0.791492 -0.551781 0 0.833989 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145608 0 0.989342 0.217426 0 0.976077 0.288102 0 0.9576 0.357246 0 0.93401 0.424457 0 0.905448 0.489392 0 0.872064 0.551787 0 0.833985 0.611173 0 0.791497 0.66731 0 0.74478 0.719911 0 0.694066 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889339 0 0.457248 0.920346 0 0.391106 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920346 0 -0.391106 0.889337 0 -0.457253 0.853597 0 -0.520933 0.813288 0 -0.581862 0.768639 0 -0.639683 0.719917 0 -0.69406 0.667303 0 -0.744787 0.611181 0 -0.791491 0.551778 0 -0.833991 0.489401 0 -0.872059 0.424457 0 -0.905448 0.357246 0 -0.93401 0.288091 0 -0.957603 0.217438 0 -0.976074 0.145608 0 -0.989342 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.217441 0 -0.976074 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.551772 0 -0.833995 -0.611187 0 -0.791486 -0.667296 0 -0.744792 -0.719923 0 -0.694053 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.719923 0 -0.694053 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.667296 0 -0.744792 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.611187 0 -0.791486 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.551772 0 -0.833995 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.217441 0 -0.976074 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145608 0 
-0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.217438 0 -0.976074 0.217438 0 -0.976074 0.217438 0 -0.976074 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.551778 0 -0.833991 0.551778 0 -0.833991 0.551778 0 -0.833991 0.611181 0 -0.791491 0.611181 0 -0.791491 0.611181 0 -0.791491 0.667303 0 -0.744787 0.667303 0 -0.744787 0.667303 0 -0.744787 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.66731 0 0.74478 0.66731 0 0.74478 0.66731 0 0.74478 0.611173 0 0.791497 0.611173 0 0.791497 0.611173 0 0.791497 0.551787 0 0.833985 0.551787 0 0.833985 0.551787 0 0.833985 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.551781 0 0.833989 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.611179 0 0.791492 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.667303 0 0.744786 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.946443 0 0.322872 
-0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.719911 0 -0.694066 -0.667317 0 -0.744774 -0.611165 0 -0.791503 -0.551769 0 -0.833997 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.217426 0 -0.976077 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.217429 0 -0.976076 0.288102 0 -0.9576 0.357235 0 -0.934015 0.424457 0 -0.905448 0.489421 0 -0.872048 0.551763 0 -0.834001 0.611171 0 -0.791498 0.667311 0 -0.74478 0.719917 0 -0.69406 0.768644 0 -0.639676 0.813288 0 -0.581862 0.853594 0 -0.520939 0.889342 0 -0.457243 0.920344 0 -0.391111 0.946443 0 -0.322872 0.967483 0 -0.252938 0.983365 0 -0.181638 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920344 0 0.391111 0.889342 0 0.457243 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719917 0 0.69406 0.667311 0 0.74478 0.611171 0 0.791498 0.551763 0 0.834001 0.489421 0 0.872048 0.424457 0 0.905448 0.357235 0 0.934015 0.288102 0 0.9576 0.217429 0 0.976076 0.145606 0 0.989343 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.551769 0 0.833997 -0.611165 0 0.791503 -0.667317 0 0.744774 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.667317 0 0.744774 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.611165 0 0.791503 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.551769 0 0.833997 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.217429 0 0.976076 0.217429 0 0.976076 
0.217429 0 0.976076 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.551763 0 0.834001 0.551763 0 0.834001 0.551763 0 0.834001 0.611171 0 0.791498 0.611171 0 0.791498 0.611171 0 0.791498 0.667311 0 0.74478 0.667311 0 0.74478 0.667311 0 0.74478 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.994001 0 -0.109371 0.994001 0 -0.109371 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.719917 0 -0.69406 0.719917 0 -0.69406 0.719917 0 -0.69406 0.667311 0 -0.74478 0.667311 0 -0.74478 0.667311 0 -0.74478 0.611171 0 -0.791498 0.611171 0 -0.791498 0.611171 0 -0.791498 0.551763 0 -0.834001 0.551763 0 -0.834001 0.551763 0 -0.834001 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.217429 0 -0.976076 0.217429 0 -0.976076 0.217429 0 -0.976076 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.217426 0 -0.976077 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.551769 0 -0.833997 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.611165 0 -0.791503 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.667317 0 -0.744774 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.719911 0 -0.694066 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 
-0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 0.999333 0 -0.0365165 0.994001 0 -0.109371 0.994001 0 -0.109371 0.983365 0 -0.181638 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109371 0.994001 0 -0.109371 0.999333 0 -0.0365165 0.999333 0 0.0365165 0.994001 0 0.109371 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920344 0 0.391111 0.889342 0 0.457243 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719911 0 0.694066 0.667317 0 0.744774 0.611165 0 0.791503 0.551769 0 0.833997 0.489421 0 0.872048 0.424457 0 0.905448 0.357235 0 0.934015 0.288102 0 0.9576 0.217426 0 0.976077 0.145608 0 0.989342 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145606 0 0.989343 -0.217429 0 0.976076 -0.288102 0 0.9576 -0.357235 0 0.934015 -0.424457 0 0.905448 -0.489421 0 0.872048 -0.551763 0 0.834001 -0.611171 0 0.791498 -0.667311 0 0.74478 -0.719917 0 0.69406 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889342 0 0.457243 -0.920344 0 0.391111 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.994001 0 0.109371 -0.999333 0 0.0365165 -0.999333 0 -0.0365165 -0.994001 0 -0.109371 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920344 0 -0.391111 -0.889342 0 -0.457243 -0.853594 0 -0.520939 -0.813288 0 -0.581862 -0.768644 0 -0.639676 -0.719917 0 -0.69406 -0.667311 0 -0.74478 -0.611171 0 -0.791498 -0.551763 0 -0.834001 -0.489421 0 -0.872048 -0.424457 0 -0.905448 -0.357235 0 -0.934015 -0.288102 0 -0.9576 -0.217429 0 -0.976076 -0.145606 0 -0.989343 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145608 0 -0.989342 0.217426 0 -0.976077 0.288102 0 -0.9576 0.357235 0 -0.934015 0.424457 0 -0.905448 0.489421 0 -0.872048 0.551769 0 -0.833997 0.611165 0 -0.791503 0.667317 0 -0.744774 0.719911 0 -0.694066 0.768644 0 -0.639676 0.813288 0 -0.581862 0.853594 0 -0.520939 0.889342 0 -0.457243 0.920344 0 -0.391111 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920344 0 -0.391111 0.920344 0 -0.391111 0.920344 0 -0.391111 0.889342 0 -0.457243 0.889342 0 -0.457243 0.889342 0 -0.457243 0.853594 0 -0.520939 0.853594 0 -0.520939 0.853594 0 -0.520939 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768644 0 -0.639676 0.768644 0 -0.639676 0.768644 0 -0.639676 0.719911 0 -0.694066 0.719911 0 -0.694066 0.719911 0 -0.694066 0.667317 0 -0.744774 0.667317 0 -0.744774 0.667317 0 -0.744774 0.611165 0 -0.791503 0.611165 0 -0.791503 0.611165 0 -0.791503 0.551769 0 -0.833997 0.551769 0 -0.833997 0.551769 0 -0.833997 0.489421 0 -0.872048 0.489421 0 -0.872048 0.489421 0 -0.872048 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357235 0 -0.934015 0.357235 0 -0.934015 0.357235 0 -0.934015 0.288102 0 -0.9576 0.288102 0 -0.9576 0.288102 0 -0.9576 0.217426 0 -0.976077 0.217426 0 -0.976077 0.217426 0 -0.976077 0.145608 0 -0.989342 0.145608 0 -0.989342 0.145608 0 -0.989342 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.145606 0 -0.989343 -0.217429 0 -0.976076 -0.217429 0 
-0.976076 -0.217429 0 -0.976076 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.288102 0 -0.9576 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.357235 0 -0.934015 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.489421 0 -0.872048 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.551763 0 -0.834001 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.611171 0 -0.791498 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.667311 0 -0.74478 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.768644 0 -0.639676 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.853594 0 -0.520939 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.889342 0 -0.457243 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.920344 0 -0.391111 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.994001 0 -0.109371 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 -0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.999333 0 0.0365165 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.994001 0 0.109371 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.920344 0 0.391111 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.889342 0 0.457243 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.719917 0 0.69406 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.667311 0 0.74478 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.611171 0 0.791498 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.551763 0 0.834001 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.489421 0 0.872048 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.357235 0 0.934015 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.217429 0 0.976076 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.145606 0 0.989343 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145608 0 0.989342 0.145608 0 0.989342 0.145608 0 0.989342 0.217426 0 0.976077 0.217426 0 0.976077 0.217426 0 0.976077 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.357235 0 0.934015 0.357235 0 0.934015 0.357235 0 0.934015 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489421 0 0.872048 0.489421 0 0.872048 0.489421 0 0.872048 0.551769 0 0.833997 0.551769 0 0.833997 0.551769 0 0.833997 0.611165 0 0.791503 0.611165 0 0.791503 0.611165 0 0.791503 0.667317 0 0.744774 0.667317 0 0.744774 0.667317 0 0.744774 0.719911 0 0.694066 0.719911 0 0.694066 0.719911 0 0.694066 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889342 0 0.457243 0.889342 0 0.457243 0.889342 0 0.457243 0.920344 0 0.391111 0.920344 0 0.391111 0.920344 0 0.391111 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 
0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109371 0.994001 0 0.109371 0.994001 0 0.109371 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 0.0365165 0.999333 0 -0.0365165 0.999333 0 -0.0365165 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.66731 0 0.74478 -0.611173 0 0.791497 -0.551787 0 0.833985 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.0729821 0 0.997333 0 0 1 0.0729821 0 0.997333 0.145606 0 0.989343 0.217429 0 0.976076 0.288102 0 0.9576 0.357246 0 0.93401 0.424457 0 0.905448 0.489392 0 0.872064 0.551781 0 0.833989 0.611179 0 0.791492 0.667303 0 0.744786 0.719917 0 0.69406 0.768644 0 0.639676 0.813288 0 0.581862 0.853594 0 0.520939 0.889339 0 0.457248 0.920346 0 0.391106 0.946443 0 0.322872 0.967483 0 0.252938 0.983365 0 0.181638 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.983365 0 -0.181638 0.967483 0 -0.252938 0.946443 0 -0.322872 0.920346 0 -0.391106 0.889337 0 -0.457253 0.853597 0 -0.520933 0.813288 0 -0.581862 0.768639 0 -0.639683 0.719923 0 -0.694053 0.667296 0 -0.744792 0.611187 0 -0.791486 0.551772 0 -0.833995 0.489401 0 -0.872059 0.424457 0 -0.905448 0.357246 0 -0.93401 0.288091 0 -0.957603 0.217441 0 -0.976074 0.145606 0 -0.989343 0.0729821 0 -0.997333 0 0 -1 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.217438 0 -0.976074 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.551778 0 -0.833991 -0.611181 0 -0.791491 -0.667303 0 -0.744787 -0.719917 0 -0.69406 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 
0.217441 0 -0.976074 0.217441 0 -0.976074 0.217441 0 -0.976074 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.551772 0 -0.833995 0.551772 0 -0.833995 0.551772 0 -0.833995 0.611187 0 -0.791486 0.611187 0 -0.791486 0.611187 0 -0.791486 0.667296 0 -0.744792 0.667296 0 -0.744792 0.667296 0 -0.744792 0.719923 0 -0.694053 0.719923 0 -0.694053 0.719923 0 -0.694053 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.667303 0 0.744786 0.667303 0 0.744786 0.667303 0 0.744786 0.611179 0 0.791492 0.611179 0 0.791492 0.611179 0 0.791492 0.551781 0 0.833989 0.551781 0 0.833989 0.551781 0 0.833989 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 0.9576 0.217429 0 0.976076 0.217429 0 0.976076 0.217429 0 0.976076 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.551787 0 0.833985 -0.551787 0 0.833985 -0.551787 0 0.833985 -0.611173 0 0.791497 -0.611173 0 0.791497 -0.611173 0 0.791497 -0.66731 0 0.74478 -0.66731 0 0.74478 -0.66731 0 0.74478 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 
0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.983365 0 0.181638 -0.994001 0 0.109372 -0.994001 0 0.109372 -0.999333 0 0.036516 -0.999333 0 -0.036516 -0.994001 0 -0.109372 -0.983365 0 -0.181638 -0.967483 0 -0.252938 -0.946443 0 -0.322872 -0.920346 0 -0.391106 -0.889337 0 -0.457253 -0.853597 0 -0.520933 -0.813288 0 -0.581862 -0.768639 0 -0.639683 -0.719917 0 -0.69406 -0.667303 0 -0.744787 -0.611181 0 -0.791491 -0.551778 0 -0.833991 -0.489401 0 -0.872059 -0.424457 0 -0.905448 -0.357246 0 -0.93401 -0.288091 0 -0.957603 -0.217438 0 -0.976074 -0.145608 0 -0.989342 -0.0729821 0 -0.997333 0 0 -1 0.0729821 0 -0.997333 0.145606 0 -0.989343 0.217441 0 -0.976074 0.288091 0 -0.957603 0.357246 0 -0.93401 0.424457 0 -0.905448 0.489401 0 -0.872059 0.551772 0 -0.833995 0.611187 0 -0.791486 0.667296 0 -0.744792 0.719923 0 -0.694053 0.768639 0 -0.639683 0.813288 0 -0.581862 0.853597 0 -0.520933 0.889337 0 -0.457253 0.920346 0 -0.391106 0.946443 0 -0.322872 0.967483 0 -0.252938 0.983365 0 -0.181638 0.994001 0 -0.109372 0.999333 0 -0.036516 0.999333 0 0.036516 0.994001 0 0.109372 0.983365 0 0.181638 0.967483 0 0.252938 0.946443 0 0.322872 0.920346 0 0.391106 0.889339 0 0.457248 0.853594 0 0.520939 0.813288 0 0.581862 0.768644 0 0.639676 0.719917 0 0.69406 0.667303 0 0.744786 0.611179 0 0.791492 0.551781 0 0.833989 0.489392 0 0.872064 0.424457 0 0.905448 0.357246 0 0.93401 0.288102 0 0.9576 0.217429 0 0.976076 0.145606 0 0.989343 0.0729821 0 0.997333 0 0 1 -0.0729821 0 0.997333 -0.145608 0 0.989342 -0.217426 0 0.976077 -0.288102 0 0.9576 -0.357246 0 0.93401 -0.424457 0 0.905448 -0.489392 0 0.872064 -0.551787 0 0.833985 -0.611173 0 0.791497 -0.66731 0 0.74478 -0.719911 0 0.694066 -0.768644 0 0.639676 -0.813288 0 0.581862 -0.853594 0 0.520939 -0.889339 0 0.457248 -0.920346 0 0.391106 -0.946443 0 0.322872 -0.967483 0 0.252938 -0.967483 0 0.252938 -0.983365 0 0.181638 -0.967483 0 0.252938 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.946443 0 0.322872 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.920346 0 0.391106 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.889339 0 0.457248 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.853594 0 0.520939 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.813288 0 0.581862 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.768644 0 0.639676 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.719911 0 0.694066 -0.66731 0 0.74478 -0.66731 0 0.74478 -0.66731 0 0.74478 -0.611173 0 0.791497 -0.611173 0 0.791497 -0.611173 0 0.791497 -0.551787 0 0.833985 -0.551787 0 0.833985 -0.551787 0 0.833985 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.489392 0 0.872064 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.424457 0 0.905448 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.357246 0 0.93401 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.288102 0 0.9576 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.217426 0 0.976077 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.145608 0 0.989342 -0.0729821 0 0.997333 -0.0729821 0 0.997333 -0.0729821 0 0.997333 0 0 1 0 0 1 0 0 1 0.0729821 0 0.997333 0.0729821 0 0.997333 0.0729821 0 0.997333 0.145606 0 0.989343 0.145606 0 0.989343 0.145606 0 0.989343 0.217429 0 0.976076 0.217429 0 0.976076 0.217429 0 0.976076 0.288102 0 0.9576 0.288102 0 0.9576 0.288102 0 
0.9576 0.357246 0 0.93401 0.357246 0 0.93401 0.357246 0 0.93401 0.424457 0 0.905448 0.424457 0 0.905448 0.424457 0 0.905448 0.489392 0 0.872064 0.489392 0 0.872064 0.489392 0 0.872064 0.551781 0 0.833989 0.551781 0 0.833989 0.551781 0 0.833989 0.611179 0 0.791492 0.611179 0 0.791492 0.611179 0 0.791492 0.667303 0 0.744786 0.667303 0 0.744786 0.667303 0 0.744786 0.719917 0 0.69406 0.719917 0 0.69406 0.719917 0 0.69406 0.768644 0 0.639676 0.768644 0 0.639676 0.768644 0 0.639676 0.813288 0 0.581862 0.813288 0 0.581862 0.813288 0 0.581862 0.853594 0 0.520939 0.853594 0 0.520939 0.853594 0 0.520939 0.889339 0 0.457248 0.889339 0 0.457248 0.889339 0 0.457248 0.920346 0 0.391106 0.920346 0 0.391106 0.920346 0 0.391106 0.946443 0 0.322872 0.946443 0 0.322872 0.946443 0 0.322872 0.967483 0 0.252938 0.967483 0 0.252938 0.967483 0 0.252938 0.983365 0 0.181638 0.983365 0 0.181638 0.983365 0 0.181638 0.994001 0 0.109372 0.994001 0 0.109372 0.994001 0 0.109372 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.999333 0 -0.036516 0.994001 0 -0.109372 0.994001 0 -0.109372 0.994001 0 -0.109372 0.983365 0 -0.181638 0.983365 0 -0.181638 0.983365 0 -0.181638 0.967483 0 -0.252938 0.967483 0 -0.252938 0.967483 0 -0.252938 0.946443 0 -0.322872 0.946443 0 -0.322872 0.946443 0 -0.322872 0.920346 0 -0.391106 0.920346 0 -0.391106 0.920346 0 -0.391106 0.889337 0 -0.457253 0.889337 0 -0.457253 0.889337 0 -0.457253 0.853597 0 -0.520933 0.853597 0 -0.520933 0.853597 0 -0.520933 0.813288 0 -0.581862 0.813288 0 -0.581862 0.813288 0 -0.581862 0.768639 0 -0.639683 0.768639 0 -0.639683 0.768639 0 -0.639683 0.719923 0 -0.694053 0.719923 0 -0.694053 0.719923 0 -0.694053 0.667296 0 -0.744792 0.667296 0 -0.744792 0.667296 0 -0.744792 0.611187 0 -0.791486 0.611187 0 -0.791486 0.611187 0 -0.791486 0.551772 0 -0.833995 0.551772 0 -0.833995 0.551772 0 -0.833995 0.489401 0 -0.872059 0.489401 0 -0.872059 0.489401 0 -0.872059 0.424457 0 -0.905448 0.424457 0 -0.905448 0.424457 0 -0.905448 0.357246 0 -0.93401 0.357246 0 -0.93401 0.357246 0 -0.93401 0.288091 0 -0.957603 0.288091 0 -0.957603 0.288091 0 -0.957603 0.217441 0 -0.976074 0.217441 0 -0.976074 0.217441 0 -0.976074 0.145606 0 -0.989343 0.145606 0 -0.989343 0.145606 0 -0.989343 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0.0729821 0 -0.997333 0 0 -1 0 0 -1 0 0 -1 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.0729821 0 -0.997333 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.145608 0 -0.989342 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.217438 0 -0.976074 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.288091 0 -0.957603 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.357246 0 -0.93401 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.424457 0 -0.905448 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.489401 0 -0.872059 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.551778 0 -0.833991 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.611181 0 -0.791491 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.667303 0 -0.744787 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.719917 0 -0.69406 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.768639 0 -0.639683 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.813288 0 -0.581862 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.853597 0 -0.520933 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.889337 0 -0.457253 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.920346 0 -0.391106 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.946443 0 -0.322872 -0.967483 0 -0.252938 -0.967483 0 -0.252938 -0.967483 0 -0.252938 
-0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.983365 0 -0.181638 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.994001 0 -0.109372 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 -0.036516 -0.999333 0 0.036516 -0.999333 0 0.036516 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 
0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -0.863779 0 0.503871 -0.863779 0 0.503871 0.863779 0 0.503871 0.863779 0 0.503871 -1 0 7.16402e-08 -1 1.11387e-07 8.66347e-08 -1 -7.0958e-08 1.24176e-07 -1 0 0 -1 -1.13549e-07 6.11419e-08 -1 0 6.20882e-08 -1 0 6.20882e-08 -1 0 0 -1 -1.85437e-07 4.96705e-08 -1 0 0 -1 0 0 0 -0.707107 0.707106 4.18123e-08 -0.707107 0.707106 0 -0.707107 0.707107 0 -0.707107 0.707107 0 0.707107 0.707107 0 0.707107 0.707107 0 0.707107 0.707107 4.18123e-08 0.707107 0.707107 -0.707106 -0.707107 0 -0.707106 -0.707107 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0.707106 0.707107 0 0.707106 0.707107 0 -0.707107 0 0.707107 -0.707107 -4.18123e-08 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 0.707107 0 0.707107 0.707107 -4.18123e-08 0.707106 0.707107 0 0.707106 0.707107 0 0.707106 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -0.707107 0.707106 0 -0.707107 0.707106 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0.707107 -0.707106 0 0.707107 -0.707106 0 0.707107 0 -0.707107 0.707107 0 -0.707107 -0.707107 0 -0.707107 -0.707107 0 -0.707107 0 -0.707107 -0.707107 0 -0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -0.707107 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 
0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 
0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 0.707107 0.707107 0 0.707107 0.707107 0.707107 0 0.707107 0.707107 0 0.707107 -0.707107 -0.707106 0 -0.707107 -0.707106 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 
0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 -0.707107 0.707107 0 -0.707107 0.707107 0.707107 0 0.707107 0.707107 0 0.707107 -0.707106 0.707107 0 -0.707106 0.707107 0 0 -0.707107 0.707107 0 -0.707107 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 0 0.707107 0.707107 0 0.707107 0.707107 -0.707107 0 0.707107 -0.707107 0 0.707107 -6.89869e-08 0 -1 -6.89869e-08 0 -1 -6.89869e-08 0 -1 -6.89869e-08 0 -1 0 6.89869e-08 -1 0 6.89869e-08 -1 0 6.89869e-08 -1 0 6.89869e-08 -1 1 0 0 1 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 1 0 0 1 0 6.6523e-08 -6.6523e-08 1 4.51761e-08 -4.51761e-08 1 1.88697e-07 -4.0055e-09 1 1.70571e-07 -1.0878e-08 1 1.55934e-07 -1.66111e-08 1 1.43823e-07 -2.15293e-08 1 1.33619e-07 -2.58419e-08 1 1.24876e-07 -2.97002e-08 1 1.17282e-07 -3.32117e-08 1 1.40127e-07 -4.61877e-08 1 1.25668e-07 -4.74262e-08 1 1.14229e-07 -4.87415e-08 1 1.0494e-07 -5.01437e-08 1 9.72386e-08 -5.16435e-08 1 -1.17905e-06 -3.67999e-07 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 1.98535e-07 2.31775e-06 1 4.50193e-08 -1.58978e-07 1 4.39126e-08 -1.84633e-07 1 4.28609e-08 -2.21619e-07 1 4.1857e-08 -2.79617e-07 1 4.089e-08 -3.83844e-07 1 3.99359e-08 -6.26217e-07 1 3.88051e-08 -1.82809e-06 1 4.52575e-08 -4.43487e-08 1 4.62268e-08 -4.31168e-08 1 6.6523e-08 -6.6523e-08 1 5.01405e-08 -8.29055e-08 1 4.31168e-08 -4.03936e-08 1 -6.2899e-08 -1.33516e-09 1 -5.6857e-08 -3.62601e-09 1 -5.19779e-08 -5.53704e-09 1 -4.79411e-08 -7.17644e-09 1 -4.45396e-08 -8.61398e-09 1 -4.16254e-08 -9.90007e-09 1 -3.9094e-08 -1.10705e-08 1 5.84992e-08 -5.84992e-08 1 -1.25668e-07 -4.74262e-08 1 -1.40127e-07 -4.61877e-08 1 6.6523e-08 -6.6523e-08 1 3.88051e-08 1.82809e-06 1 3.99359e-08 6.26217e-07 1 4.089e-08 3.83844e-07 1 4.1857e-08 2.79617e-07 1 4.28609e-08 2.21619e-07 1 4.39126e-08 1.84633e-07 1 4.50193e-08 1.58978e-07 1 1.98535e-07 -2.31775e-06 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 1.17905e-06 -3.67999e-07 1 -9.72386e-08 -5.16435e-08 1 -1.0494e-07 -5.01437e-08 1 -1.14229e-07 -4.87415e-08 1 4.31168e-08 -5.76403e-08 1 8.33384e-08 -4.97076e-08 1 6.6523e-08 -6.6523e-08 1 3.30423e-15 1 4.96705e-08 0 1 4.96705e-08 1 3.30423e-15 -4.96705e-08 1 0 -4.96705e-08 0.707107 0.707106 0 0.707107 0.707106 0 -3.30423e-15 -1 -4.96705e-08 0 -1 -4.96705e-08 -1 -3.30423e-15 4.96705e-08 -1 0 4.96705e-08 3.30423e-15 1 4.96705e-08 0 1 4.96705e-08 -1 -3.30423e-15 4.96705e-08 -1 0 4.96705e-08 -3.30423e-15 -1 -4.96705e-08 0 -1 -4.96705e-08 0.707106 -0.707107 0 0.707106 -0.707107 0 + + + + + + + + + + + + + + +

0 0 1 0 2344 0 0 1 5881 1 1 1 0 2 2343 2 5881 2 5881 3 2343 3 47 3 47 4 2343 4 48 4 49 5 48 5 50 5 51 6 50 6 2385 6 2 7 2385 7 2384 7 52 8 2384 8 2383 8 5882 9 2383 9 3 9 53 10 3 10 2382 10 54 11 2382 11 2381 11 5883 12 2381 12 2380 12 55 13 2380 13 2379 13 56 14 2379 14 2378 14 4 15 2378 15 5 15 5891 16 5 16 6 16 57 17 6 17 2377 17 7 18 2377 18 58 18 5892 19 58 19 2376 19 5893 20 2376 20 2375 20 59 21 2375 21 8 21 60 22 8 22 9 22 5895 23 9 23 2374 23 5894 24 2374 24 2373 24 61 25 2373 25 2372 25 10 26 2372 26 11 26 62 27 11 27 12 27 5896 28 12 28 2371 28 5897 29 2371 29 2370 29 5898 30 2370 30 2369 30 63 31 2369 31 2368 31 64 32 2368 32 65 32 5899 33 65 33 66 33 5900 34 66 34 2367 34 13 35 2367 35 2366 35 67 36 2366 36 14 36 68 37 14 37 2365 37 69 38 2365 38 15 38 16 39 15 39 17 39 5901 40 17 40 2364 40 18 41 2364 41 19 41 5902 42 19 42 2363 42 70 43 2363 43 21 43 20 44 21 44 22 44 5904 45 22 45 23 45 5905 46 23 46 24 46 25 47 24 47 26 47 5906 48 26 48 27 48 5907 49 27 49 28 49 5908 50 28 50 2362 50 71 51 2362 51 2361 51 72 52 2361 52 29 52 5946 53 29 53 2360 53 5909 54 2360 54 30 54 73 55 30 55 2359 55 74 56 2359 56 31 56 5910 57 31 57 32 57 75 58 32 58 2358 58 5950 59 2358 59 76 59 5911 60 76 60 77 60 5912 61 77 61 2356 61 5953 62 2356 62 33 62 5913 63 33 63 78 63 79 64 78 64 34 64 5957 65 34 65 2355 65 5914 66 2355 66 35 66 80 67 35 67 2354 67 5959 68 2354 68 2353 68 81 69 2353 69 82 69 5918 70 82 70 2352 70 5919 71 2352 71 2351 71 36 72 2351 72 2350 72 5963 73 2350 73 2349 73 83 74 2349 74 38 74 37 75 38 75 39 75 84 76 39 76 40 76 85 77 40 77 2348 77 86 78 2348 78 41 78 87 79 41 79 42 79 5966 80 42 80 2347 80 43 81 2347 81 2346 81 88 82 2346 82 44 82 5967 83 44 83 89 83 90 84 89 84 45 84 5875 85 45 85 46 85 5873 86 46 86 2345 86 5872 87 2345 87 2344 87 1 88 5872 88 2344 88 47 89 48 89 49 89 49 90 50 90 51 90 51 91 2385 91 2 91 2 92 2384 92 52 92 52 93 2383 93 5882 93 5882 94 3 94 53 94 53 95 2382 95 54 95 54 96 2381 96 5883 96 5883 97 2380 97 55 97 55 98 2379 98 56 98 56 99 2378 99 4 99 4 100 5 100 5891 100 5891 101 6 101 57 101 57 102 2377 102 7 102 7 103 58 103 5892 103 5892 104 2376 104 5893 104 5893 105 2375 105 59 105 59 106 8 106 60 106 60 107 9 107 5895 107 5895 108 2374 108 5894 108 5894 109 2373 109 61 109 61 110 2372 110 10 110 10 111 11 111 62 111 62 112 12 112 5896 112 5896 113 2371 113 5897 113 5897 114 2370 114 5898 114 5898 115 2369 115 63 115 63 116 2368 116 64 116 64 117 65 117 5899 117 5899 118 66 118 5900 118 5900 119 2367 119 13 119 13 120 2366 120 67 120 67 121 14 121 68 121 68 122 2365 122 69 122 69 123 15 123 16 123 16 124 17 124 5901 124 5901 125 2364 125 18 125 18 126 19 126 5902 126 5902 127 2363 127 70 127 70 128 21 128 20 128 20 129 22 129 5904 129 5904 130 23 130 5905 130 5905 131 24 131 25 131 25 132 26 132 5906 132 5906 133 27 133 5907 133 5907 134 28 134 5908 134 5908 135 2362 135 71 135 71 136 2361 136 72 136 72 137 29 137 5946 137 5946 138 2360 138 5909 138 5909 139 30 139 73 139 73 140 2359 140 74 140 74 141 31 141 5910 141 5910 142 32 142 75 142 75 143 2358 143 5950 143 5950 144 76 144 5911 144 5911 145 77 145 5912 145 5912 146 2356 146 5953 146 5953 147 33 147 5913 147 5913 148 78 148 79 148 79 149 34 149 5957 149 5957 150 2355 150 5914 150 5914 151 35 151 80 151 80 152 2354 152 5959 152 5959 153 2353 153 81 153 81 154 82 154 5918 154 5918 155 2352 155 5919 155 5919 156 2351 156 36 156 36 157 2350 157 5963 157 5963 158 2349 158 83 158 83 159 38 159 37 159 37 160 39 160 84 160 84 161 40 161 85 161 85 162 2348 162 86 162 86 163 41 163 87 163 87 164 
42 164 5966 164 5966 165 2347 165 43 165 43 166 2346 166 88 166 88 167 44 167 5967 167 5967 168 89 168 90 168 90 169 45 169 5875 169 5875 170 46 170 5873 170 5873 171 2345 171 5872 171 91 172 93 172 92 172 91 173 94 173 93 173 91 174 2417 174 94 174 94 175 2417 175 95 175 95 176 2417 176 2444 176 5879 177 2444 177 96 177 5972 178 96 178 138 178 5974 179 138 179 139 179 97 180 139 180 140 180 141 181 140 181 2443 181 142 182 2443 182 98 182 5975 183 98 183 99 183 143 184 99 184 144 184 145 185 144 185 2442 185 100 186 2442 186 146 186 5962 187 146 187 2441 187 5961 188 2441 188 147 188 5960 189 147 189 2440 189 148 190 2440 190 2439 190 5917 191 2439 191 2387 191 5916 192 2387 192 2389 192 5915 193 2389 193 2388 193 5958 194 2388 194 2390 194 5956 195 2390 195 2391 195 5955 196 2391 196 101 196 5954 197 101 197 2392 197 5952 198 2392 198 103 198 102 199 103 199 2395 199 5951 200 2395 200 2394 200 5949 201 2394 201 104 201 149 202 104 202 2396 202 5948 203 2396 203 105 203 150 204 105 204 106 204 151 205 106 205 152 205 5947 206 152 206 107 206 108 207 107 207 109 207 153 208 109 208 2397 208 154 209 2397 209 2398 209 5977 210 2398 210 155 210 5978 211 155 211 110 211 5979 212 110 212 2399 212 5981 213 2399 213 2400 213 5980 214 2400 214 2401 214 5982 215 2401 215 2402 215 111 216 2402 216 112 216 156 217 112 217 157 217 158 218 157 218 2403 218 5983 219 2403 219 2404 219 113 220 2404 220 159 220 114 221 159 221 115 221 160 222 115 222 117 222 116 223 117 223 161 223 118 224 161 224 119 224 120 225 119 225 162 225 5984 226 162 226 2438 226 5985 227 2438 227 2437 227 5986 228 2437 228 121 228 122 229 121 229 163 229 123 230 163 230 124 230 5988 231 124 231 125 231 5989 232 125 232 2435 232 5990 233 2435 233 2436 233 126 234 2436 234 164 234 5991 235 164 235 2433 235 127 236 2433 236 2432 236 165 237 2432 237 128 237 166 238 128 238 2431 238 129 239 2431 239 130 239 167 240 130 240 2430 240 5992 241 2430 241 131 241 168 242 131 242 2429 242 5993 243 2429 243 2428 243 169 244 2428 244 2427 244 170 245 2427 245 2426 245 5994 246 2426 246 132 246 171 247 132 247 133 247 172 248 133 248 2425 248 173 249 2425 249 2424 249 174 250 2424 250 2423 250 175 251 2423 251 2422 251 176 252 2422 252 177 252 5999 253 177 253 2421 253 6000 254 2421 254 134 254 6001 255 134 255 178 255 6002 256 178 256 2418 256 179 257 2418 257 2420 257 135 258 2420 258 136 258 137 259 136 259 92 259 93 260 137 260 92 260 95 261 2444 261 5879 261 5879 262 96 262 5972 262 5972 263 138 263 5974 263 5974 264 139 264 97 264 97 265 140 265 141 265 141 266 2443 266 142 266 142 267 98 267 5975 267 5975 268 99 268 143 268 143 269 144 269 145 269 145 270 2442 270 100 270 100 271 146 271 5962 271 5962 272 2441 272 5961 272 5961 273 147 273 5960 273 5960 274 2440 274 148 274 148 275 2439 275 5917 275 5917 276 2387 276 5916 276 5916 277 2389 277 5915 277 5915 278 2388 278 5958 278 5958 279 2390 279 5956 279 5956 280 2391 280 5955 280 5955 281 101 281 5954 281 5954 282 2392 282 5952 282 5952 283 103 283 102 283 102 284 2395 284 5951 284 5951 285 2394 285 5949 285 5949 286 104 286 149 286 149 287 2396 287 5948 287 5948 288 105 288 150 288 150 289 106 289 151 289 151 290 152 290 5947 290 5947 291 107 291 108 291 108 292 109 292 153 292 153 293 2397 293 154 293 154 294 2398 294 5977 294 5977 295 155 295 5978 295 5978 296 110 296 5979 296 5979 297 2399 297 5981 297 5981 298 2400 298 5980 298 5980 299 2401 299 5982 299 5982 300 2402 300 111 300 111 301 112 301 156 301 156 302 157 302 158 302 158 303 2403 303 5983 303 5983 304 2404 304 113 304 
113 305 159 305 114 305 114 306 115 306 160 306 160 307 117 307 116 307 116 308 161 308 118 308 118 309 119 309 120 309 120 310 162 310 5984 310 5984 311 2438 311 5985 311 5985 312 2437 312 5986 312 5986 313 121 313 122 313 122 314 163 314 123 314 123 315 124 315 5988 315 5988 316 125 316 5989 316 5989 317 2435 317 5990 317 5990 318 2436 318 126 318 126 319 164 319 5991 319 5991 320 2433 320 127 320 127 321 2432 321 165 321 165 322 128 322 166 322 166 323 2431 323 129 323 129 324 130 324 167 324 167 325 2430 325 5992 325 5992 326 131 326 168 326 168 327 2429 327 5993 327 5993 328 2428 328 169 328 169 329 2427 329 170 329 170 330 2426 330 5994 330 5994 331 132 331 171 331 171 332 133 332 172 332 172 333 2425 333 173 333 173 334 2424 334 174 334 174 335 2423 335 175 335 175 336 2422 336 176 336 176 337 177 337 5999 337 5999 338 2421 338 6000 338 6000 339 134 339 6001 339 6001 340 178 340 6002 340 6002 341 2418 341 179 341 179 342 2420 342 135 342 135 343 136 343 137 343 180 344 6008 344 2446 344 180 345 181 345 6008 345 180 346 182 346 181 346 181 347 182 347 183 347 183 348 182 348 230 348 231 349 230 349 184 349 232 350 184 350 185 350 233 351 185 351 2482 351 5936 352 2482 352 234 352 186 353 234 353 187 353 6010 354 187 354 188 354 235 355 188 355 189 355 236 356 189 356 2480 356 237 357 2480 357 2481 357 238 358 2481 358 2479 358 239 359 2479 359 190 359 6013 360 190 360 2478 360 191 361 2478 361 192 361 240 362 192 362 2477 362 193 363 2477 363 241 363 194 364 241 364 242 364 5940 365 242 365 243 365 5941 366 243 366 244 366 245 367 244 367 195 367 246 368 195 368 2475 368 196 369 2475 369 197 369 5942 370 197 370 198 370 6017 371 198 371 199 371 6019 372 199 372 2474 372 247 373 2474 373 2473 373 5944 374 2473 374 2416 374 6022 375 2416 375 200 375 5945 376 200 376 201 376 248 377 201 377 2472 377 202 378 2472 378 203 378 5976 379 203 379 2471 379 204 380 2471 380 2470 380 5878 381 2470 381 2469 381 249 382 2469 382 2468 382 5973 383 2468 383 2467 383 5971 384 2467 384 205 384 5970 385 205 385 250 385 5880 386 250 386 251 386 5969 387 251 387 2466 387 206 388 2466 388 2465 388 207 389 2465 389 2464 389 6006 390 2464 390 2463 390 6005 391 2463 391 252 391 253 392 252 392 254 392 6004 393 254 393 2462 393 5998 394 2462 394 255 394 208 395 255 395 209 395 5997 396 209 396 256 396 6003 397 256 397 2461 397 5996 398 2461 398 210 398 257 399 210 399 258 399 211 400 258 400 2460 400 5995 401 2460 401 212 401 259 402 212 402 213 402 260 403 213 403 214 403 215 404 214 404 261 404 6024 405 261 405 2459 405 216 406 2459 406 2458 406 6025 407 2458 407 262 407 217 408 262 408 263 408 6026 409 263 409 2457 409 264 410 2457 410 2456 410 265 411 2456 411 2455 411 6027 412 2455 412 218 412 266 413 218 413 219 413 6028 414 219 414 221 414 220 415 221 415 2454 415 222 416 2454 416 267 416 6029 417 267 417 2453 417 268 418 2453 418 2452 418 6031 419 2452 419 269 419 223 420 269 420 224 420 270 421 224 421 225 421 6032 422 225 422 2451 422 271 423 2451 423 226 423 6033 424 226 424 272 424 227 425 272 425 2450 425 6034 426 2450 426 2449 426 273 427 2449 427 2448 427 274 428 2448 428 275 428 228 429 275 429 2447 429 229 430 2447 430 276 430 6007 431 276 431 2446 431 6008 432 6007 432 2446 432 183 433 230 433 231 433 231 434 184 434 232 434 232 435 185 435 233 435 233 436 2482 436 5936 436 5936 437 234 437 186 437 186 438 187 438 6010 438 6010 439 188 439 235 439 235 440 189 440 236 440 236 441 2480 441 237 441 237 442 2481 442 238 442 238 443 2479 443 239 443 239 444 190 444 6013 444 6013 445 2478 445 191 
445 191 446 192 446 240 446 240 447 2477 447 193 447 193 448 241 448 194 448 194 449 242 449 5940 449 5940 450 243 450 5941 450 5941 451 244 451 245 451 245 452 195 452 246 452 246 453 2475 453 196 453 196 454 197 454 5942 454 5942 455 198 455 6017 455 6017 456 199 456 6019 456 6019 457 2474 457 247 457 247 458 2473 458 5944 458 5944 459 2416 459 6022 459 6022 460 200 460 5945 460 5945 461 201 461 248 461 248 462 2472 462 202 462 202 463 203 463 5976 463 5976 464 2471 464 204 464 204 465 2470 465 5878 465 5878 466 2469 466 249 466 249 467 2468 467 5973 467 5973 468 2467 468 5971 468 5971 469 205 469 5970 469 5970 470 250 470 5880 470 5880 471 251 471 5969 471 5969 472 2466 472 206 472 206 473 2465 473 207 473 207 474 2464 474 6006 474 6006 475 2463 475 6005 475 6005 476 252 476 253 476 253 477 254 477 6004 477 6004 478 2462 478 5998 478 5998 479 255 479 208 479 208 480 209 480 5997 480 5997 481 256 481 6003 481 6003 482 2461 482 5996 482 5996 483 210 483 257 483 257 484 258 484 211 484 211 485 2460 485 5995 485 5995 486 212 486 259 486 259 487 213 487 260 487 260 488 214 488 215 488 215 489 261 489 6024 489 6024 490 2459 490 216 490 216 491 2458 491 6025 491 6025 492 262 492 217 492 217 493 263 493 6026 493 6026 494 2457 494 264 494 264 495 2456 495 265 495 265 496 2455 496 6027 496 6027 497 218 497 266 497 266 498 219 498 6028 498 6028 499 221 499 220 499 220 500 2454 500 222 500 222 501 267 501 6029 501 6029 502 2453 502 268 502 268 503 2452 503 6031 503 6031 504 269 504 223 504 223 505 224 505 270 505 270 506 225 506 6032 506 6032 507 2451 507 271 507 271 508 226 508 6033 508 6033 509 272 509 227 509 227 510 2450 510 6034 510 6034 511 2449 511 273 511 273 512 2448 512 274 512 274 513 275 513 228 513 228 514 2447 514 229 514 229 515 276 515 6007 515 278 516 277 516 334 516 278 517 5931 517 277 517 278 518 279 518 5931 518 5931 519 279 519 280 519 280 520 279 520 281 520 5930 521 281 521 335 521 5929 522 335 522 282 522 336 523 282 523 283 523 284 524 283 524 2503 524 5928 525 2503 525 2502 525 5927 526 2502 526 337 526 285 527 337 527 286 527 5926 528 286 528 2501 528 5925 529 2501 529 338 529 5924 530 338 530 2500 530 287 531 2500 531 288 531 5923 532 288 532 339 532 340 533 339 533 341 533 5922 534 341 534 2499 534 289 535 2499 535 342 535 343 536 342 536 344 536 5921 537 344 537 345 537 290 538 345 538 2497 538 291 539 2497 539 2496 539 292 540 2496 540 293 540 294 541 293 541 295 541 346 542 295 542 2494 542 296 543 2494 543 2495 543 347 544 2495 544 297 544 348 545 297 545 298 545 5920 546 298 546 299 546 5885 547 299 547 2493 547 5884 548 2493 548 300 548 5886 549 300 549 301 549 5890 550 301 550 349 550 5889 551 349 551 302 551 303 552 302 552 2406 552 350 553 2406 553 304 553 351 554 304 554 305 554 306 555 305 555 2407 555 5888 556 2407 556 307 556 5887 557 307 557 2409 557 308 558 2409 558 2408 558 309 559 2408 559 310 559 6035 560 310 560 2410 560 311 561 2410 561 312 561 5874 562 312 562 352 562 5968 563 352 563 2411 563 353 564 2411 564 313 564 5876 565 313 565 314 565 315 566 314 566 2412 566 354 567 2412 567 2413 567 5965 568 2413 568 2414 568 5964 569 2414 569 355 569 356 570 355 570 316 570 5877 571 316 571 357 571 6023 572 357 572 317 572 6021 573 317 573 2415 573 5943 574 2415 574 358 574 6020 575 358 575 318 575 6018 576 318 576 359 576 6016 577 359 577 320 577 319 578 320 578 360 578 321 579 360 579 361 579 6015 580 361 580 362 580 363 581 362 581 2492 581 322 582 2492 582 323 582 364 583 323 583 365 583 6014 584 365 584 366 584 367 585 366 585 324 585 368 586 324 
586 325 586 5939 587 325 587 2491 587 5938 588 2491 588 326 588 6012 589 326 589 2490 589 6011 590 2490 590 2489 590 5937 591 2489 591 369 591 327 592 369 592 370 592 371 593 370 593 372 593 6009 594 372 594 2488 594 328 595 2488 595 373 595 5935 596 373 596 2487 596 5933 597 2487 597 374 597 329 598 374 598 330 598 375 599 330 599 2486 599 331 600 2486 600 2485 600 332 601 2485 601 2484 601 5932 602 2484 602 2483 602 333 603 2483 603 334 603 277 604 333 604 334 604 280 605 281 605 5930 605 5930 606 335 606 5929 606 5929 607 282 607 336 607 336 608 283 608 284 608 284 609 2503 609 5928 609 5928 610 2502 610 5927 610 5927 611 337 611 285 611 285 612 286 612 5926 612 5926 613 2501 613 5925 613 5925 614 338 614 5924 614 5924 615 2500 615 287 615 287 616 288 616 5923 616 5923 617 339 617 340 617 340 618 341 618 5922 618 5922 619 2499 619 289 619 289 620 342 620 343 620 343 621 344 621 5921 621 5921 622 345 622 290 622 290 623 2497 623 291 623 291 624 2496 624 292 624 292 625 293 625 294 625 294 626 295 626 346 626 346 627 2494 627 296 627 296 628 2495 628 347 628 347 629 297 629 348 629 348 630 298 630 5920 630 5920 631 299 631 5885 631 5885 632 2493 632 5884 632 5884 633 300 633 5886 633 5886 634 301 634 5890 634 5890 635 349 635 5889 635 5889 636 302 636 303 636 303 637 2406 637 350 637 350 638 304 638 351 638 351 639 305 639 306 639 306 640 2407 640 5888 640 5888 641 307 641 5887 641 5887 642 2409 642 308 642 308 643 2408 643 309 643 309 644 310 644 6035 644 6035 645 2410 645 311 645 311 646 312 646 5874 646 5874 647 352 647 5968 647 5968 648 2411 648 353 648 353 649 313 649 5876 649 5876 650 314 650 315 650 315 651 2412 651 354 651 354 652 2413 652 5965 652 5965 653 2414 653 5964 653 5964 654 355 654 356 654 356 655 316 655 5877 655 5877 656 357 656 6023 656 6023 657 317 657 6021 657 6021 658 2415 658 5943 658 5943 659 358 659 6020 659 6020 660 318 660 6018 660 6018 661 359 661 6016 661 6016 662 320 662 319 662 319 663 360 663 321 663 321 664 361 664 6015 664 6015 665 362 665 363 665 363 666 2492 666 322 666 322 667 323 667 364 667 364 668 365 668 6014 668 6014 669 366 669 367 669 367 670 324 670 368 670 368 671 325 671 5939 671 5939 672 2491 672 5938 672 5938 673 326 673 6012 673 6012 674 2490 674 6011 674 6011 675 2489 675 5937 675 5937 676 369 676 327 676 327 677 370 677 371 677 371 678 372 678 6009 678 6009 679 2488 679 328 679 328 680 373 680 5935 680 5935 681 2487 681 5933 681 5933 682 374 682 329 682 329 683 330 683 375 683 375 684 2486 684 331 684 331 685 2485 685 332 685 332 686 2484 686 5932 686 5932 687 2483 687 333 687 455 688 706 688 381 688 409 689 381 689 410 689 409 690 455 690 381 690 376 691 377 691 544 691 376 692 379 692 377 692 376 693 382 693 379 693 379 694 382 694 383 694 378 695 383 695 384 695 378 696 379 696 383 696 378 697 380 697 379 697 379 698 380 698 377 698 377 699 380 699 410 699 381 700 377 700 410 700 381 701 544 701 377 701 381 702 706 702 544 702 382 703 543 703 383 703 383 704 543 704 385 704 384 705 385 705 386 705 384 706 383 706 385 706 543 707 388 707 385 707 385 708 388 708 387 708 386 709 387 709 412 709 386 710 385 710 387 710 388 711 389 711 387 711 387 712 389 712 390 712 412 713 390 713 413 713 412 714 387 714 390 714 389 715 541 715 390 715 390 716 541 716 391 716 413 717 391 717 392 717 413 718 390 718 391 718 541 719 540 719 391 719 391 720 540 720 393 720 392 721 393 721 395 721 392 722 391 722 393 722 540 723 539 723 393 723 393 724 539 724 394 724 395 725 394 725 414 725 395 726 393 726 394 726 539 727 537 727 394 727 394 728 537 728 
396 728 414 729 396 729 415 729 414 730 394 730 396 730 537 731 534 731 396 731 396 732 534 732 397 732 415 733 397 733 398 733 415 734 396 734 397 734 534 735 535 735 397 735 397 736 535 736 401 736 398 737 401 737 400 737 398 738 397 738 401 738 535 739 533 739 401 739 401 740 533 740 399 740 400 741 399 741 402 741 400 742 401 742 399 742 533 743 532 743 399 743 399 744 532 744 403 744 402 745 403 745 416 745 402 746 399 746 403 746 532 747 404 747 403 747 403 748 404 748 406 748 416 749 406 749 417 749 416 750 403 750 406 750 404 751 407 751 406 751 406 752 407 752 408 752 417 753 408 753 405 753 417 754 406 754 408 754 407 755 531 755 408 755 408 756 531 756 495 756 405 757 408 757 495 757 409 758 411 758 455 758 409 759 410 759 411 759 411 760 410 760 380 760 378 761 411 761 380 761 378 762 384 762 411 762 411 763 384 763 386 763 412 764 411 764 386 764 412 765 413 765 411 765 411 766 413 766 392 766 395 767 411 767 392 767 395 768 414 768 411 768 411 769 414 769 415 769 398 770 411 770 415 770 398 771 400 771 411 771 411 772 400 772 402 772 416 773 411 773 402 773 416 774 417 774 411 774 411 775 417 775 405 775 478 776 411 776 405 776 478 777 530 777 411 777 411 778 530 778 480 778 529 779 411 779 480 779 529 780 418 780 411 780 411 781 418 781 423 781 424 782 423 782 420 782 419 783 420 783 484 783 2890 784 484 784 425 784 422 785 425 785 421 785 422 786 2890 786 425 786 411 787 423 787 424 787 424 788 420 788 419 788 419 789 484 789 2890 789 425 790 426 790 421 790 421 791 426 791 2891 791 2891 792 426 792 441 792 3004 793 441 793 427 793 3001 794 427 794 525 794 2892 795 525 795 486 795 428 796 486 796 524 796 442 797 524 797 429 797 443 798 429 798 523 798 444 799 523 799 430 799 432 800 430 800 431 800 487 801 432 801 431 801 487 802 433 802 432 802 487 803 520 803 433 803 433 804 520 804 445 804 445 805 520 805 519 805 434 806 519 806 490 806 2984 807 490 807 435 807 436 808 2984 808 435 808 436 809 437 809 2984 809 436 810 438 810 437 810 437 811 438 811 2898 811 2898 812 438 812 440 812 439 813 440 813 494 813 439 814 2898 814 440 814 2891 815 441 815 3004 815 3004 816 427 816 3001 816 3001 817 525 817 2892 817 2892 818 486 818 428 818 428 819 524 819 442 819 442 820 429 820 443 820 443 821 523 821 444 821 444 822 430 822 432 822 445 823 519 823 434 823 434 824 490 824 2984 824 411 825 2888 825 455 825 455 826 2888 826 446 826 447 827 455 827 446 827 447 828 3021 828 455 828 455 829 3021 829 2885 829 2884 830 455 830 2885 830 2884 831 448 831 455 831 455 832 448 832 449 832 450 833 455 833 449 833 450 834 451 834 455 834 455 835 451 835 2882 835 3031 836 455 836 2882 836 3031 837 452 837 455 837 455 838 452 838 3036 838 453 839 455 839 3036 839 453 840 454 840 455 840 455 841 454 841 3042 841 2881 842 455 842 3042 842 2881 843 2880 843 455 843 455 844 2880 844 3049 844 3052 845 455 845 3049 845 3052 846 2879 846 455 846 455 847 2879 847 456 847 2876 848 455 848 456 848 2876 849 2875 849 455 849 455 850 2875 850 2405 850 2405 851 2875 851 457 851 458 852 2405 852 457 852 458 853 2930 853 2405 853 2405 854 2930 854 459 854 460 855 2405 855 459 855 460 856 461 856 2405 856 2405 857 461 857 2935 857 2922 858 2405 858 2935 858 2922 859 2939 859 2405 859 2405 860 2939 860 469 860 469 861 2939 861 2920 861 2942 862 469 862 2920 862 2942 863 2944 863 469 863 469 864 2944 864 463 864 462 865 469 865 463 865 462 866 2917 866 469 866 469 867 2917 867 465 867 464 868 469 868 465 868 464 869 466 869 469 869 469 870 466 870 467 870 2914 871 469 871 467 871 2914 872 2912 872 469 872 469 
873 2912 873 2910 873 2909 874 469 874 2910 874 2909 875 468 875 469 875 469 876 468 876 2908 876 470 877 469 877 2908 877 470 878 471 878 469 878 469 879 471 879 472 879 473 880 469 880 472 880 473 881 2965 881 469 881 469 882 2965 882 2907 882 2905 883 469 883 2907 883 2905 884 474 884 469 884 469 885 474 885 2904 885 475 886 469 886 2904 886 475 887 2973 887 469 887 469 888 2973 888 476 888 477 889 469 889 476 889 477 890 2901 890 469 890 469 891 2901 891 439 891 478 892 405 892 479 892 530 893 479 893 497 893 480 894 497 894 481 894 529 895 481 895 482 895 418 896 482 896 499 896 423 897 499 897 528 897 420 898 528 898 483 898 484 899 483 899 527 899 425 900 527 900 510 900 426 901 510 901 485 901 441 902 485 902 502 902 427 903 502 903 526 903 525 904 526 904 504 904 486 905 504 905 505 905 524 906 505 906 514 906 429 907 514 907 515 907 523 908 515 908 516 908 430 909 516 909 522 909 431 910 522 910 517 910 487 911 517 911 521 911 520 912 521 912 488 912 519 913 488 913 489 913 490 914 489 914 491 914 435 915 491 915 492 915 436 916 492 916 493 916 438 917 493 917 518 917 440 918 518 918 508 918 494 919 508 919 976 919 494 920 440 920 508 920 405 921 495 921 479 921 479 922 495 922 496 922 680 923 479 923 496 923 680 924 497 924 479 924 680 925 498 925 497 925 497 926 498 926 481 926 481 927 498 927 509 927 482 928 509 928 500 928 499 929 500 929 678 929 528 930 678 930 677 930 483 931 677 931 574 931 527 932 574 932 573 932 510 933 573 933 501 933 485 934 501 934 503 934 502 935 503 935 674 935 526 936 674 936 511 936 504 937 511 937 512 937 505 938 512 938 513 938 514 939 513 939 572 939 515 940 572 940 571 940 516 941 571 941 570 941 522 942 570 942 569 942 517 943 569 943 506 943 521 944 506 944 568 944 488 945 568 945 567 945 489 946 567 946 507 946 491 947 507 947 673 947 492 948 673 948 672 948 493 949 672 949 671 949 518 950 671 950 670 950 508 951 670 951 976 951 508 952 518 952 670 952 481 953 509 953 482 953 482 954 500 954 499 954 499 955 678 955 528 955 528 956 677 956 483 956 483 957 574 957 527 957 527 958 573 958 510 958 510 959 501 959 485 959 485 960 503 960 502 960 502 961 674 961 526 961 526 962 511 962 504 962 504 963 512 963 505 963 505 964 513 964 514 964 514 965 572 965 515 965 515 966 571 966 516 966 516 967 570 967 522 967 522 968 569 968 517 968 517 969 506 969 521 969 521 970 568 970 488 970 488 971 567 971 489 971 489 972 507 972 491 972 491 973 673 973 492 973 492 974 672 974 493 974 493 975 671 975 518 975 440 976 438 976 518 976 438 977 436 977 493 977 436 978 435 978 492 978 435 979 490 979 491 979 490 980 519 980 489 980 519 981 520 981 488 981 520 982 487 982 521 982 487 983 431 983 517 983 431 984 430 984 522 984 430 985 523 985 516 985 523 986 429 986 515 986 429 987 524 987 514 987 524 988 486 988 505 988 486 989 525 989 504 989 525 990 427 990 526 990 427 991 441 991 502 991 441 992 426 992 485 992 426 993 425 993 510 993 425 994 484 994 527 994 484 995 420 995 483 995 420 996 423 996 528 996 423 997 418 997 499 997 418 998 529 998 482 998 529 999 480 999 481 999 480 1000 530 1000 497 1000 530 1001 478 1001 479 1001 531 1002 3023 1002 495 1002 531 1003 407 1003 3023 1003 3023 1004 407 1004 404 1004 532 1005 3023 1005 404 1005 532 1006 536 1006 3023 1006 532 1007 533 1007 536 1007 536 1008 533 1008 535 1008 534 1009 536 1009 535 1009 534 1010 538 1010 536 1010 534 1011 537 1011 538 1011 538 1012 537 1012 539 1012 540 1013 538 1013 539 1013 540 1014 542 1014 538 1014 540 1015 541 1015 542 1015 542 1016 541 1016 389 1016 388 1017 542 1017 389 1017 
388 1018 543 1018 542 1018 542 1019 543 1019 3027 1019 3027 1020 543 1020 382 1020 376 1021 3027 1021 382 1021 376 1022 544 1022 3027 1022 3027 1023 544 1023 3029 1023 3029 1024 544 1024 706 1024 3032 1025 706 1025 545 1025 3032 1026 3029 1026 706 1026 5861 1027 546 1027 706 1027 5861 1028 2927 1028 546 1028 5861 1029 2928 1029 2927 1029 5861 1030 548 1030 2928 1030 5861 1031 547 1031 548 1031 5861 1032 2931 1032 547 1032 5861 1033 2934 1033 2931 1033 5861 1034 549 1034 2934 1034 5861 1035 2936 1035 549 1035 5861 1036 2938 1036 2936 1036 5861 1037 550 1037 2938 1037 5861 1038 2941 1038 550 1038 5861 1039 551 1039 2941 1039 5861 1040 552 1040 551 1040 5861 1041 553 1041 552 1041 5861 1042 554 1042 553 1042 5861 1043 2947 1043 554 1043 5861 1044 2948 1044 2947 1044 5861 1045 2951 1045 2948 1045 5861 1046 5860 1046 2951 1046 2951 1047 5860 1047 555 1047 555 1048 5860 1048 556 1048 556 1049 5860 1049 557 1049 557 1050 5860 1050 558 1050 558 1051 5860 1051 559 1051 559 1052 5860 1052 2955 1052 2955 1053 5860 1053 560 1053 560 1054 5860 1054 561 1054 561 1055 5860 1055 2958 1055 2958 1056 5860 1056 562 1056 562 1057 5860 1057 2962 1057 2962 1058 5860 1058 2963 1058 2963 1059 5860 1059 2966 1059 2966 1060 5860 1060 563 1060 563 1061 5860 1061 2969 1061 2969 1062 5860 1062 2971 1062 2971 1063 5860 1063 564 1063 564 1064 5860 1064 565 1064 565 1065 5860 1065 2974 1065 2974 1066 5860 1066 2975 1066 2975 1067 5860 1067 2977 1067 2977 1068 5860 1068 566 1068 566 1069 5860 1069 2979 1069 2979 1070 5860 1070 2981 1070 2981 1071 5860 1071 2982 1071 2982 1072 5860 1072 2985 1072 2985 1073 5860 1073 2987 1073 2987 1074 5860 1074 507 1074 567 1075 2987 1075 507 1075 567 1076 2990 1076 2987 1076 567 1077 568 1077 2990 1077 2990 1078 568 1078 2991 1078 2991 1079 568 1079 506 1079 2992 1080 506 1080 569 1080 570 1081 2992 1081 569 1081 570 1082 2993 1082 2992 1082 570 1083 571 1083 2993 1083 2993 1084 571 1084 2995 1084 2995 1085 571 1085 572 1085 2996 1086 572 1086 513 1086 2998 1087 513 1087 512 1087 2999 1088 512 1088 511 1088 3002 1089 511 1089 674 1089 675 1090 674 1090 503 1090 3006 1091 503 1091 501 1091 3008 1092 501 1092 573 1092 3010 1093 573 1093 574 1093 676 1094 574 1094 677 1094 3012 1095 677 1095 678 1095 679 1096 678 1096 500 1096 3015 1097 500 1097 509 1097 3016 1098 509 1098 498 1098 3018 1099 498 1099 680 1099 3020 1100 680 1100 496 1100 681 1101 496 1101 495 1101 3023 1102 681 1102 495 1102 576 1103 976 1103 5860 1103 576 1104 617 1104 976 1104 576 1105 3530 1105 617 1105 576 1106 575 1106 3530 1106 576 1107 3535 1107 575 1107 576 1108 577 1108 3535 1108 576 1109 3538 1109 577 1109 576 1110 3540 1110 3538 1110 576 1111 578 1111 3540 1111 576 1112 579 1112 578 1112 576 1113 580 1113 579 1113 576 1114 582 1114 580 1114 576 1115 581 1115 582 1115 576 1116 584 1116 581 1116 576 1117 583 1117 584 1117 576 1118 3549 1118 583 1118 576 1119 585 1119 3549 1119 576 1120 3553 1120 585 1120 576 1121 3556 1121 3553 1121 576 1122 586 1122 3556 1122 576 1123 3557 1123 586 1123 576 1124 3559 1124 3557 1124 576 1125 3563 1125 3559 1125 576 1126 587 1126 3563 1126 576 1127 3567 1127 587 1127 576 1128 588 1128 3567 1128 576 1129 3568 1129 588 1129 576 1130 3570 1130 3568 1130 576 1131 3572 1131 3570 1131 576 1132 589 1132 3572 1132 576 1133 590 1133 589 1133 576 1134 591 1134 590 1134 576 1135 3575 1135 591 1135 576 1136 618 1136 3575 1136 3575 1137 618 1137 2730 1137 592 1138 2730 1138 2729 1138 593 1139 2729 1139 2727 1139 594 1140 2727 1140 2788 1140 595 1141 2788 1141 2791 1141 682 1142 2791 1142 596 1142 
[Raw numeric data omitted: several thousand integer triples (apparently face/vertex index data from a mesh/model asset added in this changeset), garbled by interleaved source line numbers ~1143-3581; not reproduced here.]
3581 1807 3582 2035 3582 1620 3582 1620 3583 2035 3583 1808 3583 1564 3584 1808 3584 1627 3584 1564 3585 1620 3585 1808 3585 2035 3586 2033 3586 1808 3586 1808 3587 2033 3587 2032 3587 1627 3588 2032 3588 1809 3588 1625 3589 1809 3589 1737 3589 1625 3590 1627 3590 1809 3590 2032 3591 1963 3591 1809 3591 1809 3592 1963 3592 1810 3592 1737 3593 1810 3593 1811 3593 1559 3594 1811 3594 1813 3594 1559 3595 1737 3595 1811 3595 1810 3596 1962 3596 1811 3596 1811 3597 1962 3597 1893 3597 1813 3598 1893 3598 1812 3598 1629 3599 1812 3599 1819 3599 1629 3600 1813 3600 1812 3600 1893 3601 1814 3601 1812 3601 1812 3602 1814 3602 2029 3602 1819 3603 2029 3603 1820 3603 1892 3604 1820 3604 1815 3604 1891 3605 1815 3605 1816 3605 1960 3606 1891 3606 1816 3606 1960 3607 1817 3607 1891 3607 1960 3608 1818 3608 1817 3608 1960 3609 1959 3609 1818 3609 1818 3610 1959 3610 1821 3610 1817 3611 1818 3611 1821 3611 1812 3612 2029 3612 1819 3612 1820 3613 1816 3613 1815 3613 1959 3614 1822 3614 1821 3614 1821 3615 1822 3615 1544 3615 1544 3616 1822 3616 1824 3616 1823 3617 1824 3617 1826 3617 1823 3618 1544 3618 1824 3618 1822 3619 1825 3619 1824 3619 1824 3620 1825 3620 1826 3620 1826 3621 1825 3621 1827 3621 1832 3622 1827 3622 1828 3622 1957 3623 1832 3623 1828 3623 1957 3624 1831 3624 1832 3624 1957 3625 1829 3625 1831 3625 1957 3626 1956 3626 1829 3626 1829 3627 1956 3627 1830 3627 1890 3628 1830 3628 1889 3628 1489 3629 1889 3629 1834 3629 1489 3630 1890 3630 1889 3630 1826 3631 1827 3631 1832 3631 1536 3632 1832 3632 1831 3632 1536 3633 1826 3633 1832 3633 1830 3634 1833 3634 1889 3634 1889 3635 1833 3635 1836 3635 1834 3636 1836 3636 1835 3636 1490 3637 1835 3637 1481 3637 1490 3638 1834 3638 1835 3638 1836 3639 1837 3639 1835 3639 1835 3640 1837 3640 1838 3640 1839 3641 1838 3641 1954 3641 1843 3642 1954 3642 1840 3642 1839 3643 1840 3643 1841 3643 1473 3644 1841 3644 1467 3644 1473 3645 1839 3645 1841 3645 1473 3646 1842 3646 1839 3646 1839 3647 1842 3647 1481 3647 1835 3648 1839 3648 1481 3648 1835 3649 1838 3649 1839 3649 1839 3650 1954 3650 1843 3650 1840 3651 1839 3651 1843 3651 1840 3652 1844 3652 1841 3652 1841 3653 1844 3653 1845 3653 1846 3654 1845 3654 1844 3654 1846 3655 1841 3655 1845 3655 1846 3656 1849 3656 1841 3656 1846 3657 1951 3657 1849 3657 1849 3658 1951 3658 1847 3658 1848 3659 1847 3659 1951 3659 1848 3660 1849 3660 1847 3660 1848 3661 1886 3661 1849 3661 1848 3662 1850 3662 1886 3662 1886 3663 1850 3663 1851 3663 1884 3664 1851 3664 1865 3664 1862 3665 1865 3665 1863 3665 1864 3666 1863 3666 1852 3666 1894 3667 1852 3667 1882 3667 1881 3668 1882 3668 1868 3668 1853 3669 1868 3669 1875 3669 1879 3670 1875 3670 1854 3670 1858 3671 1854 3671 1876 3671 1857 3672 1876 3672 1877 3672 1878 3673 1877 3673 1873 3673 1855 3674 1873 3674 1872 3674 1855 3675 1878 3675 1873 3675 1855 3676 2342 3676 1878 3676 1878 3677 2342 3677 1856 3677 1857 3678 1856 3678 1657 3678 1858 3679 1657 3679 1654 3679 1879 3680 1654 3680 1644 3680 1853 3681 1644 3681 1880 3681 1881 3682 1880 3682 1859 3682 1894 3683 1859 3683 1860 3683 1861 3684 1894 3684 1860 3684 1861 3685 1864 3685 1894 3685 1861 3686 1635 3686 1864 3686 1864 3687 1635 3687 1862 3687 1863 3688 1864 3688 1862 3688 1850 3689 1950 3689 1851 3689 1851 3690 1950 3690 1865 3690 1865 3691 1950 3691 1990 3691 1863 3692 1990 3692 1874 3692 1852 3693 1874 3693 1866 3693 1882 3694 1866 3694 1949 3694 1867 3695 1882 3695 1949 3695 1867 3696 1868 3696 1882 3696 1867 3697 1869 3697 1868 3697 1868 3698 1869 3698 1875 3698 1875 3699 1869 3699 1870 3699 1854 3700 
1870 3700 1921 3700 1876 3701 1921 3701 1871 3701 1877 3702 1871 3702 1872 3702 1873 3703 1877 3703 1872 3703 1865 3704 1990 3704 1863 3704 1863 3705 1874 3705 1852 3705 1852 3706 1866 3706 1882 3706 1875 3707 1870 3707 1854 3707 1854 3708 1921 3708 1876 3708 1876 3709 1871 3709 1877 3709 1878 3710 1856 3710 1857 3710 1877 3711 1878 3711 1857 3711 1857 3712 1657 3712 1858 3712 1876 3713 1857 3713 1858 3713 1858 3714 1654 3714 1879 3714 1854 3715 1858 3715 1879 3715 1879 3716 1644 3716 1853 3716 1875 3717 1879 3717 1853 3717 1853 3718 1880 3718 1881 3718 1868 3719 1853 3719 1881 3719 1881 3720 1859 3720 1894 3720 1882 3721 1881 3721 1894 3721 1635 3722 1883 3722 1862 3722 1862 3723 1883 3723 1884 3723 1865 3724 1862 3724 1884 3724 1883 3725 1885 3725 1884 3725 1884 3726 1885 3726 1886 3726 1851 3727 1884 3727 1886 3727 1885 3728 1887 3728 1886 3728 1886 3729 1887 3729 1888 3729 1849 3730 1888 3730 1457 3730 1468 3731 1849 3731 1457 3731 1468 3732 1841 3732 1849 3732 1468 3733 1467 3733 1841 3733 1886 3734 1888 3734 1849 3734 1836 3735 1834 3735 1889 3735 1830 3736 1890 3736 1829 3736 1829 3737 1890 3737 1530 3737 1831 3738 1829 3738 1530 3738 1891 3739 1892 3739 1815 3739 1892 3740 1819 3740 1820 3740 1893 3741 1813 3741 1811 3741 1810 3742 1737 3742 1809 3742 2032 3743 1627 3743 1808 3743 1570 3744 1606 3744 1806 3744 1796 3745 1793 3745 1801 3745 1793 3746 1517 3746 2053 3746 1894 3747 1864 3747 1852 3747 1855 3748 1895 3748 2111 3748 1855 3749 2116 3749 1895 3749 1855 3750 1896 3750 2116 3750 1855 3751 2091 3751 1896 3751 1855 3752 1897 3752 2091 3752 1855 3753 1898 3753 1897 3753 1855 3754 1872 3754 1898 3754 1898 3755 1872 3755 2118 3755 1912 3756 2118 3756 1899 3756 2117 3757 1899 3757 1914 3757 2125 3758 1914 3758 1900 3758 1901 3759 1900 3759 1902 3759 1908 3760 1902 3760 1903 3760 2136 3761 1903 3761 1904 3761 2137 3762 1904 3762 2142 3762 1906 3763 2142 3763 2150 3763 2149 3764 2150 3764 1905 3764 2281 3765 1905 3765 2265 3765 2281 3766 2149 3766 1905 3766 2281 3767 2264 3767 2149 3767 2149 3768 2264 3768 2143 3768 1906 3769 2143 3769 2138 3769 2137 3770 2138 3770 1907 3770 2136 3771 1907 3771 1909 3771 1908 3772 1909 3772 2131 3772 1901 3773 2131 3773 2127 3773 2125 3774 2127 3774 1910 3774 2117 3775 1910 3775 1911 3775 1912 3776 1911 3776 1897 3776 1898 3777 1912 3777 1897 3777 1898 3778 2118 3778 1912 3778 1872 3779 1871 3779 2118 3779 2118 3780 1871 3780 2121 3780 1899 3781 2121 3781 1913 3781 1914 3782 1913 3782 1923 3782 1900 3783 1923 3783 1915 3783 1902 3784 1915 3784 1916 3784 1903 3785 1916 3785 1917 3785 1904 3786 1917 3786 1918 3786 2142 3787 1918 3787 1925 3787 2150 3788 1925 3788 2154 3788 1905 3789 2154 3789 1927 3789 1920 3790 1927 3790 1919 3790 1920 3791 1905 3791 1927 3791 1920 3792 2265 3792 1905 3792 1871 3793 1921 3793 2121 3793 2121 3794 1921 3794 1922 3794 1913 3795 1922 3795 1929 3795 1923 3796 1929 3796 2130 3796 1915 3797 2130 3797 2135 3797 1916 3798 2135 3798 1924 3798 1917 3799 1924 3799 2141 3799 1918 3800 2141 3800 2148 3800 1925 3801 2148 3801 1932 3801 2154 3802 1932 3802 1926 3802 1927 3803 1926 3803 2167 3803 1928 3804 2167 3804 2267 3804 1928 3805 1927 3805 2167 3805 1928 3806 1919 3806 1927 3806 1921 3807 1870 3807 1922 3807 1922 3808 1870 3808 1935 3808 1929 3809 1935 3809 1930 3809 2130 3810 1930 3810 2133 3810 2135 3811 2133 3811 1931 3811 1924 3812 1931 3812 1938 3812 2141 3813 1938 3813 2147 3813 2148 3814 2147 3814 2157 3814 1932 3815 2157 3815 1933 3815 1926 3816 1933 3816 2166 3816 2167 3817 2166 3817 1942 3817 1934 3818 1942 3818 2269 
3818 1934 3819 2167 3819 1942 3819 1934 3820 2267 3820 2167 3820 1870 3821 1869 3821 1935 3821 1935 3822 1869 3822 1936 3822 1930 3823 1936 3823 2132 3823 2133 3824 2132 3824 1937 3824 1931 3825 1937 3825 2140 3825 1938 3826 2140 3826 2146 3826 2147 3827 2146 3827 1946 3827 2157 3828 1946 3828 1939 3828 1933 3829 1939 3829 1947 3829 2166 3830 1947 3830 1940 3830 1942 3831 1940 3831 1941 3831 2270 3832 1941 3832 2271 3832 2270 3833 1942 3833 1941 3833 2270 3834 2269 3834 1942 3834 1869 3835 1867 3835 1936 3835 1936 3836 1867 3836 1943 3836 2132 3837 1943 3837 1944 3837 1937 3838 1944 3838 2145 3838 2140 3839 2145 3839 1977 3839 2146 3840 1977 3840 1945 3840 1946 3841 1945 3841 2156 3841 1939 3842 2156 3842 2165 3842 1947 3843 2165 3843 1948 3843 1940 3844 1948 3844 2180 3844 1941 3845 2180 3845 2179 3845 2272 3846 2179 3846 1981 3846 2272 3847 1941 3847 2179 3847 2272 3848 2271 3848 1941 3848 1867 3849 1949 3849 1943 3849 1943 3850 1949 3850 1866 3850 2139 3851 1866 3851 1874 3851 2144 3852 1874 3852 1990 3852 2151 3853 1990 3853 1950 3853 2158 3854 1950 3854 1850 3854 1848 3855 2158 3855 1850 3855 1848 3856 2005 3856 2158 3856 1848 3857 1951 3857 2005 3857 2005 3858 1951 3858 1846 3858 1952 3859 1846 3859 1844 3859 1840 3860 1952 3860 1844 3860 1840 3861 1953 3861 1952 3861 1840 3862 1954 3862 1953 3862 1953 3863 1954 3863 1838 3863 2014 3864 1838 3864 1837 3864 1836 3865 2014 3865 1837 3865 1836 3866 1955 3866 2014 3866 1836 3867 1833 3867 1955 3867 1955 3868 1833 3868 1830 3868 2019 3869 1830 3869 1956 3869 1957 3870 2019 3870 1956 3870 1957 3871 1958 3871 2019 3871 1957 3872 1828 3872 1958 3872 1958 3873 1828 3873 1827 3873 2023 3874 1827 3874 1825 3874 1822 3875 2023 3875 1825 3875 1822 3876 2072 3876 2023 3876 1822 3877 1959 3877 2072 3877 2072 3878 1959 3878 1960 3878 1816 3879 2072 3879 1960 3879 1816 3880 2073 3880 2072 3880 1816 3881 1820 3881 2073 3881 2073 3882 1820 3882 2029 3882 1974 3883 2029 3883 2030 3883 1961 3884 2030 3884 1893 3884 2031 3885 1893 3885 1962 3885 1810 3886 2031 3886 1962 3886 1810 3887 1961 3887 2031 3887 1810 3888 2221 3888 1961 3888 1810 3889 1964 3889 2221 3889 1810 3890 1963 3890 1964 3890 1964 3891 1963 3891 2032 3891 2221 3892 2032 3892 2231 3892 2226 3893 2231 3893 1965 3893 2225 3894 1965 3894 1966 3894 2235 3895 1966 3895 2232 3895 2238 3896 2232 3896 2041 3896 2058 3897 2041 3897 2239 3897 2059 3898 2239 3898 2504 3898 1968 3899 2504 3899 2063 3899 1967 3900 2063 3900 2325 3900 1968 3901 2325 3901 2064 3901 2061 3902 2064 3902 2062 3902 2060 3903 2062 3903 1969 3903 2234 3904 1969 3904 1970 3904 2228 3905 1970 3905 2075 3905 1971 3906 2075 3906 1973 3906 1972 3907 1973 3907 2220 3907 1961 3908 2220 3908 1974 3908 2030 3909 1961 3909 1974 3909 1943 3910 1866 3910 2139 3910 1944 3911 2139 3911 1975 3911 2145 3912 1975 3912 1976 3912 1977 3913 1976 3913 2153 3913 1945 3914 2153 3914 1978 3914 2156 3915 1978 3915 2168 3915 2165 3916 2168 3916 1979 3916 1948 3917 1979 3917 1980 3917 2180 3918 1980 3918 1985 3918 2179 3919 1985 3919 1989 3919 2273 3920 1989 3920 1988 3920 2273 3921 2179 3921 1989 3921 2273 3922 1981 3922 2179 3922 2139 3923 1874 3923 2144 3923 1975 3924 2144 3924 1982 3924 1976 3925 1982 3925 2152 3925 2153 3926 2152 3926 2164 3926 1978 3927 2164 3927 1991 3927 2168 3928 1991 3928 1983 3928 1979 3929 1983 3929 1984 3929 1980 3930 1984 3930 2178 3930 1985 3931 2178 3931 2186 3931 1989 3932 2186 3932 1994 3932 1986 3933 1994 3933 1987 3933 1986 3934 1989 3934 1994 3934 1986 3935 1988 3935 1989 3935 2144 3936 1990 3936 2151 3936 1982 3937 
2151 3937 1995 3937 2152 3938 1995 3938 2155 3938 2164 3939 2155 3939 1992 3939 1991 3940 1992 3940 1993 3940 1983 3941 1993 3941 2175 3941 1984 3942 2175 3942 2185 3942 2178 3943 2185 3943 1999 3943 2186 3944 1999 3944 2192 3944 1994 3945 2192 3945 2202 3945 2291 3946 2202 3946 2004 3946 2291 3947 1994 3947 2202 3947 2291 3948 1987 3948 1994 3948 2151 3949 1950 3949 2158 3949 1995 3950 2158 3950 2159 3950 2155 3951 2159 3951 2163 3951 1992 3952 2163 3952 1996 3952 1993 3953 1996 3953 1997 3953 2175 3954 1997 3954 1998 3954 2185 3955 1998 3955 2162 3955 1999 3956 2162 3956 2000 3956 2192 3957 2000 3957 2001 3957 2202 3958 2001 3958 2087 3958 2003 3959 2087 3959 2002 3959 2003 3960 2202 3960 2087 3960 2003 3961 2004 3961 2202 3961 2005 3962 1846 3962 1952 3962 2160 3963 1952 3963 2169 3963 2161 3964 2169 3964 2170 3964 2174 3965 2170 3965 2176 3965 2177 3966 2176 3966 2189 3966 2191 3967 2189 3967 2190 3967 2006 3968 2190 3968 2007 3968 2086 3969 2007 3969 2009 3969 2008 3970 2009 3970 2083 3970 2011 3971 2083 3971 2010 3971 2012 3972 2011 3972 2010 3972 2012 3973 2301 3973 2011 3973 2011 3974 2301 3974 2008 3974 2083 3975 2011 3975 2008 3975 1953 3976 1838 3976 2014 3976 2013 3977 2014 3977 2183 3977 2171 3978 2183 3978 2181 3978 2184 3979 2181 3979 2200 3979 2172 3980 2200 3980 2201 3980 2207 3981 2201 3981 2015 3981 2206 3982 2015 3982 2182 3982 2173 3983 2182 3983 2084 3983 2085 3984 2084 3984 2016 3984 2309 3985 2016 3985 2017 3985 2018 3986 2017 3986 2080 3986 2018 3987 2309 3987 2017 3987 1955 3988 1830 3988 2019 3988 2187 3989 2019 3989 2193 3989 2188 3990 2193 3990 2199 3990 2204 3991 2199 3991 2195 3991 2205 3992 2195 3992 2214 3992 2209 3993 2214 3993 2215 3993 2020 3994 2215 3994 2218 3994 2219 3995 2218 3995 2224 3995 2021 3996 2224 3996 2081 3996 2022 3997 2081 3997 2198 3997 2314 3998 2198 3998 2315 3998 2314 3999 2022 3999 2198 3999 1958 4000 1827 4000 2023 4000 2194 4001 2023 4001 2024 4001 2203 4002 2024 4002 2211 4002 2208 4003 2211 4003 2213 4003 2212 4004 2213 4004 2217 4004 2216 4005 2217 4005 2223 4005 2196 4006 2223 4006 2025 4006 2026 4007 2025 4007 2229 4007 2197 4008 2229 4008 2078 4008 2028 4009 2078 4009 2027 4009 2320 4010 2027 4010 2076 4010 2320 4011 2028 4011 2027 4011 2029 4012 1814 4012 2030 4012 2030 4013 1814 4013 1893 4013 1961 4014 1893 4014 2031 4014 2231 4015 2032 4015 2034 4015 2035 4016 2034 4016 2033 4016 2035 4017 2231 4017 2034 4017 2035 4018 2233 4018 2231 4018 2035 4019 2230 4019 2233 4019 2035 4020 1804 4020 2230 4020 2230 4021 1804 4021 1803 4021 2233 4022 1803 4022 2036 4022 2237 4023 2036 4023 2037 4023 2038 4024 2037 4024 2039 4024 2043 4025 2039 4025 2040 4025 2042 4026 2040 4026 2504 4026 2239 4027 2042 4027 2504 4027 2239 4028 2041 4028 2042 4028 2042 4029 2041 4029 2043 4029 2040 4030 2042 4030 2043 4030 2032 4031 2033 4031 2034 4031 2036 4032 1803 4032 2044 4032 2045 4033 2044 4033 1802 4033 2045 4034 2036 4034 2044 4034 2045 4035 2048 4035 2036 4035 2045 4036 2047 4036 2048 4036 2048 4037 2047 4037 2046 4037 1794 4038 2046 4038 2047 4038 1794 4039 2048 4039 2046 4039 1794 4040 1795 4040 2048 4040 2048 4041 1795 4041 2051 4041 2052 4042 2051 4042 2049 4042 2050 4043 2049 4043 2055 4043 2504 4044 2055 4044 2505 4044 2504 4045 2050 4045 2055 4045 2504 4046 2040 4046 2050 4046 2050 4047 2040 4047 2039 4047 2052 4048 2039 4048 2037 4048 2048 4049 2037 4049 2036 4049 2048 4050 2052 4050 2037 4050 2048 4051 2051 4051 2052 4051 1803 4052 1802 4052 2044 4052 2051 4053 1795 4053 2054 4053 2053 4054 2054 4054 1795 4054 2053 4055 2051 4055 2054 
4055 2053 4056 1800 4056 2051 4056 2051 4057 1800 4057 2056 4057 2049 4058 2056 4058 2055 4058 2049 4059 2051 4059 2056 4059 2056 4060 1800 4060 2057 4060 2055 4061 2057 4061 1800 4061 2055 4062 2056 4062 2057 4062 2059 4063 2504 4063 1968 4063 2061 4064 1968 4064 2064 4064 2061 4065 2059 4065 1968 4065 2061 4066 2058 4066 2059 4066 2061 4067 2060 4067 2058 4067 2061 4068 2062 4068 2060 4068 1968 4069 2063 4069 1967 4069 2325 4070 1968 4070 1967 4070 2325 4071 2323 4071 2064 4071 2064 4072 2323 4072 2065 4072 2066 4073 2065 4073 2323 4073 2066 4074 2064 4074 2065 4074 2066 4075 2067 4075 2064 4075 2066 4076 2322 4076 2067 4076 2067 4077 2322 4077 2068 4077 2069 4078 2068 4078 2322 4078 2069 4079 2067 4079 2068 4079 2069 4080 2076 4080 2067 4080 2067 4081 2076 4081 2078 4081 2236 4082 2078 4082 2229 4082 2070 4083 2229 4083 2025 4083 2227 4084 2025 4084 2223 4084 2222 4085 2223 4085 2217 4085 2071 4086 2217 4086 2213 4086 2210 4087 2213 4087 2211 4087 2074 4088 2211 4088 2024 4088 2072 4089 2024 4089 2023 4089 2072 4090 2074 4090 2024 4090 2072 4091 2073 4091 2074 4091 2074 4092 2073 4092 1974 4092 2210 4093 1974 4093 2220 4093 2071 4094 2220 4094 1973 4094 2222 4095 1973 4095 2075 4095 2227 4096 2075 4096 1970 4096 2070 4097 1970 4097 1969 4097 2236 4098 1969 4098 2062 4098 2067 4099 2062 4099 2064 4099 2067 4100 2236 4100 2062 4100 2067 4101 2078 4101 2236 4101 2078 4102 2076 4102 2027 4102 2317 4103 2077 4103 2028 4103 2317 4104 2315 4104 2077 4104 2077 4105 2315 4105 2197 4105 2028 4106 2197 4106 2078 4106 2028 4107 2077 4107 2197 4107 2313 4108 2079 4108 2022 4108 2313 4109 2080 4109 2079 4109 2079 4110 2080 4110 2021 4110 2022 4111 2021 4111 2081 4111 2022 4112 2079 4112 2021 4112 2307 4113 2082 4113 2309 4113 2307 4114 2010 4114 2082 4114 2082 4115 2010 4115 2083 4115 2085 4116 2083 4116 2173 4116 2084 4117 2085 4117 2173 4117 2008 4118 2301 4118 2087 4118 2001 4119 2008 4119 2087 4119 2001 4120 2086 4120 2008 4120 2001 4121 2000 4121 2086 4121 2086 4122 2000 4122 2006 4122 2007 4123 2086 4123 2006 4123 2301 4124 2300 4124 2087 4124 2087 4125 2300 4125 2002 4125 2264 4126 2263 4126 2143 4126 2143 4127 2263 4127 2093 4127 2138 4128 2093 4128 2088 4128 1907 4129 2088 4129 2134 4129 1909 4130 2134 4130 2096 4130 2131 4131 2096 4131 2126 4131 2127 4132 2126 4132 2089 4132 1910 4133 2089 4133 2090 4133 1911 4134 2090 4134 2091 4134 1897 4135 1911 4135 2091 4135 2263 4136 2092 4136 2093 4136 2093 4137 2092 4137 2094 4137 2088 4138 2094 4138 2102 4138 2134 4139 2102 4139 2095 4139 2096 4140 2095 4140 2128 4140 2126 4141 2128 4141 2122 4141 2089 4142 2122 4142 2112 4142 2090 4143 2112 4143 1896 4143 2091 4144 2090 4144 1896 4144 2092 4145 2097 4145 2094 4145 2094 4146 2097 4146 2259 4146 2103 4147 2259 4147 2258 4147 2129 4148 2258 4148 2251 4148 2105 4149 2251 4149 2241 4149 2098 4150 2241 4150 2108 4150 2120 4151 2108 4151 2248 4151 2099 4152 2248 4152 2111 4152 2110 4153 2111 4153 2109 4153 2107 4154 2109 4154 2115 4154 2100 4155 2115 4155 2106 4155 2104 4156 2106 4156 2123 4156 2101 4157 2123 4157 2095 4157 2102 4158 2101 4158 2095 4158 2102 4159 2103 4159 2101 4159 2102 4160 2094 4160 2103 4160 2103 4161 2094 4161 2259 4161 2103 4162 2258 4162 2129 4162 2101 4163 2129 4163 2104 4163 2123 4164 2101 4164 2104 4164 2129 4165 2251 4165 2105 4165 2104 4166 2105 4166 2100 4166 2106 4167 2104 4167 2100 4167 2105 4168 2241 4168 2098 4168 2100 4169 2098 4169 2107 4169 2115 4170 2100 4170 2107 4170 2098 4171 2108 4171 2120 4171 2107 4172 2120 4172 2110 4172 2109 4173 2107 4173 2110 4173 2120 4174 
2248 4174 2099 4174 2110 4175 2099 4175 2111 4175 2110 4176 2120 4176 2099 4176 1896 4177 2112 4177 2116 4177 2116 4178 2112 4178 2119 4178 1895 4179 2119 4179 2113 4179 2114 4180 2113 4180 2115 4180 2109 4181 2114 4181 2115 4181 2109 4182 2111 4182 2114 4182 2114 4183 2111 4183 1895 4183 2113 4184 2114 4184 1895 4184 1895 4185 2116 4185 2119 4185 2117 4186 1911 4186 1912 4186 1899 4187 2117 4187 1912 4187 2121 4188 1899 4188 2118 4188 1910 4189 2090 4189 1911 4189 2089 4190 2112 4190 2090 4190 2112 4191 2122 4191 2119 4191 2119 4192 2122 4192 2124 4192 2113 4193 2124 4193 2106 4193 2115 4194 2113 4194 2106 4194 2113 4195 2119 4195 2124 4195 2098 4196 2120 4196 2107 4196 1922 4197 1913 4197 2121 4197 2125 4198 1910 4198 2117 4198 1914 4199 2125 4199 2117 4199 1913 4200 1914 4200 1899 4200 2127 4201 2089 4201 1910 4201 2126 4202 2122 4202 2089 4202 2123 4203 2106 4203 2124 4203 2128 4204 2124 4204 2122 4204 2128 4205 2123 4205 2124 4205 2128 4206 2095 4206 2123 4206 2105 4207 2098 4207 2100 4207 1935 4208 1929 4208 1922 4208 1929 4209 1923 4209 1913 4209 1901 4210 2127 4210 2125 4210 1900 4211 1901 4211 2125 4211 1923 4212 1900 4212 1914 4212 2131 4213 2126 4213 2127 4213 2096 4214 2128 4214 2126 4214 2129 4215 2105 4215 2104 4215 1936 4216 1930 4216 1935 4216 1930 4217 2130 4217 1929 4217 2130 4218 1915 4218 1923 4218 1908 4219 2131 4219 1901 4219 1902 4220 1908 4220 1901 4220 1915 4221 1902 4221 1900 4221 1909 4222 2096 4222 2131 4222 2134 4223 2095 4223 2096 4223 2103 4224 2129 4224 2101 4224 1943 4225 2132 4225 1936 4225 2132 4226 2133 4226 1930 4226 2133 4227 2135 4227 2130 4227 2135 4228 1916 4228 1915 4228 2136 4229 1909 4229 1908 4229 1903 4230 2136 4230 1908 4230 1916 4231 1903 4231 1902 4231 1907 4232 2134 4232 1909 4232 2088 4233 2102 4233 2134 4233 2139 4234 1944 4234 1943 4234 1944 4235 1937 4235 2132 4235 1937 4236 1931 4236 2133 4236 1931 4237 1924 4237 2135 4237 1924 4238 1917 4238 1916 4238 2137 4239 1907 4239 2136 4239 1904 4240 2137 4240 2136 4240 1917 4241 1904 4241 1903 4241 2138 4242 2088 4242 1907 4242 2093 4243 2094 4243 2088 4243 2144 4244 1975 4244 2139 4244 1975 4245 2145 4245 1944 4245 2145 4246 2140 4246 1937 4246 2140 4247 1938 4247 1931 4247 1938 4248 2141 4248 1924 4248 2141 4249 1918 4249 1917 4249 1906 4250 2138 4250 2137 4250 2142 4251 1906 4251 2137 4251 1918 4252 2142 4252 1904 4252 2143 4253 2093 4253 2138 4253 2151 4254 1982 4254 2144 4254 1982 4255 1976 4255 1975 4255 1976 4256 1977 4256 2145 4256 1977 4257 2146 4257 2140 4257 2146 4258 2147 4258 1938 4258 2147 4259 2148 4259 2141 4259 2148 4260 1925 4260 1918 4260 2149 4261 2143 4261 1906 4261 2150 4262 2149 4262 1906 4262 1925 4263 2150 4263 2142 4263 2158 4264 1995 4264 2151 4264 1995 4265 2152 4265 1982 4265 2152 4266 2153 4266 1976 4266 2153 4267 1945 4267 1977 4267 1945 4268 1946 4268 2146 4268 1946 4269 2157 4269 2147 4269 2157 4270 1932 4270 2148 4270 1932 4271 2154 4271 1925 4271 2154 4272 1905 4272 2150 4272 2159 4273 2155 4273 1995 4273 2155 4274 2164 4274 2152 4274 2164 4275 1978 4275 2153 4275 1978 4276 2156 4276 1945 4276 2156 4277 1939 4277 1946 4277 1939 4278 1933 4278 2157 4278 1933 4279 1926 4279 1932 4279 1926 4280 1927 4280 2154 4280 2158 4281 2005 4281 2159 4281 2159 4282 2005 4282 2160 4282 2163 4283 2160 4283 2161 4283 1996 4284 2161 4284 2174 4284 1997 4285 2174 4285 2177 4285 1998 4286 2177 4286 2191 4286 2162 4287 2191 4287 2006 4287 2000 4288 2162 4288 2006 4288 1952 4289 2160 4289 2005 4289 2160 4290 2163 4290 2159 4290 2163 4291 1992 4291 2155 4291 1992 4292 1991 4292 2164 
4292 1991 4293 2168 4293 1978 4293 2168 4294 2165 4294 2156 4294 2165 4295 1947 4295 1939 4295 1947 4296 2166 4296 1933 4296 2166 4297 2167 4297 1926 4297 2169 4298 2161 4298 2160 4298 2161 4299 1996 4299 2163 4299 1996 4300 1993 4300 1992 4300 1993 4301 1983 4301 1991 4301 1983 4302 1979 4302 2168 4302 1979 4303 1948 4303 2165 4303 1948 4304 1940 4304 1947 4304 1940 4305 1942 4305 2166 4305 1952 4306 1953 4306 2169 4306 2169 4307 1953 4307 2013 4307 2170 4308 2013 4308 2171 4308 2176 4309 2171 4309 2184 4309 2189 4310 2184 4310 2172 4310 2190 4311 2172 4311 2207 4311 2007 4312 2207 4312 2206 4312 2009 4313 2206 4313 2173 4313 2083 4314 2009 4314 2173 4314 2014 4315 2013 4315 1953 4315 2013 4316 2170 4316 2169 4316 2170 4317 2174 4317 2161 4317 2174 4318 1997 4318 1996 4318 1997 4319 2175 4319 1993 4319 2175 4320 1984 4320 1983 4320 1984 4321 1980 4321 1979 4321 1980 4322 2180 4322 1948 4322 2180 4323 1941 4323 1940 4323 2183 4324 2171 4324 2013 4324 2171 4325 2176 4325 2170 4325 2176 4326 2177 4326 2174 4326 2177 4327 1998 4327 1997 4327 1998 4328 2185 4328 2175 4328 2185 4329 2178 4329 1984 4329 2178 4330 1985 4330 1980 4330 1985 4331 2179 4331 2180 4331 2014 4332 1955 4332 2183 4332 2183 4333 1955 4333 2187 4333 2181 4334 2187 4334 2188 4334 2200 4335 2188 4335 2204 4335 2201 4336 2204 4336 2205 4336 2015 4337 2205 4337 2209 4337 2182 4338 2209 4338 2020 4338 2084 4339 2020 4339 2219 4339 2016 4340 2219 4340 2021 4340 2080 4341 2016 4341 2021 4341 2080 4342 2017 4342 2016 4342 2019 4343 2187 4343 1955 4343 2187 4344 2181 4344 2183 4344 2181 4345 2184 4345 2171 4345 2184 4346 2189 4346 2176 4346 2189 4347 2191 4347 2177 4347 2191 4348 2162 4348 1998 4348 2162 4349 1999 4349 2185 4349 1999 4350 2186 4350 2178 4350 2186 4351 1989 4351 1985 4351 2193 4352 2188 4352 2187 4352 2188 4353 2200 4353 2181 4353 2200 4354 2172 4354 2184 4354 2172 4355 2190 4355 2189 4355 2190 4356 2006 4356 2191 4356 2192 4357 1999 4357 2000 4357 1994 4358 2186 4358 2192 4358 2019 4359 1958 4359 2193 4359 2193 4360 1958 4360 2194 4360 2199 4361 2194 4361 2203 4361 2195 4362 2203 4362 2208 4362 2214 4363 2208 4363 2212 4363 2215 4364 2212 4364 2216 4364 2218 4365 2216 4365 2196 4365 2224 4366 2196 4366 2026 4366 2081 4367 2026 4367 2197 4367 2315 4368 2081 4368 2197 4368 2315 4369 2198 4369 2081 4369 2023 4370 2194 4370 1958 4370 2194 4371 2199 4371 2193 4371 2199 4372 2204 4372 2188 4372 2204 4373 2201 4373 2200 4373 2201 4374 2207 4374 2172 4374 2207 4375 2007 4375 2190 4375 2202 4376 2192 4376 2001 4376 2203 4377 2194 4377 2024 4377 2195 4378 2199 4378 2203 4378 2205 4379 2204 4379 2195 4379 2015 4380 2201 4380 2205 4380 2206 4381 2207 4381 2015 4381 2009 4382 2007 4382 2206 4382 2008 4383 2086 4383 2009 4383 2208 4384 2203 4384 2211 4384 2214 4385 2195 4385 2208 4385 2209 4386 2205 4386 2214 4386 2182 4387 2015 4387 2209 4387 2173 4388 2206 4388 2182 4388 2210 4389 2211 4389 2074 4389 1974 4390 2210 4390 2074 4390 2029 4391 1974 4391 2073 4391 2212 4392 2208 4392 2213 4392 2215 4393 2214 4393 2212 4393 2020 4394 2209 4394 2215 4394 2084 4395 2182 4395 2020 4395 2082 4396 2083 4396 2085 4396 2309 4397 2085 4397 2016 4397 2309 4398 2082 4398 2085 4398 2071 4399 2213 4399 2210 4399 2220 4400 2071 4400 2210 4400 2216 4401 2212 4401 2217 4401 2218 4402 2215 4402 2216 4402 2219 4403 2020 4403 2218 4403 2016 4404 2084 4404 2219 4404 1972 4405 2220 4405 1961 4405 2221 4406 1972 4406 1961 4406 2221 4407 2226 4407 1972 4407 2221 4408 2231 4408 2226 4408 2222 4409 2217 4409 2071 4409 1973 4410 2222 4410 2071 4410 2196 4411 
2216 4411 2223 4411 2224 4412 2218 4412 2196 4412 2021 4413 2219 4413 2224 4413 2032 4414 2221 4414 1964 4414 1971 4415 1973 4415 1972 4415 2226 4416 1971 4416 1972 4416 2226 4417 2225 4417 1971 4417 2226 4418 1965 4418 2225 4418 2227 4419 2223 4419 2222 4419 2075 4420 2227 4420 2222 4420 2026 4421 2196 4421 2025 4421 2081 4422 2224 4422 2026 4422 2228 4423 2075 4423 1971 4423 2225 4424 2228 4424 1971 4424 2225 4425 2235 4425 2228 4425 2225 4426 1966 4426 2235 4426 2070 4427 2025 4427 2227 4427 1970 4428 2070 4428 2227 4428 2197 4429 2026 4429 2229 4429 1803 4430 2233 4430 2230 4430 2231 4431 2233 4431 1965 4431 1965 4432 2233 4432 2237 4432 1966 4433 2237 4433 2038 4433 2232 4434 2038 4434 2043 4434 2041 4435 2232 4435 2043 4435 2036 4436 2237 4436 2233 4436 2237 4437 1966 4437 1965 4437 2234 4438 1970 4438 2228 4438 2235 4439 2234 4439 2228 4439 2235 4440 2238 4440 2234 4440 2235 4441 2232 4441 2238 4441 2236 4442 2229 4442 2070 4442 1969 4443 2236 4443 2070 4443 2038 4444 2237 4444 2037 4444 2232 4445 1966 4445 2038 4445 2060 4446 1969 4446 2234 4446 2238 4447 2060 4447 2234 4447 2238 4448 2058 4448 2060 4448 2238 4449 2041 4449 2058 4449 2043 4450 2038 4450 2039 4450 2050 4451 2039 4451 2052 4451 2049 4452 2050 4452 2052 4452 2059 4453 2058 4453 2239 4453 1454 4454 2419 4454 2250 4454 2240 4455 2250 4455 2249 4455 2337 4456 2249 4456 2248 4456 2108 4457 2337 4457 2248 4457 2108 4458 2246 4458 2337 4458 2108 4459 2241 4459 2246 4459 2246 4460 2241 4460 2242 4460 2245 4461 2242 4461 2243 4461 2244 4462 2243 4462 2252 4462 2244 4463 2245 4463 2243 4463 2244 4464 1458 4464 2245 4464 2245 4465 1458 4465 2247 4465 2246 4466 2247 4466 2337 4466 2246 4467 2245 4467 2247 4467 2246 4468 2242 4468 2245 4468 2248 4469 2249 4469 2111 4469 2111 4470 2249 4470 2250 4470 2419 4471 2111 4471 2250 4471 2241 4472 2251 4472 2242 4472 2242 4473 2251 4473 2254 4473 2243 4474 2254 4474 2253 4474 2252 4475 2253 4475 1491 4475 2252 4476 2243 4476 2253 4476 2251 4477 2258 4477 2254 4477 2254 4478 2258 4478 2255 4478 2253 4479 2255 4479 2256 4479 1491 4480 2256 4480 2257 4480 1491 4481 2253 4481 2256 4481 2258 4482 2259 4482 2255 4482 2255 4483 2259 4483 2260 4483 2256 4484 2260 4484 2262 4484 2261 4485 2262 4485 2275 4485 2261 4486 2256 4486 2262 4486 2261 4487 2257 4487 2256 4487 2259 4488 2097 4488 2260 4488 2260 4489 2097 4489 2092 4489 2276 4490 2092 4490 2263 4490 2277 4491 2263 4491 2264 4491 2278 4492 2264 4492 2281 4492 2266 4493 2281 4493 2265 4493 1920 4494 2266 4494 2265 4494 1920 4495 2288 4495 2266 4495 1920 4496 1919 4496 2288 4496 2288 4497 1919 4497 1928 4497 2268 4498 1928 4498 2267 4498 1934 4499 2268 4499 2267 4499 1934 4500 2289 4500 2268 4500 1934 4501 2269 4501 2289 4501 2289 4502 2269 4502 2270 4502 2333 4503 2270 4503 2271 4503 2272 4504 2333 4504 2271 4504 2272 4505 2339 4505 2333 4505 2272 4506 2340 4506 2339 4506 2272 4507 1981 4507 2340 4507 2340 4508 1981 4508 2273 4508 1500 4509 2273 4509 2274 4509 1501 4510 2274 4510 1503 4510 1501 4511 1500 4511 2274 4511 2260 4512 2092 4512 2276 4512 2262 4513 2276 4513 2338 4513 2275 4514 2338 4514 1531 4514 2275 4515 2262 4515 2338 4515 2276 4516 2263 4516 2277 4516 2338 4517 2277 4517 2280 4517 1531 4518 2280 4518 1492 4518 1531 4519 2338 4519 2280 4519 2277 4520 2264 4520 2278 4520 2280 4521 2278 4521 2279 4521 1492 4522 2279 4522 2284 4522 1492 4523 2280 4523 2279 4523 2278 4524 2281 4524 2266 4524 2279 4525 2266 4525 2282 4525 1493 4526 2282 4526 2283 4526 1493 4527 2279 4527 2282 4527 1493 4528 2284 4528 2279 4528 2288 4529 1928 4529 2268 
4529 2287 4530 2268 4530 2334 4530 2285 4531 2334 4531 2335 4531 2285 4532 2287 4532 2334 4532 2285 4533 2286 4533 2287 4533 2287 4534 2286 4534 1494 4534 2282 4535 1494 4535 2283 4535 2282 4536 2287 4536 1494 4536 2282 4537 2288 4537 2287 4537 2282 4538 2266 4538 2288 4538 2289 4539 2270 4539 2333 4539 2290 4540 2333 4540 2332 4540 1499 4541 2290 4541 2332 4541 1499 4542 1497 4542 2290 4542 2290 4543 1497 4543 2289 4543 2333 4544 2290 4544 2289 4544 2274 4545 2273 4545 2296 4545 1503 4546 2296 4546 1986 4546 2329 4547 1986 4547 2328 4547 1504 4548 2328 4548 2291 4548 2004 4549 1504 4549 2291 4549 2004 4550 2292 4550 1504 4550 2004 4551 2294 4551 2292 4551 2004 4552 2003 4552 2294 4552 2294 4553 2003 4553 2293 4553 2295 4554 2293 4554 2292 4554 2294 4555 2295 4555 2292 4555 2294 4556 2293 4556 2295 4556 2273 4557 1988 4557 2296 4557 2296 4558 1988 4558 1986 4558 1986 4559 1987 4559 2328 4559 2328 4560 1987 4560 2291 4560 2003 4561 2002 4561 2293 4561 2293 4562 2002 4562 2298 4562 2298 4563 2002 4563 2299 4563 2297 4564 2299 4564 1507 4564 2298 4565 2297 4565 1507 4565 2298 4566 2299 4566 2297 4566 2002 4567 2300 4567 2299 4567 2299 4568 2300 4568 1507 4568 1507 4569 2300 4569 2301 4569 1508 4570 2301 4570 2302 4570 2304 4571 2302 4571 2303 4571 1508 4572 2304 4572 2303 4572 1508 4573 2302 4573 2304 4573 2301 4574 2012 4574 2302 4574 2302 4575 2012 4575 2303 4575 2303 4576 2012 4576 2010 4576 2305 4577 2010 4577 2310 4577 2306 4578 2310 4578 2308 4578 2305 4579 2306 4579 2308 4579 2305 4580 2310 4580 2306 4580 2010 4581 2307 4581 2310 4581 2310 4582 2307 4582 2309 4582 2308 4583 2309 4583 2311 4583 2308 4584 2310 4584 2309 4584 2309 4585 2018 4585 2311 4585 2311 4586 2018 4586 1509 4586 1509 4587 2018 4587 2312 4587 2312 4588 2018 4588 2080 4588 2319 4589 2080 4589 2313 4589 1574 4590 2313 4590 2022 4590 1575 4591 2022 4591 2314 4591 2315 4592 1575 4592 2314 4592 2315 4593 1577 4593 1575 4593 2315 4594 2316 4594 1577 4594 2315 4595 2317 4595 2316 4595 2316 4596 2317 4596 2028 4596 2318 4597 2028 4597 2321 4597 2318 4598 2316 4598 2028 4598 2312 4599 2080 4599 2319 4599 2319 4600 2313 4600 1574 4600 1574 4601 2022 4601 1575 4601 2028 4602 2320 4602 2321 4602 2321 4603 2320 4603 2076 4603 1582 4604 2076 4604 1583 4604 1582 4605 2321 4605 2076 4605 2076 4606 2069 4606 1583 4606 1583 4607 2069 4607 2322 4607 1584 4608 2322 4608 2324 4608 1584 4609 1583 4609 2322 4609 2322 4610 2066 4610 2324 4610 2324 4611 2066 4611 2323 4611 1591 4612 2323 4612 1595 4612 1591 4613 2324 4613 2323 4613 2323 4614 2325 4614 1595 4614 1595 4615 2325 4615 2063 4615 2326 4616 2063 4616 2327 4616 2326 4617 1595 4617 2063 4617 2063 4618 2504 4618 2327 4618 2305 4619 2303 4619 2010 4619 1508 4620 1507 4620 2301 4620 2328 4621 1504 4621 2329 4621 2329 4622 1504 4622 2330 4622 1503 4623 2329 4623 2330 4623 1503 4624 1986 4624 2329 4624 2296 4625 1503 4625 2274 4625 2331 4626 2339 4626 1500 4626 2331 4627 2332 4627 2339 4627 2339 4628 2332 4628 2333 4628 2289 4629 1497 4629 2334 4629 2268 4630 2289 4630 2334 4630 1497 4631 2335 4631 2334 4631 1458 4632 2336 4632 2247 4632 2247 4633 2336 4633 2240 4633 2337 4634 2240 4634 2249 4634 2337 4635 2247 4635 2240 4635 2336 4636 1454 4636 2240 4636 2240 4637 1454 4637 2250 4637 2254 4638 2243 4638 2242 4638 2255 4639 2253 4639 2254 4639 2260 4640 2256 4640 2255 4640 2276 4641 2262 4641 2260 4641 2277 4642 2338 4642 2276 4642 2278 4643 2280 4643 2277 4643 2266 4644 2279 4644 2278 4644 2268 4645 2287 4645 2288 4645 1500 4646 2339 4646 2340 4646 2273 4647 1500 4647 2340 4647 2505 4648 
1598 4648 2506 4648 2506 4649 1598 4649 2341 4649 1598 4650 2327 4650 2341 4650 2341 4651 2327 4651 2507 4651 0 4652 2344 4652 2342 4652 2343 4653 2342 4653 48 4653 2343 4654 0 4654 2342 4654 2344 4655 2345 4655 2342 4655 2342 4656 2345 4656 46 4656 45 4657 2342 4657 46 4657 45 4658 89 4658 2342 4658 2342 4659 89 4659 44 4659 2346 4660 2342 4660 44 4660 2346 4661 2347 4661 2342 4661 2342 4662 2347 4662 42 4662 41 4663 2342 4663 42 4663 41 4664 2348 4664 2342 4664 2342 4665 2348 4665 40 4665 39 4666 2342 4666 40 4666 39 4667 38 4667 2342 4667 2342 4668 38 4668 2349 4668 2350 4669 2342 4669 2349 4669 2350 4670 2351 4670 2342 4670 2342 4671 2351 4671 2352 4671 2357 4672 2352 4672 82 4672 2353 4673 2357 4673 82 4673 2353 4674 2354 4674 2357 4674 2357 4675 2354 4675 35 4675 2355 4676 2357 4676 35 4676 2355 4677 34 4677 2357 4677 2357 4678 34 4678 78 4678 33 4679 2357 4679 78 4679 33 4680 2356 4680 2357 4680 2357 4681 2356 4681 77 4681 76 4682 2357 4682 77 4682 76 4683 2358 4683 2357 4683 2357 4684 2358 4684 32 4684 31 4685 2357 4685 32 4685 31 4686 2359 4686 2357 4686 2357 4687 2359 4687 30 4687 2360 4688 2357 4688 30 4688 2360 4689 29 4689 2357 4689 2357 4690 29 4690 5655 4690 5655 4691 29 4691 2361 4691 2362 4692 5655 4692 2361 4692 2362 4693 28 4693 5655 4693 5655 4694 28 4694 27 4694 26 4695 5655 4695 27 4695 26 4696 24 4696 5655 4696 5655 4697 24 4697 23 4697 22 4698 5655 4698 23 4698 22 4699 21 4699 5655 4699 5655 4700 21 4700 2363 4700 19 4701 5655 4701 2363 4701 19 4702 979 4702 5655 4702 19 4703 2364 4703 979 4703 979 4704 2364 4704 17 4704 15 4705 979 4705 17 4705 15 4706 2365 4706 979 4706 979 4707 2365 4707 14 4707 2366 4708 979 4708 14 4708 2366 4709 2367 4709 979 4709 979 4710 2367 4710 66 4710 65 4711 979 4711 66 4711 65 4712 2368 4712 979 4712 979 4713 2368 4713 2369 4713 2370 4714 979 4714 2369 4714 2370 4715 2371 4715 979 4715 979 4716 2371 4716 12 4716 11 4717 979 4717 12 4717 11 4718 2372 4718 979 4718 979 4719 2372 4719 469 4719 469 4720 2372 4720 2373 4720 2374 4721 469 4721 2373 4721 2374 4722 9 4722 469 4722 469 4723 9 4723 8 4723 2375 4724 469 4724 8 4724 2375 4725 2376 4725 469 4725 469 4726 2376 4726 58 4726 2377 4727 469 4727 58 4727 2377 4728 6 4728 469 4728 469 4729 6 4729 5 4729 2405 4730 5 4730 2378 4730 2379 4731 2405 4731 2378 4731 2379 4732 2380 4732 2405 4732 2405 4733 2380 4733 2381 4733 2342 4734 2381 4734 2382 4734 3 4735 2342 4735 2382 4735 3 4736 2383 4736 2342 4736 2342 4737 2383 4737 2384 4737 2385 4738 2342 4738 2384 4738 2385 4739 50 4739 2342 4739 2342 4740 50 4740 48 4740 2342 4741 2352 4741 2357 4741 2393 4742 2357 4742 2386 4742 2393 4743 2342 4743 2357 4743 2393 4744 2419 4744 2342 4744 2393 4745 2439 4745 2419 4745 2393 4746 2387 4746 2439 4746 2393 4747 2389 4747 2387 4747 2393 4748 2388 4748 2389 4748 2393 4749 2390 4749 2388 4749 2393 4750 2391 4750 2390 4750 2393 4751 101 4751 2391 4751 2393 4752 2392 4752 101 4752 2393 4753 103 4753 2392 4753 2393 4754 2395 4754 103 4754 2393 4755 2394 4755 2395 4755 2393 4756 104 4756 2394 4756 2393 4757 2396 4757 104 4757 2393 4758 105 4758 2396 4758 2393 4759 106 4759 105 4759 2393 4760 152 4760 106 4760 2393 4761 107 4761 152 4761 2393 4762 109 4762 107 4762 2393 4763 2397 4763 109 4763 2393 4764 5625 4764 2397 4764 2397 4765 5625 4765 2398 4765 2398 4766 5625 4766 155 4766 155 4767 5625 4767 110 4767 110 4768 5625 4768 2399 4768 2399 4769 5625 4769 2400 4769 2400 4770 5625 4770 2401 4770 2401 4771 5625 4771 2402 4771 2402 4772 5625 4772 112 4772 112 4773 5625 4773 157 4773 157 4774 5625 4774 2403 4774 
2403 4775 5625 4775 2404 4775 2404 4776 5625 4776 2434 4776 159 4777 2434 4777 115 4777 159 4778 2404 4778 2434 4778 469 4779 5 4779 2405 4779 2405 4780 2381 4780 2342 4780 1855 4781 2405 4781 2342 4781 1855 4782 5861 4782 2405 4782 1855 4783 302 4783 5861 4783 1855 4784 2406 4784 302 4784 1855 4785 304 4785 2406 4785 1855 4786 305 4786 304 4786 1855 4787 2407 4787 305 4787 1855 4788 307 4788 2407 4788 1855 4789 2409 4789 307 4789 1855 4790 2408 4790 2409 4790 1855 4791 310 4791 2408 4791 1855 4792 2410 4792 310 4792 1855 4793 312 4793 2410 4793 1855 4794 352 4794 312 4794 1855 4795 2411 4795 352 4795 1855 4796 313 4796 2411 4796 1855 4797 314 4797 313 4797 1855 4798 2412 4798 314 4798 1855 4799 2413 4799 2412 4799 1855 4800 2414 4800 2413 4800 1855 4801 355 4801 2414 4801 1855 4802 316 4802 355 4802 1855 4803 357 4803 316 4803 1855 4804 317 4804 357 4804 1855 4805 2415 4805 317 4805 1855 4806 2111 4806 2415 4806 2415 4807 2111 4807 200 4807 358 4808 200 4808 2416 4808 4248 4809 2416 4809 2476 4809 6353 4810 2476 4810 6424 4810 6353 4811 4248 4811 2476 4811 91 4812 92 4812 2419 4812 2417 4813 2419 4813 2444 4813 2417 4814 91 4814 2419 4814 92 4815 136 4815 2419 4815 2419 4816 136 4816 2420 4816 2418 4817 2419 4817 2420 4817 2418 4818 178 4818 2419 4818 2419 4819 178 4819 134 4819 2421 4820 2419 4820 134 4820 2421 4821 177 4821 2419 4821 2419 4822 177 4822 2422 4822 2423 4823 2419 4823 2422 4823 2423 4824 3792 4824 2419 4824 2423 4825 2424 4825 3792 4825 3792 4826 2424 4826 2425 4826 133 4827 3792 4827 2425 4827 133 4828 132 4828 3792 4828 3792 4829 132 4829 3794 4829 3794 4830 132 4830 2426 4830 2427 4831 3794 4831 2426 4831 2427 4832 2428 4832 3794 4832 3794 4833 2428 4833 2429 4833 131 4834 3794 4834 2429 4834 131 4835 2430 4835 3794 4835 3794 4836 2430 4836 130 4836 2431 4837 3794 4837 130 4837 2431 4838 128 4838 3794 4838 3794 4839 128 4839 2432 4839 2433 4840 3794 4840 2432 4840 2433 4841 164 4841 3794 4841 3794 4842 164 4842 2434 4842 2434 4843 164 4843 2436 4843 2435 4844 2434 4844 2436 4844 2435 4845 125 4845 2434 4845 2434 4846 125 4846 124 4846 163 4847 2434 4847 124 4847 163 4848 121 4848 2434 4848 2434 4849 121 4849 2437 4849 2438 4850 2434 4850 2437 4850 2438 4851 162 4851 2434 4851 2434 4852 162 4852 119 4852 161 4853 2434 4853 119 4853 161 4854 117 4854 2434 4854 2434 4855 117 4855 115 4855 2439 4856 2440 4856 2419 4856 2419 4857 2440 4857 147 4857 2441 4858 2419 4858 147 4858 2441 4859 146 4859 2419 4859 2419 4860 146 4860 2442 4860 144 4861 2419 4861 2442 4861 144 4862 99 4862 2419 4862 2419 4863 99 4863 98 4863 2443 4864 2419 4864 98 4864 2443 4865 140 4865 2419 4865 2419 4866 140 4866 139 4866 138 4867 2419 4867 139 4867 138 4868 96 4868 2419 4868 2419 4869 96 4869 2444 4869 180 4870 2446 4870 2445 4870 182 4871 2445 4871 230 4871 182 4872 180 4872 2445 4872 2446 4873 276 4873 2445 4873 2445 4874 276 4874 2447 4874 275 4875 2445 4875 2447 4875 275 4876 2448 4876 2445 4876 2445 4877 2448 4877 2449 4877 2450 4878 2445 4878 2449 4878 2450 4879 272 4879 2445 4879 2445 4880 272 4880 226 4880 2451 4881 2445 4881 226 4881 2451 4882 225 4882 2445 4882 2445 4883 225 4883 224 4883 269 4884 2445 4884 224 4884 269 4885 2452 4885 2445 4885 2445 4886 2452 4886 3858 4886 3858 4887 2452 4887 2453 4887 267 4888 3858 4888 2453 4888 267 4889 2454 4889 3858 4889 3858 4890 2454 4890 221 4890 219 4891 3858 4891 221 4891 219 4892 218 4892 3858 4892 3858 4893 218 4893 2455 4893 2456 4894 3858 4894 2455 4894 2456 4895 2457 4895 3858 4895 3858 4896 2457 4896 263 4896 262 4897 3858 4897 263 4897 
262 4898 2458 4898 3858 4898 3858 4899 2458 4899 3857 4899 3857 4900 2458 4900 2459 4900 261 4901 3857 4901 2459 4901 261 4902 214 4902 3857 4902 3857 4903 214 4903 213 4903 212 4904 3857 4904 213 4904 212 4905 2460 4905 3857 4905 3857 4906 2460 4906 258 4906 210 4907 3857 4907 258 4907 210 4908 2461 4908 3857 4908 3857 4909 2461 4909 256 4909 209 4910 3857 4910 256 4910 209 4911 255 4911 3857 4911 3857 4912 255 4912 2462 4912 254 4913 3857 4913 2462 4913 254 4914 252 4914 3857 4914 3857 4915 252 4915 2463 4915 2464 4916 3857 4916 2463 4916 2464 4917 2465 4917 3857 4917 3857 4918 2465 4918 2466 4918 251 4919 3857 4919 2466 4919 251 4920 250 4920 3857 4920 3857 4921 250 4921 205 4921 2467 4922 3857 4922 205 4922 2467 4923 2468 4923 3857 4923 3857 4924 2468 4924 2469 4924 2111 4925 2469 4925 2470 4925 2471 4926 2111 4926 2470 4926 2471 4927 203 4927 2111 4927 2111 4928 203 4928 2472 4928 201 4929 2111 4929 2472 4929 201 4930 200 4930 2111 4930 3857 4931 2469 4931 2111 4931 3792 4932 2111 4932 2419 4932 3792 4933 3857 4933 2111 4933 3792 4934 4042 4934 3857 4934 3857 4935 4042 4935 6265 4935 2415 4936 200 4936 358 4936 2416 4937 2473 4937 2476 4937 2476 4938 2473 4938 2474 4938 199 4939 2476 4939 2474 4939 199 4940 198 4940 2476 4940 2476 4941 198 4941 197 4941 2475 4942 2476 4942 197 4942 2475 4943 195 4943 2476 4943 2476 4944 195 4944 244 4944 243 4945 2476 4945 244 4945 243 4946 242 4946 2476 4946 2476 4947 242 4947 241 4947 2477 4948 2476 4948 241 4948 2477 4949 192 4949 2476 4949 2476 4950 192 4950 2478 4950 190 4951 2476 4951 2478 4951 190 4952 2479 4952 2476 4952 2476 4953 2479 4953 2481 4953 2480 4954 2476 4954 2481 4954 2480 4955 189 4955 2476 4955 2476 4956 189 4956 188 4956 187 4957 2476 4957 188 4957 187 4958 2445 4958 2476 4958 187 4959 234 4959 2445 4959 2445 4960 234 4960 2482 4960 185 4961 2445 4961 2482 4961 185 4962 184 4962 2445 4962 2445 4963 184 4963 230 4963 278 4964 334 4964 808 4964 279 4965 808 4965 281 4965 279 4966 278 4966 808 4966 334 4967 2483 4967 808 4967 808 4968 2483 4968 2484 4968 2485 4969 808 4969 2484 4969 2485 4970 2486 4970 808 4970 808 4971 2486 4971 330 4971 374 4972 808 4972 330 4972 374 4973 2487 4973 808 4973 808 4974 2487 4974 373 4974 4248 4975 373 4975 2488 4975 372 4976 4248 4976 2488 4976 372 4977 370 4977 4248 4977 4248 4978 370 4978 369 4978 2489 4979 4248 4979 369 4979 2489 4980 2490 4980 4248 4980 4248 4981 2490 4981 326 4981 2491 4982 4248 4982 326 4982 2491 4983 325 4983 4248 4983 4248 4984 325 4984 324 4984 366 4985 4248 4985 324 4985 366 4986 365 4986 4248 4986 4248 4987 365 4987 323 4987 2492 4988 4248 4988 323 4988 2492 4989 362 4989 4248 4989 4248 4990 362 4990 361 4990 360 4991 4248 4991 361 4991 360 4992 320 4992 4248 4992 4248 4993 320 4993 359 4993 318 4994 4248 4994 359 4994 318 4995 358 4995 4248 4995 4248 4996 358 4996 2416 4996 808 4997 373 4997 4248 4997 4258 4998 808 4998 4248 4998 302 4999 349 4999 5861 4999 5861 5000 349 5000 301 5000 300 5001 5861 5001 301 5001 300 5002 2498 5002 5861 5002 300 5003 2493 5003 2498 5003 2498 5004 2493 5004 299 5004 298 5005 2498 5005 299 5005 298 5006 297 5006 2498 5006 2498 5007 297 5007 2495 5007 2494 5008 2498 5008 2495 5008 2494 5009 295 5009 2498 5009 2498 5010 295 5010 293 5010 2496 5011 2498 5011 293 5011 2496 5012 2497 5012 2498 5012 2498 5013 2497 5013 345 5013 344 5014 2498 5014 345 5014 344 5015 342 5015 2498 5015 2498 5016 342 5016 2499 5016 341 5017 2498 5017 2499 5017 341 5018 808 5018 2498 5018 341 5019 339 5019 808 5019 808 5020 339 5020 288 5020 2500 5021 808 5021 288 
5021 2500 5022 338 5022 808 5022 808 5023 338 5023 2501 5023 286 5024 808 5024 2501 5024 286 5025 337 5025 808 5025 808 5026 337 5026 2502 5026 2503 5027 808 5027 2502 5027 2503 5028 283 5028 808 5028 808 5029 283 5029 282 5029 335 5030 808 5030 282 5030 335 5031 281 5031 808 5031 2357 5032 6166 5032 2386 5032 2445 5033 4455 5033 2476 5033 2504 5034 2505 5034 2508 5034 2508 5035 2505 5035 2506 5035 2327 5036 2504 5036 2507 5036 2507 5037 2504 5037 2508 5037 2508 5038 2506 5038 2507 5038 2507 5039 2506 5039 2341 5039 2510 5040 2509 5040 6182 5040 2510 5041 2511 5041 2509 5041 2510 5042 6181 5042 2511 5042 2511 5043 6181 5043 2514 5043 2513 5044 2514 5044 2512 5044 2513 5045 2511 5045 2514 5045 2513 5046 2515 5046 2511 5046 2511 5047 2515 5047 2509 5047 2509 5048 2515 5048 2516 5048 2690 5049 2516 5049 3960 5049 2691 5050 3960 5050 4026 5050 2517 5051 4026 5051 4025 5051 2685 5052 4025 5052 4024 5052 2686 5053 4024 5053 2518 5053 2683 5054 2518 5054 4023 5054 2519 5055 4023 5055 2520 5055 2680 5056 2520 5056 2681 5056 2678 5057 2681 5057 2521 5057 2677 5058 2521 5058 2675 5058 2676 5059 2675 5059 4021 5059 2672 5060 4021 5060 2673 5060 2671 5061 2673 5061 4020 5061 2669 5062 4020 5062 4019 5062 2522 5063 4019 5063 4018 5063 2667 5064 4018 5064 2523 5064 2663 5065 2523 5065 4017 5065 2662 5066 4017 5066 4016 5066 2524 5067 4016 5067 4015 5067 2660 5068 4015 5068 4014 5068 2657 5069 4014 5069 2525 5069 2526 5070 2525 5070 2527 5070 2655 5071 2527 5071 4012 5071 2654 5072 4012 5072 4013 5072 2652 5073 4013 5073 4011 5073 2650 5074 4011 5074 2651 5074 2649 5075 2651 5075 4010 5075 2528 5076 4010 5076 2648 5076 2529 5077 2648 5077 3980 5077 2645 5078 3980 5078 3978 5078 2643 5079 3978 5079 3977 5079 2641 5080 3977 5080 4008 5080 2640 5081 4008 5081 3976 5081 2638 5082 3976 5082 4007 5082 2530 5083 4007 5083 4006 5083 2636 5084 4006 5084 2531 5084 2634 5085 2531 5085 2532 5085 2632 5086 2532 5086 3974 5086 2631 5087 3974 5087 4003 5087 2629 5088 4003 5088 3972 5088 2628 5089 3972 5089 2626 5089 2627 5090 2626 5090 2533 5090 2625 5091 2533 5091 2624 5091 2623 5092 2624 5092 2534 5092 2621 5093 2534 5093 3971 5093 2535 5094 3971 5094 3970 5094 2536 5095 3970 5095 4001 5095 2537 5096 4001 5096 3969 5096 2538 5097 3969 5097 4000 5097 2616 5098 4000 5098 3999 5098 2614 5099 3999 5099 2612 5099 2613 5100 2612 5100 3998 5100 2610 5101 3998 5101 3967 5101 2609 5102 3967 5102 2539 5102 2607 5103 2539 5103 3997 5103 2605 5104 3997 5104 3966 5104 2604 5105 3966 5105 2540 5105 2602 5106 2540 5106 3964 5106 2601 5107 3964 5107 2541 5107 2600 5108 2541 5108 2542 5108 2599 5109 2542 5109 2543 5109 2598 5110 2543 5110 2594 5110 2595 5111 2594 5111 2592 5111 2591 5112 2592 5112 2544 5112 2589 5113 2544 5113 3957 5113 2588 5114 3957 5114 2587 5114 2586 5115 2587 5115 2545 5115 2584 5116 2545 5116 2546 5116 2582 5117 2546 5117 2581 5117 2580 5118 2581 5118 3958 5118 2577 5119 3958 5119 2576 5119 2575 5120 2576 5120 2574 5120 2572 5121 2574 5121 2547 5121 2571 5122 2547 5122 2569 5122 2566 5123 2569 5123 2567 5123 2565 5124 2567 5124 2548 5124 2549 5125 2548 5125 3959 5125 2550 5126 3959 5126 2551 5126 2563 5127 2551 5127 2552 5127 2561 5128 2552 5128 2553 5128 2554 5129 2553 5129 2555 5129 2559 5130 2555 5130 2557 5130 2556 5131 2557 5131 2512 5131 2514 5132 2556 5132 2512 5132 2514 5133 2558 5133 2556 5133 2514 5134 6181 5134 2558 5134 2558 5135 6322 5135 2556 5135 2556 5136 6322 5136 2559 5136 2557 5137 2556 5137 2559 5137 6322 5138 2560 5138 2559 5138 2559 5139 2560 5139 2554 5139 2555 5140 2559 5140 2554 5140 
2560 5141 6321 5141 2554 5141 2554 5142 6321 5142 2561 5142 2553 5143 2554 5143 2561 5143 6321 5144 6320 5144 2561 5144 2561 5145 6320 5145 2563 5145 2552 5146 2561 5146 2563 5146 6320 5147 2562 5147 2563 5147 2563 5148 2562 5148 2550 5148 2551 5149 2563 5149 2550 5149 2562 5150 2564 5150 2550 5150 2550 5151 2564 5151 2549 5151 3959 5152 2550 5152 2549 5152 2564 5153 6319 5153 2549 5153 2549 5154 6319 5154 2565 5154 2548 5155 2549 5155 2565 5155 6319 5156 6318 5156 2565 5156 2565 5157 6318 5157 2566 5157 2567 5158 2565 5158 2566 5158 6318 5159 2568 5159 2566 5159 2566 5160 2568 5160 2571 5160 2569 5161 2566 5161 2571 5161 2568 5162 2570 5162 2571 5162 2571 5163 2570 5163 2572 5163 2547 5164 2571 5164 2572 5164 2570 5165 2573 5165 2572 5165 2572 5166 2573 5166 2575 5166 2574 5167 2572 5167 2575 5167 2573 5168 2578 5168 2575 5168 2575 5169 2578 5169 2577 5169 2576 5170 2575 5170 2577 5170 2578 5171 2579 5171 2577 5171 2577 5172 2579 5172 2580 5172 3958 5173 2577 5173 2580 5173 2579 5174 6317 5174 2580 5174 2580 5175 6317 5175 2582 5175 2581 5176 2580 5176 2582 5176 6317 5177 2583 5177 2582 5177 2582 5178 2583 5178 2584 5178 2546 5179 2582 5179 2584 5179 2583 5180 2585 5180 2584 5180 2584 5181 2585 5181 2586 5181 2545 5182 2584 5182 2586 5182 2585 5183 6316 5183 2586 5183 2586 5184 6316 5184 2588 5184 2587 5185 2586 5185 2588 5185 6316 5186 6315 5186 2588 5186 2588 5187 6315 5187 2589 5187 3957 5188 2588 5188 2589 5188 6315 5189 2590 5189 2589 5189 2589 5190 2590 5190 2591 5190 2544 5191 2589 5191 2591 5191 2590 5192 2593 5192 2591 5192 2591 5193 2593 5193 2595 5193 2592 5194 2591 5194 2595 5194 2593 5195 2596 5195 2595 5195 2595 5196 2596 5196 2598 5196 2594 5197 2595 5197 2598 5197 2596 5198 2597 5198 2598 5198 2598 5199 2597 5199 2599 5199 2543 5200 2598 5200 2599 5200 2597 5201 6314 5201 2599 5201 2599 5202 6314 5202 2600 5202 2542 5203 2599 5203 2600 5203 6314 5204 6313 5204 2600 5204 2600 5205 6313 5205 2601 5205 2541 5206 2600 5206 2601 5206 6313 5207 6312 5207 2601 5207 2601 5208 6312 5208 2602 5208 3964 5209 2601 5209 2602 5209 6312 5210 2603 5210 2602 5210 2602 5211 2603 5211 2604 5211 2540 5212 2602 5212 2604 5212 2603 5213 6311 5213 2604 5213 2604 5214 6311 5214 2605 5214 3966 5215 2604 5215 2605 5215 6311 5216 2606 5216 2605 5216 2605 5217 2606 5217 2607 5217 3997 5218 2605 5218 2607 5218 2606 5219 2608 5219 2607 5219 2607 5220 2608 5220 2609 5220 2539 5221 2607 5221 2609 5221 2608 5222 6308 5222 2609 5222 2609 5223 6308 5223 2610 5223 3967 5224 2609 5224 2610 5224 6308 5225 6279 5225 2610 5225 2610 5226 6279 5226 2613 5226 3998 5227 2610 5227 2613 5227 6279 5228 2611 5228 2613 5228 2613 5229 2611 5229 2614 5229 2612 5230 2613 5230 2614 5230 2611 5231 2615 5231 2614 5231 2614 5232 2615 5232 2616 5232 3999 5233 2614 5233 2616 5233 2615 5234 2617 5234 2616 5234 2616 5235 2617 5235 2538 5235 4000 5236 2616 5236 2538 5236 2617 5237 2618 5237 2538 5237 2538 5238 2618 5238 2537 5238 3969 5239 2538 5239 2537 5239 2618 5240 2619 5240 2537 5240 2537 5241 2619 5241 2536 5241 4001 5242 2537 5242 2536 5242 2619 5243 2620 5243 2536 5243 2536 5244 2620 5244 2535 5244 3970 5245 2536 5245 2535 5245 2620 5246 6302 5246 2535 5246 2535 5247 6302 5247 2621 5247 3971 5248 2535 5248 2621 5248 6302 5249 2622 5249 2621 5249 2621 5250 2622 5250 2623 5250 2534 5251 2621 5251 2623 5251 2622 5252 6278 5252 2623 5252 2623 5253 6278 5253 2625 5253 2624 5254 2623 5254 2625 5254 6278 5255 6277 5255 2625 5255 2625 5256 6277 5256 2627 5256 2533 5257 2625 5257 2627 5257 6277 5258 6300 5258 2627 5258 2627 5259 6300 
[Extraction-damaged span: several thousand raw triangle vertex/normal index values from a mesh asset (apparently a COLLADA/SDF model file added in this commit). The diff markers and surrounding XML markup were lost, so the original hunk cannot be reconstructed here; the geometry index data itself carries no editable content.]
7644 3433 7644 3432 7644 3870 7645 3561 7645 3433 7645 3870 7646 3434 7646 3561 7646 3870 7647 3873 7647 3434 7647 3870 7648 3872 7648 3873 7648 3870 7649 3874 7649 3872 7649 3870 7650 3552 7650 3874 7650 3870 7651 3435 7651 3552 7651 3870 7652 3548 7652 3435 7652 3870 7653 3437 7653 3548 7653 3870 7654 3545 7654 3437 7654 3870 7655 3439 7655 3545 7655 3870 7656 3543 7656 3439 7656 3870 7657 3441 7657 3543 7657 3870 7658 3541 7658 3441 7658 3870 7659 3443 7659 3541 7659 3870 7660 3445 7660 3443 7660 3870 7661 3537 7661 3445 7661 3870 7662 3446 7662 3537 7662 3870 7663 3533 7663 3446 7663 2783 7664 3875 7664 3876 7664 3876 7665 3875 7665 3877 7665 3878 7666 3877 7666 3425 7666 2789 7667 3425 7667 3906 7667 2790 7668 3906 7668 3423 7668 2792 7669 3423 7669 3880 7669 3879 7670 3880 7670 3422 7670 2794 7671 3422 7671 3881 7671 2797 7672 3881 7672 3421 7672 2800 7673 3421 7673 3882 7673 3883 7674 3882 7674 3585 7674 2803 7675 3585 7675 3884 7675 3907 7676 3884 7676 3420 7676 3885 7677 3420 7677 3592 7677 3886 7678 3592 7678 3419 7678 3887 7679 3419 7679 3595 7679 3908 7680 3595 7680 3909 7680 3910 7681 3909 7681 3911 7681 2812 7682 3911 7682 3912 7682 2815 7683 3912 7683 3414 7683 3913 7684 3414 7684 3415 7684 2818 7685 3415 7685 3914 7685 2819 7686 3914 7686 3888 7686 3915 7687 3888 7687 3890 7687 3889 7688 3890 7688 3463 7688 2823 7689 3463 7689 3472 7689 3891 7690 3472 7690 3892 7690 2827 7691 3892 7691 3893 7691 2828 7692 3893 7692 3477 7692 2832 7693 3477 7693 3894 7693 3916 7694 3894 7694 3481 7694 3917 7695 3481 7695 3483 7695 3901 7696 3483 7696 3462 7696 3895 7697 3901 7697 3462 7697 3895 7698 3488 7698 3901 7698 3901 7699 3488 7699 3490 7699 3460 7700 3901 7700 3490 7700 3460 7701 3459 7701 3901 7701 3901 7702 3459 7702 3458 7702 3457 7703 3901 7703 3458 7703 3457 7704 3456 7704 3901 7704 3901 7705 3456 7705 3503 7705 3896 7706 3901 7706 3503 7706 3896 7707 3897 7707 3901 7707 3901 7708 3897 7708 3898 7708 3899 7709 3901 7709 3898 7709 3899 7710 3509 7710 3901 7710 3901 7711 3509 7711 3900 7711 3453 7712 3901 7712 3900 7712 3453 7713 3514 7713 3901 7713 3901 7714 3514 7714 3902 7714 3451 7715 3901 7715 3902 7715 3451 7716 3520 7716 3901 7716 3901 7717 3520 7717 3522 7717 3903 7718 3901 7718 3522 7718 3903 7719 3904 7719 3901 7719 3901 7720 3904 7720 3527 7720 3905 7721 3901 7721 3527 7721 3905 7722 3531 7722 3901 7722 3901 7723 3531 7723 3533 7723 3876 7724 3877 7724 3878 7724 3878 7725 3425 7725 2789 7725 2789 7726 3906 7726 2790 7726 2790 7727 3423 7727 2792 7727 2792 7728 3880 7728 3879 7728 3879 7729 3422 7729 2794 7729 2794 7730 3881 7730 2797 7730 2797 7731 3421 7731 2800 7731 2800 7732 3882 7732 3883 7732 3883 7733 3585 7733 2803 7733 2803 7734 3884 7734 3907 7734 3907 7735 3420 7735 3885 7735 3885 7736 3592 7736 3886 7736 3886 7737 3419 7737 3887 7737 3887 7738 3595 7738 3908 7738 3908 7739 3909 7739 3910 7739 3910 7740 3911 7740 2812 7740 2812 7741 3912 7741 2815 7741 2815 7742 3414 7742 3913 7742 3913 7743 3415 7743 2818 7743 2818 7744 3914 7744 2819 7744 2819 7745 3888 7745 3915 7745 3915 7746 3890 7746 3889 7746 3889 7747 3463 7747 2823 7747 2823 7748 3472 7748 3891 7748 3891 7749 3892 7749 2827 7749 2827 7750 3893 7750 2828 7750 2828 7751 3477 7751 2832 7751 2832 7752 3894 7752 3916 7752 3916 7753 3481 7753 3917 7753 3917 7754 3483 7754 3901 7754 3920 7755 3917 7755 3901 7755 3920 7756 3918 7756 3917 7756 3920 7757 2836 7757 3918 7757 3920 7758 2838 7758 2836 7758 3920 7759 3919 7759 2838 7759 3920 7760 2840 7760 3919 7760 3920 7761 3922 7761 2840 7761 3920 7762 3921 7762 
3922 7762 3920 7763 3924 7763 3921 7763 3920 7764 3923 7764 3924 7764 3920 7765 2847 7765 3923 7765 3920 7766 3926 7766 2847 7766 3920 7767 3925 7767 3926 7767 3920 7768 2850 7768 3925 7768 3920 7769 3927 7769 2850 7769 3920 7770 3928 7770 3927 7770 3920 7771 2855 7771 3928 7771 3920 7772 3929 7772 2855 7772 3920 7773 2856 7773 3929 7773 3920 7774 2859 7774 2856 7774 3920 7775 2862 7775 2859 7775 3920 7776 3931 7776 2862 7776 3920 7777 3930 7777 3931 7777 3920 7778 3932 7778 3930 7778 3920 7779 2867 7779 3932 7779 3920 7780 2870 7780 2867 7780 3920 7781 2872 7781 2870 7781 3920 7782 3933 7782 2872 7782 3920 7783 1211 7783 3933 7783 2783 7784 2782 7784 6486 7784 6486 7785 2782 7785 2781 7785 2779 7786 6486 7786 2781 7786 2779 7787 3934 7787 6486 7787 6486 7788 3934 7788 3935 7788 2775 7789 6486 7789 3935 7789 2775 7790 3936 7790 6486 7790 6486 7791 3936 7791 3937 7791 2771 7792 6486 7792 3937 7792 2771 7793 2770 7793 6486 7793 6486 7794 2770 7794 3938 7794 2764 7795 6486 7795 3938 7795 2764 7796 2762 7796 6486 7796 6486 7797 2762 7797 2761 7797 2760 7798 6486 7798 2761 7798 2760 7799 2758 7799 6486 7799 6486 7800 2758 7800 3939 7800 2754 7801 6486 7801 3939 7801 2754 7802 2753 7802 6486 7802 6486 7803 2753 7803 2751 7803 3940 7804 6486 7804 2751 7804 3940 7805 3941 7805 6486 7805 6486 7806 3941 7806 3942 7806 2747 7807 6486 7807 3942 7807 2747 7808 2746 7808 6486 7808 6486 7809 2746 7809 2692 7809 1211 7810 6486 7810 2692 7810 3943 7811 3992 7811 6529 7811 3943 7812 3994 7812 3992 7812 3943 7813 4027 7813 3994 7813 6204 7814 6211 7814 3992 7814 3992 7815 6211 7815 3944 7815 6529 7816 3944 7816 3950 7816 6529 7817 3992 7817 3944 7817 3944 7818 3238 7818 3950 7818 3950 7819 3238 7819 2434 7819 3945 7820 3950 7820 2434 7820 3945 7821 980 7821 3950 7821 3950 7822 980 7822 983 7822 997 7823 3950 7823 983 7823 997 7824 3946 7824 3950 7824 3950 7825 3946 7825 985 7825 3947 7826 3950 7826 985 7826 3947 7827 1000 7827 3950 7827 3950 7828 1000 7828 3948 7828 3949 7829 3950 7829 3948 7829 3949 7830 993 7830 3950 7830 3950 7831 993 7831 991 7831 3951 7832 3950 7832 991 7832 3951 7833 3952 7833 3950 7833 3950 7834 3952 7834 3953 7834 3954 7835 3950 7835 3953 7835 3954 7836 3955 7836 3950 7836 3950 7837 3955 7837 1008 7837 3956 7838 3950 7838 1008 7838 3956 7839 1019 7839 3950 7839 3950 7840 1019 7840 1020 7840 1016 7841 3950 7841 1020 7841 1016 7842 824 7842 3950 7842 3238 7843 3794 7843 2434 7843 6481 7844 3681 7844 4027 7844 6481 7845 2542 7845 3681 7845 6481 7846 2543 7846 2542 7846 6481 7847 2594 7847 2543 7847 6481 7848 2592 7848 2594 7848 6481 7849 2544 7849 2592 7849 6481 7850 3957 7850 2544 7850 6481 7851 2587 7851 3957 7851 6481 7852 2545 7852 2587 7852 6481 7853 2546 7853 2545 7853 6481 7854 2581 7854 2546 7854 6481 7855 3958 7855 2581 7855 6481 7856 2576 7856 3958 7856 6481 7857 2574 7857 2576 7857 6481 7858 2547 7858 2574 7858 6481 7859 2569 7859 2547 7859 6481 7860 2567 7860 2569 7860 6481 7861 2548 7861 2567 7861 6481 7862 3959 7862 2548 7862 6481 7863 2551 7863 3959 7863 6481 7864 2552 7864 2551 7864 6481 7865 2553 7865 2552 7865 6481 7866 2555 7866 2553 7866 6481 7867 2557 7867 2555 7867 6481 7868 2512 7868 2557 7868 6481 7869 2513 7869 2512 7869 6481 7870 2515 7870 2513 7870 6481 7871 2516 7871 2515 7871 6481 7872 3960 7872 2516 7872 3681 7873 2542 7873 3961 7873 3961 7874 2542 7874 2541 7874 3962 7875 2541 7875 3964 7875 3963 7876 3964 7876 2540 7876 3965 7877 2540 7877 3966 7877 3996 7878 3966 7878 3997 7878 3687 7879 3997 7879 2539 7879 3690 7880 2539 7880 3967 7880 3692 7881 3967 
7881 3998 7881 3693 7882 3998 7882 2612 7882 3694 7883 2612 7883 3999 7883 3696 7884 3999 7884 4000 7884 3968 7885 4000 7885 3969 7885 3697 7886 3969 7886 4001 7886 4002 7887 4001 7887 3970 7887 3700 7888 3970 7888 3971 7888 3701 7889 3971 7889 2534 7889 3705 7890 2534 7890 2624 7890 3710 7891 2624 7891 2533 7891 3711 7892 2533 7892 2626 7892 3713 7893 2626 7893 3972 7893 3973 7894 3972 7894 4003 7894 4004 7895 4003 7895 3974 7895 4005 7896 3974 7896 2532 7896 3717 7897 2532 7897 2531 7897 3975 7898 2531 7898 4006 7898 3721 7899 4006 7899 4007 7899 3723 7900 4007 7900 3976 7900 3726 7901 3976 7901 4008 7901 4009 7902 4008 7902 3977 7902 3729 7903 3977 7903 3978 7903 3979 7904 3978 7904 3980 7904 3992 7905 3980 7905 4022 7905 3992 7906 3979 7906 3980 7906 3992 7907 3981 7907 3979 7907 3992 7908 3732 7908 3981 7908 3992 7909 3982 7909 3732 7909 3992 7910 3734 7910 3982 7910 3992 7911 3737 7911 3734 7911 3992 7912 3738 7912 3737 7912 3992 7913 3983 7913 3738 7913 3992 7914 3742 7914 3983 7914 3992 7915 3746 7915 3742 7915 3992 7916 3984 7916 3746 7916 3992 7917 3986 7917 3984 7917 3992 7918 3985 7918 3986 7918 3992 7919 3987 7919 3985 7919 3992 7920 3752 7920 3987 7920 3992 7921 3988 7921 3752 7921 3992 7922 3755 7922 3988 7922 3992 7923 3758 7923 3755 7923 3992 7924 3760 7924 3758 7924 3992 7925 3989 7925 3760 7925 3992 7926 3763 7926 3989 7926 3992 7927 3765 7927 3763 7927 3992 7928 3990 7928 3765 7928 3992 7929 3770 7929 3990 7929 3992 7930 3991 7930 3770 7930 3992 7931 3993 7931 3991 7931 3992 7932 3772 7932 3993 7932 3992 7933 3995 7933 3772 7933 3992 7934 3994 7934 3995 7934 3961 7935 2541 7935 3962 7935 3962 7936 3964 7936 3963 7936 3963 7937 2540 7937 3965 7937 3965 7938 3966 7938 3996 7938 3996 7939 3997 7939 3687 7939 3687 7940 2539 7940 3690 7940 3690 7941 3967 7941 3692 7941 3692 7942 3998 7942 3693 7942 3693 7943 2612 7943 3694 7943 3694 7944 3999 7944 3696 7944 3696 7945 4000 7945 3968 7945 3968 7946 3969 7946 3697 7946 3697 7947 4001 7947 4002 7947 4002 7948 3970 7948 3700 7948 3700 7949 3971 7949 3701 7949 3701 7950 2534 7950 3705 7950 3705 7951 2624 7951 3710 7951 3710 7952 2533 7952 3711 7952 3711 7953 2626 7953 3713 7953 3713 7954 3972 7954 3973 7954 3973 7955 4003 7955 4004 7955 4004 7956 3974 7956 4005 7956 4005 7957 2532 7957 3717 7957 3717 7958 2531 7958 3975 7958 3975 7959 4006 7959 3721 7959 3721 7960 4007 7960 3723 7960 3723 7961 3976 7961 3726 7961 3726 7962 4008 7962 4009 7962 4009 7963 3977 7963 3729 7963 3729 7964 3978 7964 3979 7964 3980 7965 2648 7965 4022 7965 4022 7966 2648 7966 4010 7966 2651 7967 4022 7967 4010 7967 2651 7968 4011 7968 4022 7968 4022 7969 4011 7969 4013 7969 4012 7970 4022 7970 4013 7970 4012 7971 2527 7971 4022 7971 4022 7972 2527 7972 2525 7972 4014 7973 4022 7973 2525 7973 4014 7974 4015 7974 4022 7974 4022 7975 4015 7975 4016 7975 4017 7976 4022 7976 4016 7976 4017 7977 2523 7977 4022 7977 4022 7978 2523 7978 4018 7978 4019 7979 4022 7979 4018 7979 4019 7980 4020 7980 4022 7980 4022 7981 4020 7981 2673 7981 4021 7982 4022 7982 2673 7982 4021 7983 2675 7983 4022 7983 4022 7984 2675 7984 2521 7984 2681 7985 4022 7985 2521 7985 2681 7986 2520 7986 4022 7986 4022 7987 2520 7987 4023 7987 2518 7988 4022 7988 4023 7988 2518 7989 4024 7989 4022 7989 4022 7990 4024 7990 4025 7990 4026 7991 4022 7991 4025 7991 4026 7992 3960 7992 4022 7992 3681 7993 4028 7993 4027 7993 4027 7994 4028 7994 3676 7994 3675 7995 4027 7995 3676 7995 3675 7996 3672 7996 4027 7996 4027 7997 3672 7997 4029 7997 3668 7998 4027 7998 4029 7998 3668 7999 4030 7999 4027 7999 
4027 8000 4030 8000 3666 8000 3665 8001 4027 8001 3666 8001 3665 8002 3664 8002 4027 8002 4027 8003 3664 8003 3660 8003 4031 8004 4027 8004 3660 8004 4031 8005 4032 8005 4027 8005 4027 8006 4032 8006 4033 8006 3655 8007 4027 8007 4033 8007 3655 8008 4034 8008 4027 8008 4027 8009 4034 8009 4035 8009 3653 8010 4027 8010 4035 8010 3653 8011 4036 8011 4027 8011 4027 8012 4036 8012 3649 8012 4037 8013 4027 8013 3649 8013 4037 8014 4038 8014 4027 8014 4027 8015 4038 8015 4040 8015 4039 8016 4027 8016 4040 8016 4039 8017 3599 8017 4027 8017 4027 8018 3599 8018 4041 8018 3994 8019 4027 8019 4041 8019 6265 8020 6256 8020 3857 8020 3857 8021 6256 8021 3841 8021 6211 8022 4042 8022 3944 8022 3944 8023 4042 8023 3792 8023 455 8024 2405 8024 706 8024 706 8025 2405 8025 5861 8025 4265 8026 4233 8026 4264 8026 4265 8027 4045 8027 4233 8027 4265 8028 4095 8028 4045 8028 4045 8029 4095 8029 4044 8029 4043 8030 4044 8030 6335 8030 4043 8031 4045 8031 4044 8031 4043 8032 4046 8032 4045 8032 4045 8033 4046 8033 4233 8033 4233 8034 4046 8034 6334 8034 4232 8035 6334 8035 4231 8035 4047 8036 4231 8036 4229 8036 4227 8037 4229 8037 4048 8037 4226 8038 4048 8038 4049 8038 4224 8039 4049 8039 6371 8039 4223 8040 6371 8040 4050 8040 4221 8041 4050 8041 6370 8041 4051 8042 6370 8042 6369 8042 4218 8043 6369 8043 6367 8043 4052 8044 6367 8044 4053 8044 4213 8045 4053 8045 4214 8045 4054 8046 4214 8046 4055 8046 4210 8047 4055 8047 6366 8047 4208 8048 6366 8048 4056 8048 4207 8049 4056 8049 6365 8049 4205 8050 6365 8050 6364 8050 4202 8051 6364 8051 4057 8051 4201 8052 4057 8052 4059 8052 4058 8053 4059 8053 4060 8053 4199 8054 4060 8054 6363 8054 4197 8055 6363 8055 4198 8055 4195 8056 4198 8056 6362 8056 4194 8057 6362 8057 6361 8057 4192 8058 6361 8058 6360 8058 4061 8059 6360 8059 6359 8059 4062 8060 6359 8060 6358 8060 4189 8061 6358 8061 6357 8061 4187 8062 6357 8062 4186 8062 4184 8063 4186 8063 6356 8063 4183 8064 6356 8064 6355 8064 4181 8065 6355 8065 4179 8065 4063 8066 4179 8066 4064 8066 4178 8067 4064 8067 6354 8067 4176 8068 6354 8068 4066 8068 4065 8069 4066 8069 4067 8069 4175 8070 4067 8070 4173 8070 4172 8071 4173 8071 4170 8071 4171 8072 4170 8072 6352 8072 4068 8073 6352 8073 6351 8073 4167 8074 6351 8074 4069 8074 4166 8075 4069 8075 4070 8075 4164 8076 4070 8076 4163 8076 4162 8077 4163 8077 6350 8077 4161 8078 6350 8078 4071 8078 4159 8079 4071 8079 6349 8079 4157 8080 6349 8080 4072 8080 4155 8081 4072 8081 4073 8081 4154 8082 4073 8082 6348 8082 4152 8083 6348 8083 4074 8083 4075 8084 4074 8084 4076 8084 4077 8085 4076 8085 4078 8085 4149 8086 4078 8086 4148 8086 4146 8087 4148 8087 4079 8087 4144 8088 4079 8088 6347 8088 4143 8089 6347 8089 4142 8089 4080 8090 4142 8090 6346 8090 4141 8091 6346 8091 6345 8091 4139 8092 6345 8092 6344 8092 4138 8093 6344 8093 4081 8093 4136 8094 4081 8094 4134 8094 4133 8095 4134 8095 4082 8095 4083 8096 4082 8096 6343 8096 4130 8097 6343 8097 4084 8097 4127 8098 4084 8098 6372 8098 4128 8099 6372 8099 4124 8099 4122 8100 4124 8100 6342 8100 4121 8101 6342 8101 4085 8101 4119 8102 4085 8102 4117 8102 4116 8103 4117 8103 4115 8103 4114 8104 4115 8104 4086 8104 4113 8105 4086 8105 4087 8105 4111 8106 4087 8106 6341 8106 4110 8107 6341 8107 6340 8107 4108 8108 6340 8108 4088 8108 4106 8109 4088 8109 6339 8109 4089 8110 6339 8110 4090 8110 4104 8111 4090 8111 4091 8111 4103 8112 4091 8112 4102 8112 4101 8113 4102 8113 6338 8113 4092 8114 6338 8114 4093 8114 4099 8115 4093 8115 6336 8115 4094 8116 6336 8116 6337 8116 4097 8117 6337 8117 6335 8117 4044 8118 4097 
8118 6335 8118 4044 8119 4096 8119 4097 8119 4044 8120 4095 8120 4096 8120 4096 8121 4263 8121 4097 8121 4097 8122 4263 8122 4094 8122 6337 8123 4097 8123 4094 8123 4263 8124 4098 8124 4094 8124 4094 8125 4098 8125 4099 8125 6336 8126 4094 8126 4099 8126 4098 8127 4262 8127 4099 8127 4099 8128 4262 8128 4092 8128 4093 8129 4099 8129 4092 8129 4262 8130 4261 8130 4092 8130 4092 8131 4261 8131 4101 8131 6338 8132 4092 8132 4101 8132 4261 8133 4100 8133 4101 8133 4101 8134 4100 8134 4103 8134 4102 8135 4101 8135 4103 8135 4100 8136 4260 8136 4103 8136 4103 8137 4260 8137 4104 8137 4091 8138 4103 8138 4104 8138 4260 8139 4105 8139 4104 8139 4104 8140 4105 8140 4089 8140 4090 8141 4104 8141 4089 8141 4105 8142 4259 8142 4089 8142 4089 8143 4259 8143 4106 8143 6339 8144 4089 8144 4106 8144 4259 8145 4107 8145 4106 8145 4106 8146 4107 8146 4108 8146 4088 8147 4106 8147 4108 8147 4107 8148 4109 8148 4108 8148 4108 8149 4109 8149 4110 8149 6340 8150 4108 8150 4110 8150 4109 8151 4257 8151 4110 8151 4110 8152 4257 8152 4111 8152 6341 8153 4110 8153 4111 8153 4257 8154 4256 8154 4111 8154 4111 8155 4256 8155 4113 8155 4087 8156 4111 8156 4113 8156 4256 8157 4112 8157 4113 8157 4113 8158 4112 8158 4114 8158 4086 8159 4113 8159 4114 8159 4112 8160 4255 8160 4114 8160 4114 8161 4255 8161 4116 8161 4115 8162 4114 8162 4116 8162 4255 8163 4118 8163 4116 8163 4116 8164 4118 8164 4119 8164 4117 8165 4116 8165 4119 8165 4118 8166 4120 8166 4119 8166 4119 8167 4120 8167 4121 8167 4085 8168 4119 8168 4121 8168 4120 8169 4123 8169 4121 8169 4121 8170 4123 8170 4122 8170 6342 8171 4121 8171 4122 8171 4123 8172 4125 8172 4122 8172 4122 8173 4125 8173 4128 8173 4124 8174 4122 8174 4128 8174 4125 8175 4126 8175 4128 8175 4128 8176 4126 8176 4127 8176 6372 8177 4128 8177 4127 8177 4126 8178 4129 8178 4127 8178 4127 8179 4129 8179 4130 8179 4084 8180 4127 8180 4130 8180 4129 8181 4131 8181 4130 8181 4130 8182 4131 8182 4083 8182 6343 8183 4130 8183 4083 8183 4131 8184 4132 8184 4083 8184 4083 8185 4132 8185 4133 8185 4082 8186 4083 8186 4133 8186 4132 8187 4135 8187 4133 8187 4133 8188 4135 8188 4136 8188 4134 8189 4133 8189 4136 8189 4135 8190 4137 8190 4136 8190 4136 8191 4137 8191 4138 8191 4081 8192 4136 8192 4138 8192 4137 8193 4254 8193 4138 8193 4138 8194 4254 8194 4139 8194 6344 8195 4138 8195 4139 8195 4254 8196 4253 8196 4139 8196 4139 8197 4253 8197 4141 8197 6345 8198 4139 8198 4141 8198 4253 8199 4140 8199 4141 8199 4141 8200 4140 8200 4080 8200 6346 8201 4141 8201 4080 8201 4140 8202 4252 8202 4080 8202 4080 8203 4252 8203 4143 8203 4142 8204 4080 8204 4143 8204 4252 8205 4251 8205 4143 8205 4143 8206 4251 8206 4144 8206 6347 8207 4143 8207 4144 8207 4251 8208 4145 8208 4144 8208 4144 8209 4145 8209 4146 8209 4079 8210 4144 8210 4146 8210 4145 8211 4147 8211 4146 8211 4146 8212 4147 8212 4149 8212 4148 8213 4146 8213 4149 8213 4147 8214 4150 8214 4149 8214 4149 8215 4150 8215 4077 8215 4078 8216 4149 8216 4077 8216 4150 8217 4151 8217 4077 8217 4077 8218 4151 8218 4075 8218 4076 8219 4077 8219 4075 8219 4151 8220 4153 8220 4075 8220 4075 8221 4153 8221 4152 8221 4074 8222 4075 8222 4152 8222 4153 8223 4250 8223 4152 8223 4152 8224 4250 8224 4154 8224 6348 8225 4152 8225 4154 8225 4250 8226 4156 8226 4154 8226 4154 8227 4156 8227 4155 8227 4073 8228 4154 8228 4155 8228 4156 8229 4249 8229 4155 8229 4155 8230 4249 8230 4157 8230 4072 8231 4155 8231 4157 8231 4249 8232 4158 8232 4157 8232 4157 8233 4158 8233 4159 8233 6349 8234 4157 8234 4159 8234 4158 8235 4247 8235 4159 8235 4159 8236 4247 8236 4161 8236 
4071 8237 4159 8237 4161 8237 4247 8238 4160 8238 4161 8238 4161 8239 4160 8239 4162 8239 6350 8240 4161 8240 4162 8240 4160 8241 4246 8241 4162 8241 4162 8242 4246 8242 4164 8242 4163 8243 4162 8243 4164 8243 4246 8244 4165 8244 4164 8244 4164 8245 4165 8245 4166 8245 4070 8246 4164 8246 4166 8246 4165 8247 4244 8247 4166 8247 4166 8248 4244 8248 4167 8248 4069 8249 4166 8249 4167 8249 4244 8250 4245 8250 4167 8250 4167 8251 4245 8251 4068 8251 6351 8252 4167 8252 4068 8252 4245 8253 4168 8253 4068 8253 4068 8254 4168 8254 4171 8254 6352 8255 4068 8255 4171 8255 4168 8256 4169 8256 4171 8256 4171 8257 4169 8257 4172 8257 4170 8258 4171 8258 4172 8258 4169 8259 4234 8259 4172 8259 4172 8260 4234 8260 4175 8260 4173 8261 4172 8261 4175 8261 4234 8262 4174 8262 4175 8262 4175 8263 4174 8263 4065 8263 4067 8264 4175 8264 4065 8264 4174 8265 4235 8265 4065 8265 4065 8266 4235 8266 4176 8266 4066 8267 4065 8267 4176 8267 4235 8268 4236 8268 4176 8268 4176 8269 4236 8269 4178 8269 6354 8270 4176 8270 4178 8270 4236 8271 4177 8271 4178 8271 4178 8272 4177 8272 4063 8272 4064 8273 4178 8273 4063 8273 4177 8274 4180 8274 4063 8274 4063 8275 4180 8275 4181 8275 4179 8276 4063 8276 4181 8276 4180 8277 4237 8277 4181 8277 4181 8278 4237 8278 4183 8278 6355 8279 4181 8279 4183 8279 4237 8280 4182 8280 4183 8280 4183 8281 4182 8281 4184 8281 6356 8282 4183 8282 4184 8282 4182 8283 4185 8283 4184 8283 4184 8284 4185 8284 4187 8284 4186 8285 4184 8285 4187 8285 4185 8286 4188 8286 4187 8286 4187 8287 4188 8287 4189 8287 6357 8288 4187 8288 4189 8288 4188 8289 4238 8289 4189 8289 4189 8290 4238 8290 4062 8290 6358 8291 4189 8291 4062 8291 4238 8292 4190 8292 4062 8292 4062 8293 4190 8293 4061 8293 6359 8294 4062 8294 4061 8294 4190 8295 4191 8295 4061 8295 4061 8296 4191 8296 4192 8296 6360 8297 4061 8297 4192 8297 4191 8298 4193 8298 4192 8298 4192 8299 4193 8299 4194 8299 6361 8300 4192 8300 4194 8300 4193 8301 4239 8301 4194 8301 4194 8302 4239 8302 4195 8302 6362 8303 4194 8303 4195 8303 4239 8304 4196 8304 4195 8304 4195 8305 4196 8305 4197 8305 4198 8306 4195 8306 4197 8306 4196 8307 4240 8307 4197 8307 4197 8308 4240 8308 4199 8308 6363 8309 4197 8309 4199 8309 4240 8310 4241 8310 4199 8310 4199 8311 4241 8311 4058 8311 4060 8312 4199 8312 4058 8312 4241 8313 4242 8313 4058 8313 4058 8314 4242 8314 4201 8314 4059 8315 4058 8315 4201 8315 4242 8316 4200 8316 4201 8316 4201 8317 4200 8317 4202 8317 4057 8318 4201 8318 4202 8318 4200 8319 4203 8319 4202 8319 4202 8320 4203 8320 4205 8320 6364 8321 4202 8321 4205 8321 4203 8322 4204 8322 4205 8322 4205 8323 4204 8323 4207 8323 6365 8324 4205 8324 4207 8324 4204 8325 4206 8325 4207 8325 4207 8326 4206 8326 4208 8326 4056 8327 4207 8327 4208 8327 4206 8328 4209 8328 4208 8328 4208 8329 4209 8329 4210 8329 6366 8330 4208 8330 4210 8330 4209 8331 4211 8331 4210 8331 4210 8332 4211 8332 4054 8332 4055 8333 4210 8333 4054 8333 4211 8334 4212 8334 4054 8334 4054 8335 4212 8335 4213 8335 4214 8336 4054 8336 4213 8336 4212 8337 4215 8337 4213 8337 4213 8338 4215 8338 4052 8338 4053 8339 4213 8339 4052 8339 4215 8340 4216 8340 4052 8340 4052 8341 4216 8341 4218 8341 6367 8342 4052 8342 4218 8342 4216 8343 4217 8343 4218 8343 4218 8344 4217 8344 4051 8344 6369 8345 4218 8345 4051 8345 4217 8346 4219 8346 4051 8346 4051 8347 4219 8347 4221 8347 6370 8348 4051 8348 4221 8348 4219 8349 4220 8349 4221 8349 4221 8350 4220 8350 4223 8350 4050 8351 4221 8351 4223 8351 4220 8352 4222 8352 4223 8352 4223 8353 4222 8353 4224 8353 6371 8354 4223 8354 4224 8354 4222 8355 4225 
8355 4224 8355 4224 8356 4225 8356 4226 8356 4049 8357 4224 8357 4226 8357 4225 8358 4243 8358 4226 8358 4226 8359 4243 8359 4227 8359 4048 8360 4226 8360 4227 8360 4243 8361 4228 8361 4227 8361 4227 8362 4228 8362 4047 8362 4229 8363 4227 8363 4047 8363 4228 8364 4230 8364 4047 8364 4047 8365 4230 8365 4232 8365 4231 8366 4047 8366 4232 8366 4230 8367 4264 8367 4232 8367 4232 8368 4264 8368 4233 8368 6334 8369 4232 8369 4233 8369 834 8370 4169 8370 4248 8370 834 8371 4234 8371 4169 8371 834 8372 4174 8372 4234 8372 834 8373 4235 8373 4174 8373 834 8374 4236 8374 4235 8374 834 8375 4177 8375 4236 8375 834 8376 4180 8376 4177 8376 834 8377 4237 8377 4180 8377 834 8378 4182 8378 4237 8378 834 8379 4185 8379 4182 8379 834 8380 4188 8380 4185 8380 834 8381 4238 8381 4188 8381 834 8382 4190 8382 4238 8382 834 8383 4191 8383 4190 8383 834 8384 4193 8384 4191 8384 834 8385 4239 8385 4193 8385 834 8386 4196 8386 4239 8386 834 8387 4240 8387 4196 8387 834 8388 4241 8388 4240 8388 834 8389 4242 8389 4241 8389 834 8390 4200 8390 4242 8390 834 8391 4203 8391 4200 8391 834 8392 4204 8392 4203 8392 834 8393 4206 8393 4204 8393 834 8394 4209 8394 4206 8394 834 8395 4211 8395 4209 8395 834 8396 4212 8396 4211 8396 834 8397 4215 8397 4212 8397 834 8398 4216 8398 4215 8398 834 8399 4217 8399 4216 8399 834 8400 4219 8400 4217 8400 834 8401 4220 8401 4219 8401 834 8402 4222 8402 4220 8402 834 8403 4225 8403 4222 8403 834 8404 4243 8404 4225 8404 834 8405 4228 8405 4243 8405 834 8406 4230 8406 4228 8406 834 8407 4264 8407 4230 8407 4169 8408 4168 8408 4248 8408 4248 8409 4168 8409 4245 8409 4244 8410 4248 8410 4245 8410 4244 8411 4165 8411 4248 8411 4248 8412 4165 8412 4246 8412 4160 8413 4248 8413 4246 8413 4160 8414 4247 8414 4248 8414 4248 8415 4247 8415 4158 8415 4249 8416 4248 8416 4158 8416 4249 8417 4156 8417 4248 8417 4248 8418 4156 8418 4250 8418 4153 8419 4248 8419 4250 8419 4153 8420 4151 8420 4248 8420 4248 8421 4151 8421 4150 8421 4147 8422 4248 8422 4150 8422 4147 8423 4145 8423 4248 8423 4248 8424 4145 8424 4251 8424 4258 8425 4251 8425 4252 8425 4140 8426 4258 8426 4252 8426 4140 8427 4253 8427 4258 8427 4258 8428 4253 8428 4254 8428 4137 8429 4258 8429 4254 8429 4137 8430 4135 8430 4258 8430 4258 8431 4135 8431 4132 8431 4131 8432 4258 8432 4132 8432 4131 8433 4129 8433 4258 8433 4258 8434 4129 8434 4126 8434 4125 8435 4258 8435 4126 8435 4125 8436 4123 8436 4258 8436 4258 8437 4123 8437 4120 8437 4118 8438 4258 8438 4120 8438 4118 8439 4255 8439 4258 8439 4258 8440 4255 8440 4112 8440 4256 8441 4258 8441 4112 8441 4256 8442 4257 8442 4258 8442 4258 8443 4257 8443 4109 8443 4107 8444 4258 8444 4109 8444 4107 8445 4259 8445 4258 8445 4258 8446 4259 8446 4105 8446 4260 8447 4258 8447 4105 8447 4260 8448 4100 8448 4258 8448 4258 8449 4100 8449 4261 8449 4262 8450 4258 8450 4261 8450 4262 8451 4098 8451 4258 8451 4258 8452 4098 8452 4263 8452 4096 8453 4258 8453 4263 8453 4096 8454 4095 8454 4258 8454 4258 8455 4095 8455 4265 8455 4264 8456 4258 8456 4265 8456 4248 8457 4251 8457 4258 8457 6353 8458 6368 8458 4248 8458 4248 8459 6368 8459 834 8459 4266 8460 4269 8460 4450 8460 4266 8461 4267 8461 4269 8461 4266 8462 4494 8462 4267 8462 4267 8463 4494 8463 4268 8463 6409 8464 4268 8464 4318 8464 6409 8465 4267 8465 4268 8465 6409 8466 6408 8466 4267 8466 4267 8467 6408 8467 4269 8467 4269 8468 6408 8468 4271 8468 4270 8469 4271 8469 4272 8469 4448 8470 4272 8470 6411 8470 4447 8471 6411 8471 6410 8471 4446 8472 6410 8472 4445 8472 4444 8473 4445 8473 4273 8473 4442 8474 4273 8474 4274 8474 4441 
8475 4274 8475 4276 8475 4275 8476 4276 8476 4438 8476 4439 8477 4438 8477 6414 8477 4436 8478 6414 8478 6413 8478 4435 8479 6413 8479 6415 8479 4432 8480 6415 8480 4433 8480 4434 8481 4433 8481 4277 8481 4430 8482 4277 8482 4279 8482 4278 8483 4279 8483 4280 8483 4427 8484 4280 8484 4281 8484 4426 8485 4281 8485 6416 8485 4425 8486 6416 8486 6417 8486 4423 8487 6417 8487 4422 8487 4420 8488 4422 8488 6418 8488 4419 8489 6418 8489 6419 8489 4418 8490 6419 8490 4282 8490 4416 8491 4282 8491 6420 8491 4414 8492 6420 8492 6421 8492 4415 8493 6421 8493 4413 8493 4412 8494 4413 8494 4410 8494 4408 8495 4410 8495 4409 8495 4406 8496 4409 8496 4284 8496 4283 8497 4284 8497 4405 8497 4402 8498 4405 8498 6422 8498 4285 8499 6422 8499 4286 8499 4401 8500 4286 8500 6423 8500 4399 8501 6423 8501 4287 8501 4397 8502 4287 8502 4288 8502 4395 8503 4288 8503 4394 8503 4289 8504 4394 8504 4290 8504 4392 8505 4290 8505 4291 8505 4391 8506 4291 8506 6425 8506 4292 8507 6425 8507 4293 8507 4389 8508 4293 8508 4390 8508 4388 8509 4390 8509 4294 8509 4386 8510 4294 8510 6426 8510 4387 8511 6426 8511 4295 8511 4384 8512 4295 8512 4296 8512 4383 8513 4296 8513 4382 8513 4381 8514 4382 8514 6427 8514 4380 8515 6427 8515 6428 8515 4379 8516 6428 8516 4378 8516 4377 8517 4378 8517 4298 8517 4297 8518 4298 8518 6429 8518 4373 8519 6429 8519 4372 8519 4299 8520 4372 8520 4369 8520 4370 8521 4369 8521 6430 8521 4367 8522 6430 8522 6476 8522 4365 8523 6476 8523 4300 8523 4364 8524 4300 8524 4301 8524 4302 8525 4301 8525 4303 8525 4362 8526 4303 8526 4304 8526 4360 8527 4304 8527 6475 8527 4358 8528 6475 8528 4305 8528 4359 8529 4305 8529 4306 8529 4355 8530 4306 8530 4356 8530 4353 8531 4356 8531 4307 8531 4352 8532 4307 8532 4308 8532 4351 8533 4308 8533 6473 8533 4349 8534 6473 8534 6472 8534 4347 8535 6472 8535 6471 8535 4346 8536 6471 8536 4345 8536 4343 8537 4345 8537 4342 8537 4309 8538 4342 8538 4310 8538 4339 8539 4310 8539 4311 8539 4337 8540 4311 8540 4338 8540 4312 8541 4338 8541 4313 8541 4336 8542 4313 8542 6470 8542 4333 8543 6470 8543 4334 8543 4331 8544 4334 8544 4314 8544 4329 8545 4314 8545 6469 8545 4328 8546 6469 8546 6468 8546 4327 8547 6468 8547 6467 8547 4324 8548 6467 8548 4323 8548 4322 8549 4323 8549 4316 8549 4315 8550 4316 8550 6407 8550 4317 8551 6407 8551 4318 8551 4268 8552 4317 8552 4318 8552 4268 8553 4319 8553 4317 8553 4268 8554 4494 8554 4319 8554 4319 8555 4320 8555 4317 8555 4317 8556 4320 8556 4315 8556 6407 8557 4317 8557 4315 8557 4320 8558 4321 8558 4315 8558 4315 8559 4321 8559 4322 8559 4316 8560 4315 8560 4322 8560 4321 8561 4493 8561 4322 8561 4322 8562 4493 8562 4324 8562 4323 8563 4322 8563 4324 8563 4493 8564 4325 8564 4324 8564 4324 8565 4325 8565 4327 8565 6467 8566 4324 8566 4327 8566 4325 8567 4326 8567 4327 8567 4327 8568 4326 8568 4328 8568 6468 8569 4327 8569 4328 8569 4326 8570 4492 8570 4328 8570 4328 8571 4492 8571 4329 8571 6469 8572 4328 8572 4329 8572 4492 8573 4330 8573 4329 8573 4329 8574 4330 8574 4331 8574 4314 8575 4329 8575 4331 8575 4330 8576 4332 8576 4331 8576 4331 8577 4332 8577 4333 8577 4334 8578 4331 8578 4333 8578 4332 8579 4490 8579 4333 8579 4333 8580 4490 8580 4336 8580 6470 8581 4333 8581 4336 8581 4490 8582 4335 8582 4336 8582 4336 8583 4335 8583 4312 8583 4313 8584 4336 8584 4312 8584 4335 8585 4489 8585 4312 8585 4312 8586 4489 8586 4337 8586 4338 8587 4312 8587 4337 8587 4489 8588 4488 8588 4337 8588 4337 8589 4488 8589 4339 8589 4311 8590 4337 8590 4339 8590 4488 8591 4340 8591 4339 8591 4339 8592 4340 8592 4309 8592 4310 8593 4339 8593 
4309 8593 4340 8594 4341 8594 4309 8594 4309 8595 4341 8595 4343 8595 4342 8596 4309 8596 4343 8596 4341 8597 4344 8597 4343 8597 4343 8598 4344 8598 4346 8598 4345 8599 4343 8599 4346 8599 4344 8600 4487 8600 4346 8600 4346 8601 4487 8601 4347 8601 6471 8602 4346 8602 4347 8602 4487 8603 4348 8603 4347 8603 4347 8604 4348 8604 4349 8604 6472 8605 4347 8605 4349 8605 4348 8606 4486 8606 4349 8606 4349 8607 4486 8607 4351 8607 6473 8608 4349 8608 4351 8608 4486 8609 4350 8609 4351 8609 4351 8610 4350 8610 4352 8610 4308 8611 4351 8611 4352 8611 4350 8612 4485 8612 4352 8612 4352 8613 4485 8613 4353 8613 4307 8614 4352 8614 4353 8614 4485 8615 4354 8615 4353 8615 4353 8616 4354 8616 4355 8616 4356 8617 4353 8617 4355 8617 4354 8618 4357 8618 4355 8618 4355 8619 4357 8619 4359 8619 4306 8620 4355 8620 4359 8620 4357 8621 4484 8621 4359 8621 4359 8622 4484 8622 4358 8622 4305 8623 4359 8623 4358 8623 4484 8624 4483 8624 4358 8624 4358 8625 4483 8625 4360 8625 6475 8626 4358 8626 4360 8626 4483 8627 4361 8627 4360 8627 4360 8628 4361 8628 4362 8628 4304 8629 4360 8629 4362 8629 4361 8630 4482 8630 4362 8630 4362 8631 4482 8631 4302 8631 4303 8632 4362 8632 4302 8632 4482 8633 4363 8633 4302 8633 4302 8634 4363 8634 4364 8634 4301 8635 4302 8635 4364 8635 4363 8636 4481 8636 4364 8636 4364 8637 4481 8637 4365 8637 4300 8638 4364 8638 4365 8638 4481 8639 4366 8639 4365 8639 4365 8640 4366 8640 4367 8640 6476 8641 4365 8641 4367 8641 4366 8642 4368 8642 4367 8642 4367 8643 4368 8643 4370 8643 6430 8644 4367 8644 4370 8644 4368 8645 4371 8645 4370 8645 4370 8646 4371 8646 4299 8646 4369 8647 4370 8647 4299 8647 4371 8648 4374 8648 4299 8648 4299 8649 4374 8649 4373 8649 4372 8650 4299 8650 4373 8650 4374 8651 4375 8651 4373 8651 4373 8652 4375 8652 4297 8652 6429 8653 4373 8653 4297 8653 4375 8654 4480 8654 4297 8654 4297 8655 4480 8655 4377 8655 4298 8656 4297 8656 4377 8656 4480 8657 4376 8657 4377 8657 4377 8658 4376 8658 4379 8658 4378 8659 4377 8659 4379 8659 4376 8660 4479 8660 4379 8660 4379 8661 4479 8661 4380 8661 6428 8662 4379 8662 4380 8662 4479 8663 4478 8663 4380 8663 4380 8664 4478 8664 4381 8664 6427 8665 4380 8665 4381 8665 4478 8666 4477 8666 4381 8666 4381 8667 4477 8667 4383 8667 4382 8668 4381 8668 4383 8668 4477 8669 4476 8669 4383 8669 4383 8670 4476 8670 4384 8670 4296 8671 4383 8671 4384 8671 4476 8672 4385 8672 4384 8672 4384 8673 4385 8673 4387 8673 4295 8674 4384 8674 4387 8674 4385 8675 4475 8675 4387 8675 4387 8676 4475 8676 4386 8676 6426 8677 4387 8677 4386 8677 4475 8678 4473 8678 4386 8678 4386 8679 4473 8679 4388 8679 4294 8680 4386 8680 4388 8680 4473 8681 4474 8681 4388 8681 4388 8682 4474 8682 4389 8682 4390 8683 4388 8683 4389 8683 4474 8684 4472 8684 4389 8684 4389 8685 4472 8685 4292 8685 4293 8686 4389 8686 4292 8686 4472 8687 4471 8687 4292 8687 4292 8688 4471 8688 4391 8688 6425 8689 4292 8689 4391 8689 4471 8690 4470 8690 4391 8690 4391 8691 4470 8691 4392 8691 4291 8692 4391 8692 4392 8692 4470 8693 4393 8693 4392 8693 4392 8694 4393 8694 4289 8694 4290 8695 4392 8695 4289 8695 4393 8696 4469 8696 4289 8696 4289 8697 4469 8697 4395 8697 4394 8698 4289 8698 4395 8698 4469 8699 4396 8699 4395 8699 4395 8700 4396 8700 4397 8700 4288 8701 4395 8701 4397 8701 4396 8702 4398 8702 4397 8702 4397 8703 4398 8703 4399 8703 4287 8704 4397 8704 4399 8704 4398 8705 4400 8705 4399 8705 4399 8706 4400 8706 4401 8706 6423 8707 4399 8707 4401 8707 4400 8708 4451 8708 4401 8708 4401 8709 4451 8709 4285 8709 4286 8710 4401 8710 4285 8710 4451 8711 4403 8711 4285 8711 4285 
8712 4403 8712 4402 8712 6422 8713 4285 8713 4402 8713 4403 8714 4404 8714 4402 8714 4402 8715 4404 8715 4283 8715 4405 8716 4402 8716 4283 8716 4404 8717 4452 8717 4283 8717 4283 8718 4452 8718 4406 8718 4284 8719 4283 8719 4406 8719 4452 8720 4407 8720 4406 8720 4406 8721 4407 8721 4408 8721 4409 8722 4406 8722 4408 8722 4407 8723 4411 8723 4408 8723 4408 8724 4411 8724 4412 8724 4410 8725 4408 8725 4412 8725 4411 8726 4454 8726 4412 8726 4412 8727 4454 8727 4415 8727 4413 8728 4412 8728 4415 8728 4454 8729 4453 8729 4415 8729 4415 8730 4453 8730 4414 8730 6421 8731 4415 8731 4414 8731 4453 8732 4417 8732 4414 8732 4414 8733 4417 8733 4416 8733 6420 8734 4414 8734 4416 8734 4417 8735 4456 8735 4416 8735 4416 8736 4456 8736 4418 8736 4282 8737 4416 8737 4418 8737 4456 8738 4458 8738 4418 8738 4418 8739 4458 8739 4419 8739 6419 8740 4418 8740 4419 8740 4458 8741 4457 8741 4419 8741 4419 8742 4457 8742 4420 8742 6418 8743 4419 8743 4420 8743 4457 8744 4421 8744 4420 8744 4420 8745 4421 8745 4423 8745 4422 8746 4420 8746 4423 8746 4421 8747 4459 8747 4423 8747 4423 8748 4459 8748 4425 8748 6417 8749 4423 8749 4425 8749 4459 8750 4424 8750 4425 8750 4425 8751 4424 8751 4426 8751 6416 8752 4425 8752 4426 8752 4424 8753 4460 8753 4426 8753 4426 8754 4460 8754 4427 8754 4281 8755 4426 8755 4427 8755 4460 8756 4428 8756 4427 8756 4427 8757 4428 8757 4278 8757 4280 8758 4427 8758 4278 8758 4428 8759 4429 8759 4278 8759 4278 8760 4429 8760 4430 8760 4279 8761 4278 8761 4430 8761 4429 8762 4461 8762 4430 8762 4430 8763 4461 8763 4434 8763 4277 8764 4430 8764 4434 8764 4461 8765 4431 8765 4434 8765 4434 8766 4431 8766 4432 8766 4433 8767 4434 8767 4432 8767 4431 8768 4462 8768 4432 8768 4432 8769 4462 8769 4435 8769 6415 8770 4432 8770 4435 8770 4462 8771 4463 8771 4435 8771 4435 8772 4463 8772 4436 8772 6413 8773 4435 8773 4436 8773 4463 8774 4437 8774 4436 8774 4436 8775 4437 8775 4439 8775 6414 8776 4436 8776 4439 8776 4437 8777 4465 8777 4439 8777 4439 8778 4465 8778 4275 8778 4438 8779 4439 8779 4275 8779 4465 8780 4464 8780 4275 8780 4275 8781 4464 8781 4441 8781 4276 8782 4275 8782 4441 8782 4464 8783 4440 8783 4441 8783 4441 8784 4440 8784 4442 8784 4274 8785 4441 8785 4442 8785 4440 8786 4443 8786 4442 8786 4442 8787 4443 8787 4444 8787 4273 8788 4442 8788 4444 8788 4443 8789 4466 8789 4444 8789 4444 8790 4466 8790 4446 8790 4445 8791 4444 8791 4446 8791 4466 8792 4467 8792 4446 8792 4446 8793 4467 8793 4447 8793 6410 8794 4446 8794 4447 8794 4467 8795 4468 8795 4447 8795 4447 8796 4468 8796 4448 8796 6411 8797 4447 8797 4448 8797 4468 8798 4449 8798 4448 8798 4448 8799 4449 8799 4270 8799 4272 8800 4448 8800 4270 8800 4449 8801 4450 8801 4270 8801 4270 8802 4450 8802 4269 8802 4271 8803 4270 8803 4269 8803 4455 8804 4400 8804 2476 8804 4455 8805 4451 8805 4400 8805 4455 8806 4403 8806 4451 8806 4455 8807 4404 8807 4403 8807 4455 8808 4452 8808 4404 8808 4455 8809 4407 8809 4452 8809 4455 8810 4411 8810 4407 8810 4455 8811 4454 8811 4411 8811 4455 8812 4453 8812 4454 8812 4455 8813 4417 8813 4453 8813 4455 8814 4456 8814 4417 8814 4455 8815 4458 8815 4456 8815 4455 8816 4457 8816 4458 8816 4455 8817 4421 8817 4457 8817 4455 8818 4459 8818 4421 8818 4455 8819 4424 8819 4459 8819 4455 8820 4460 8820 4424 8820 4455 8821 4428 8821 4460 8821 4455 8822 4429 8822 4428 8822 4455 8823 4461 8823 4429 8823 4455 8824 4431 8824 4461 8824 4455 8825 4462 8825 4431 8825 4455 8826 4463 8826 4462 8826 4455 8827 4437 8827 4463 8827 4455 8828 4465 8828 4437 8828 4455 8829 4464 8829 4465 8829 4455 8830 4440 8830 
4464 8830 4455 8831 4443 8831 4440 8831 4455 8832 4466 8832 4443 8832 4455 8833 4467 8833 4466 8833 4455 8834 4468 8834 4467 8834 4455 8835 4449 8835 4468 8835 4455 8836 4450 8836 4449 8836 4400 8837 4398 8837 2476 8837 2476 8838 4398 8838 4396 8838 4469 8839 2476 8839 4396 8839 4469 8840 4393 8840 2476 8840 2476 8841 4393 8841 4470 8841 4471 8842 2476 8842 4470 8842 4471 8843 4472 8843 2476 8843 2476 8844 4472 8844 4474 8844 4473 8845 2476 8845 4474 8845 4473 8846 4475 8846 2476 8846 2476 8847 4475 8847 4385 8847 4476 8848 2476 8848 4385 8848 4476 8849 4477 8849 2476 8849 2476 8850 4477 8850 4478 8850 4479 8851 2476 8851 4478 8851 4479 8852 4376 8852 2476 8852 2476 8853 4376 8853 4480 8853 4491 8854 4480 8854 4375 8854 4374 8855 4491 8855 4375 8855 4374 8856 4371 8856 4491 8856 4491 8857 4371 8857 4368 8857 4366 8858 4491 8858 4368 8858 4366 8859 4481 8859 4491 8859 4491 8860 4481 8860 4363 8860 4482 8861 4491 8861 4363 8861 4482 8862 4361 8862 4491 8862 4491 8863 4361 8863 4483 8863 4484 8864 4491 8864 4483 8864 4484 8865 4357 8865 4491 8865 4491 8866 4357 8866 4354 8866 4485 8867 4491 8867 4354 8867 4485 8868 4350 8868 4491 8868 4491 8869 4350 8869 4486 8869 4348 8870 4491 8870 4486 8870 4348 8871 4487 8871 4491 8871 4491 8872 4487 8872 4344 8872 4341 8873 4491 8873 4344 8873 4341 8874 4340 8874 4491 8874 4491 8875 4340 8875 4488 8875 4489 8876 4491 8876 4488 8876 4489 8877 4335 8877 4491 8877 4491 8878 4335 8878 4490 8878 4332 8879 4491 8879 4490 8879 4332 8880 4330 8880 4491 8880 4491 8881 4330 8881 4492 8881 4326 8882 4491 8882 4492 8882 4326 8883 4325 8883 4491 8883 4491 8884 4325 8884 4493 8884 4321 8885 4491 8885 4493 8885 4321 8886 4320 8886 4491 8886 4491 8887 4320 8887 4319 8887 4494 8888 4491 8888 4319 8888 4494 8889 4266 8889 4491 8889 4491 8890 4266 8890 4450 8890 2476 8891 4480 8891 4491 8891 6474 8892 6424 8892 4491 8892 4491 8893 6424 8893 2476 8893 810 8894 4495 8894 831 8894 831 8895 4495 8895 5796 8895 4491 8896 5796 8896 6474 8896 4491 8897 831 8897 5796 8897 4491 8898 4450 8898 831 8898 831 8899 4450 8899 2445 8899 2445 8900 4450 8900 4455 8900 4495 8901 6552 8901 5796 8901 5796 8902 6552 8902 4496 8902 4496 8903 6552 8903 5843 8903 5796 8904 6463 8904 6474 8904 6452 8905 4678 8905 4677 8905 6452 8906 4497 8906 4678 8906 6452 8907 6451 8907 4497 8907 4497 8908 6451 8908 4498 8908 5787 8909 4498 8909 5817 8909 5787 8910 4497 8910 4498 8910 5787 8911 5786 8911 4497 8911 4497 8912 5786 8912 4678 8912 4678 8913 5786 8913 5816 8913 4499 8914 5816 8914 5785 8914 4676 8915 5785 8915 5784 8915 4500 8916 5784 8916 4501 8916 4673 8917 4501 8917 4503 8917 4502 8918 4503 8918 4505 8918 4504 8919 4505 8919 5782 8919 4506 8920 5782 8920 5781 8920 4670 8921 5781 8921 5780 8921 4667 8922 5780 8922 5779 8922 4665 8923 5779 8923 4664 8923 4507 8924 4664 8924 5812 8924 4663 8925 5812 8925 5811 8925 4659 8926 5811 8926 4660 8926 4661 8927 4660 8927 4509 8927 4508 8928 4509 8928 4510 8928 4656 8929 4510 8929 4655 8929 4511 8930 4655 8930 4653 8930 4652 8931 4653 8931 5767 8931 4651 8932 5767 8932 4512 8932 4648 8933 4512 8933 4646 8933 4647 8934 4646 8934 5768 8934 4641 8935 5768 8935 4642 8935 4643 8936 4642 8936 4640 8936 4639 8937 4640 8937 4638 8937 4513 8938 4638 8938 4515 8938 4514 8939 4515 8939 4516 8939 4635 8940 4516 8940 5769 8940 4632 8941 5769 8941 4517 8941 4631 8942 4517 8942 4518 8942 4519 8943 4518 8943 5770 8943 4630 8944 5770 8944 4520 8944 4627 8945 4520 8945 4522 8945 4521 8946 4522 8946 4523 8946 4626 8947 4523 8947 4524 8947 4624 8948 4524 8948 5772 8948 4622 8949 
5772 8949 5774 8949 4620 8950 5774 8950 5773 8950 4618 8951 5773 8951 5775 8951 4525 8952 5775 8952 4615 8952 4616 8953 4615 8953 4613 8953 4614 8954 4613 8954 4526 8954 4611 8955 4526 8955 4527 8955 4608 8956 4527 8956 5776 8956 4607 8957 5776 8957 4606 8957 4604 8958 4606 8958 4603 8958 4528 8959 4603 8959 4529 8959 4602 8960 4529 8960 4530 8960 4531 8961 4530 8961 4532 8961 4600 8962 4532 8962 5838 8962 4597 8963 5838 8963 5837 8963 4596 8964 5837 8964 4595 8964 4593 8965 4595 8965 5836 8965 4592 8966 5836 8966 5835 8966 4590 8967 5835 8967 5834 8967 4533 8968 5834 8968 5833 8968 4534 8969 5833 8969 4588 8969 4587 8970 4588 8970 4535 8970 4584 8971 4535 8971 4585 8971 4581 8972 4585 8972 4536 8972 4580 8973 4536 8973 4537 8973 4579 8974 4537 8974 5832 8974 4578 8975 5832 8975 5831 8975 4576 8976 5831 8976 5830 8976 4575 8977 5830 8977 5829 8977 4573 8978 5829 8978 5828 8978 4571 8979 5828 8979 5827 8979 4538 8980 5827 8980 5826 8980 4569 8981 5826 8981 5825 8981 4566 8982 5825 8982 4567 8982 4564 8983 4567 8983 4539 8983 4562 8984 4539 8984 4540 8984 4561 8985 4540 8985 4560 8985 4541 8986 4560 8986 5793 8986 4542 8987 5793 8987 5823 8987 4557 8988 5823 8988 5792 8988 4556 8989 5792 8989 4554 8989 4555 8990 4554 8990 4553 8990 4543 8991 4553 8991 4544 8991 4551 8992 4544 8992 5790 8992 4549 8993 5790 8993 5789 8993 4547 8994 5789 8994 5788 8994 4546 8995 5788 8995 5819 8995 4545 8996 5819 8996 5817 8996 4498 8997 4545 8997 5817 8997 4498 8998 6389 8998 4545 8998 4498 8999 6451 8999 6389 8999 6389 9000 6390 9000 4545 9000 4545 9001 6390 9001 4546 9001 5819 9002 4545 9002 4546 9002 6390 9003 4548 9003 4546 9003 4546 9004 4548 9004 4547 9004 5788 9005 4546 9005 4547 9005 4548 9006 6391 9006 4547 9006 4547 9007 6391 9007 4549 9007 5789 9008 4547 9008 4549 9008 6391 9009 4550 9009 4549 9009 4549 9010 4550 9010 4551 9010 5790 9011 4549 9011 4551 9011 4550 9012 6447 9012 4551 9012 4551 9013 6447 9013 4543 9013 4544 9014 4551 9014 4543 9014 6447 9015 4552 9015 4543 9015 4543 9016 4552 9016 4555 9016 4553 9017 4543 9017 4555 9017 4552 9018 6446 9018 4555 9018 4555 9019 6446 9019 4556 9019 4554 9020 4555 9020 4556 9020 6446 9021 6394 9021 4556 9021 4556 9022 6394 9022 4557 9022 5792 9023 4556 9023 4557 9023 6394 9024 4558 9024 4557 9024 4557 9025 4558 9025 4542 9025 5823 9026 4557 9026 4542 9026 4558 9027 6395 9027 4542 9027 4542 9028 6395 9028 4541 9028 5793 9029 4542 9029 4541 9029 6395 9030 4559 9030 4541 9030 4541 9031 4559 9031 4561 9031 4560 9032 4541 9032 4561 9032 4559 9033 4563 9033 4561 9033 4561 9034 4563 9034 4562 9034 4540 9035 4561 9035 4562 9035 4563 9036 6396 9036 4562 9036 4562 9037 6396 9037 4564 9037 4539 9038 4562 9038 4564 9038 6396 9039 4565 9039 4564 9039 4564 9040 4565 9040 4566 9040 4567 9041 4564 9041 4566 9041 4565 9042 4568 9042 4566 9042 4566 9043 4568 9043 4569 9043 5825 9044 4566 9044 4569 9044 4568 9045 4570 9045 4569 9045 4569 9046 4570 9046 4538 9046 5826 9047 4569 9047 4538 9047 4570 9048 6397 9048 4538 9048 4538 9049 6397 9049 4571 9049 5827 9050 4538 9050 4571 9050 6397 9051 4572 9051 4571 9051 4571 9052 4572 9052 4573 9052 5828 9053 4571 9053 4573 9053 4572 9054 4574 9054 4573 9054 4573 9055 4574 9055 4575 9055 5829 9056 4573 9056 4575 9056 4574 9057 6398 9057 4575 9057 4575 9058 6398 9058 4576 9058 5830 9059 4575 9059 4576 9059 6398 9060 4577 9060 4576 9060 4576 9061 4577 9061 4578 9061 5831 9062 4576 9062 4578 9062 4577 9063 6399 9063 4578 9063 4578 9064 6399 9064 4579 9064 5832 9065 4578 9065 4579 9065 6399 9066 6400 9066 4579 9066 4579 9067 6400 9067 4580 
9067 4537 9068 4579 9068 4580 9068 6400 9069 4582 9069 4580 9069 4580 9070 4582 9070 4581 9070 4536 9071 4580 9071 4581 9071 4582 9072 4583 9072 4581 9072 4581 9073 4583 9073 4584 9073 4585 9074 4581 9074 4584 9074 4583 9075 6401 9075 4584 9075 4584 9076 6401 9076 4587 9076 4535 9077 4584 9077 4587 9077 6401 9078 4586 9078 4587 9078 4587 9079 4586 9079 4534 9079 4588 9080 4587 9080 4534 9080 4586 9081 4589 9081 4534 9081 4534 9082 4589 9082 4533 9082 5833 9083 4534 9083 4533 9083 4589 9084 6402 9084 4533 9084 4533 9085 6402 9085 4590 9085 5834 9086 4533 9086 4590 9086 6402 9087 4591 9087 4590 9087 4590 9088 4591 9088 4592 9088 5835 9089 4590 9089 4592 9089 4591 9090 6403 9090 4592 9090 4592 9091 6403 9091 4593 9091 5836 9092 4592 9092 4593 9092 6403 9093 4594 9093 4593 9093 4593 9094 4594 9094 4596 9094 4595 9095 4593 9095 4596 9095 4594 9096 4598 9096 4596 9096 4596 9097 4598 9097 4597 9097 5837 9098 4596 9098 4597 9098 4598 9099 4599 9099 4597 9099 4597 9100 4599 9100 4600 9100 5838 9101 4597 9101 4600 9101 4599 9102 6404 9102 4600 9102 4600 9103 6404 9103 4531 9103 4532 9104 4600 9104 4531 9104 6404 9105 4601 9105 4531 9105 4531 9106 4601 9106 4602 9106 4530 9107 4531 9107 4602 9107 4601 9108 6406 9108 4602 9108 4602 9109 6406 9109 4528 9109 4529 9110 4602 9110 4528 9110 6406 9111 6405 9111 4528 9111 4528 9112 6405 9112 4604 9112 4603 9113 4528 9113 4604 9113 6405 9114 4605 9114 4604 9114 4604 9115 4605 9115 4607 9115 4606 9116 4604 9116 4607 9116 4605 9117 4609 9117 4607 9117 4607 9118 4609 9118 4608 9118 5776 9119 4607 9119 4608 9119 4609 9120 4610 9120 4608 9120 4608 9121 4610 9121 4611 9121 4527 9122 4608 9122 4611 9122 4610 9123 6373 9123 4611 9123 4611 9124 6373 9124 4614 9124 4526 9125 4611 9125 4614 9125 6373 9126 4612 9126 4614 9126 4614 9127 4612 9127 4616 9127 4613 9128 4614 9128 4616 9128 4612 9129 4617 9129 4616 9129 4616 9130 4617 9130 4525 9130 4615 9131 4616 9131 4525 9131 4617 9132 4619 9132 4525 9132 4525 9133 4619 9133 4618 9133 5775 9134 4525 9134 4618 9134 4619 9135 4621 9135 4618 9135 4618 9136 4621 9136 4620 9136 5773 9137 4618 9137 4620 9137 4621 9138 6374 9138 4620 9138 4620 9139 6374 9139 4622 9139 5774 9140 4620 9140 4622 9140 6374 9141 6375 9141 4622 9141 4622 9142 6375 9142 4624 9142 5772 9143 4622 9143 4624 9143 6375 9144 4623 9144 4624 9144 4624 9145 4623 9145 4626 9145 4524 9146 4624 9146 4626 9146 4623 9147 4625 9147 4626 9147 4626 9148 4625 9148 4521 9148 4523 9149 4626 9149 4521 9149 4625 9150 4628 9150 4521 9150 4521 9151 4628 9151 4627 9151 4522 9152 4521 9152 4627 9152 4628 9153 4629 9153 4627 9153 4627 9154 4629 9154 4630 9154 4520 9155 4627 9155 4630 9155 4629 9156 6376 9156 4630 9156 4630 9157 6376 9157 4519 9157 5770 9158 4630 9158 4519 9158 6376 9159 6377 9159 4519 9159 4519 9160 6377 9160 4631 9160 4518 9161 4519 9161 4631 9161 6377 9162 6378 9162 4631 9162 4631 9163 6378 9163 4632 9163 4517 9164 4631 9164 4632 9164 6378 9165 4633 9165 4632 9165 4632 9166 4633 9166 4635 9166 5769 9167 4632 9167 4635 9167 4633 9168 4634 9168 4635 9168 4635 9169 4634 9169 4514 9169 4516 9170 4635 9170 4514 9170 4634 9171 4636 9171 4514 9171 4514 9172 4636 9172 4513 9172 4515 9173 4514 9173 4513 9173 4636 9174 4637 9174 4513 9174 4513 9175 4637 9175 4639 9175 4638 9176 4513 9176 4639 9176 4637 9177 6379 9177 4639 9177 4639 9178 6379 9178 4643 9178 4640 9179 4639 9179 4643 9179 6379 9180 6380 9180 4643 9180 4643 9181 6380 9181 4641 9181 4642 9182 4643 9182 4641 9182 6380 9183 4644 9183 4641 9183 4641 9184 4644 9184 4647 9184 5768 9185 4641 9185 4647 9185 4644 9186 
4645 9186 4647 9186 4647 9187 4645 9187 4648 9187 4646 9188 4647 9188 4648 9188 4645 9189 4649 9189 4648 9189 4648 9190 4649 9190 4651 9190 4512 9191 4648 9191 4651 9191 4649 9192 4650 9192 4651 9192 4651 9193 4650 9193 4652 9193 5767 9194 4651 9194 4652 9194 4650 9195 4654 9195 4652 9195 4652 9196 4654 9196 4511 9196 4653 9197 4652 9197 4511 9197 4654 9198 6381 9198 4511 9198 4511 9199 6381 9199 4656 9199 4655 9200 4511 9200 4656 9200 6381 9201 4657 9201 4656 9201 4656 9202 4657 9202 4508 9202 4510 9203 4656 9203 4508 9203 4657 9204 4658 9204 4508 9204 4508 9205 4658 9205 4661 9205 4509 9206 4508 9206 4661 9206 4658 9207 4662 9207 4661 9207 4661 9208 4662 9208 4659 9208 4660 9209 4661 9209 4659 9209 4662 9210 6382 9210 4659 9210 4659 9211 6382 9211 4663 9211 5811 9212 4659 9212 4663 9212 6382 9213 6445 9213 4663 9213 4663 9214 6445 9214 4507 9214 5812 9215 4663 9215 4507 9215 6445 9216 4666 9216 4507 9216 4507 9217 4666 9217 4665 9217 4664 9218 4507 9218 4665 9218 4666 9219 4668 9219 4665 9219 4665 9220 4668 9220 4667 9220 5779 9221 4665 9221 4667 9221 4668 9222 4669 9222 4667 9222 4667 9223 4669 9223 4670 9223 5780 9224 4667 9224 4670 9224 4669 9225 6385 9225 4670 9225 4670 9226 6385 9226 4506 9226 5781 9227 4670 9227 4506 9227 6385 9228 4671 9228 4506 9228 4506 9229 4671 9229 4504 9229 5782 9230 4506 9230 4504 9230 4671 9231 4672 9231 4504 9231 4504 9232 4672 9232 4502 9232 4505 9233 4504 9233 4502 9233 4672 9234 6443 9234 4502 9234 4502 9235 6443 9235 4673 9235 4503 9236 4502 9236 4673 9236 6443 9237 4674 9237 4673 9237 4673 9238 4674 9238 4500 9238 4501 9239 4673 9239 4500 9239 4674 9240 4675 9240 4500 9240 4500 9241 4675 9241 4676 9241 5784 9242 4500 9242 4676 9242 4675 9243 6387 9243 4676 9243 4676 9244 6387 9244 4499 9244 5785 9245 4676 9245 4499 9245 6387 9246 4677 9246 4499 9246 4499 9247 4677 9247 4678 9247 5816 9248 4499 9248 4678 9248 4679 9249 4683 9249 6116 9249 4679 9250 4680 9250 4683 9250 4679 9251 4716 9251 4680 9251 4680 9252 4716 9252 4681 9252 5736 9253 4681 9253 5735 9253 5736 9254 4680 9254 4681 9254 5736 9255 4682 9255 4680 9255 4680 9256 4682 9256 4683 9256 4683 9257 4682 9257 5709 9257 4684 9258 5709 9258 4685 9258 4867 9259 4685 9259 5710 9259 4868 9260 5710 9260 4686 9260 4864 9261 4686 9261 4863 9261 4862 9262 4863 9262 4687 9262 4859 9263 4687 9263 5737 9263 4857 9264 5737 9264 5712 9264 4856 9265 5712 9265 5738 9265 4854 9266 5738 9266 5739 9266 4852 9267 5739 9267 5714 9267 4850 9268 5714 9268 5715 9268 4849 9269 5715 9269 4847 9269 4845 9270 4847 9270 4844 9270 4843 9271 4844 9271 5741 9271 4842 9272 5741 9272 4839 9272 4840 9273 4839 9273 4688 9273 4836 9274 4688 9274 5718 9274 4834 9275 5718 9275 4689 9275 4833 9276 4689 9276 5719 9276 4829 9277 5719 9277 4690 9277 4827 9278 4690 9278 5720 9278 4828 9279 5720 9279 4826 9279 4825 9280 4826 9280 4824 9280 4823 9281 4824 9281 4820 9281 4819 9282 4820 9282 5721 9282 4818 9283 5721 9283 5722 9283 4817 9284 5722 9284 4816 9284 4815 9285 4816 9285 5725 9285 4812 9286 5725 9286 5724 9286 4809 9287 5724 9287 4691 9287 4810 9288 4691 9288 4692 9288 4693 9289 4692 9289 5726 9289 4806 9290 5726 9290 4807 9290 4801 9291 4807 9291 4802 9291 4803 9292 4802 9292 4694 9292 4799 9293 4694 9293 4695 9293 4798 9294 4695 9294 4696 9294 4697 9295 4696 9295 4794 9295 4795 9296 4794 9296 4793 9296 4792 9297 4793 9297 5727 9297 4791 9298 5727 9298 4788 9298 4786 9299 4788 9299 5728 9299 4783 9300 5728 9300 4698 9300 4784 9301 4698 9301 4699 9301 4781 9302 4699 9302 5766 9302 4780 9303 5766 9303 4778 9303 4700 9304 4778 9304 4701 
9304 4776 9305 4701 9305 4702 9305 4775 9306 4702 9306 5765 9306 4773 9307 5765 9307 4774 9307 4772 9308 4774 9308 4703 9308 4769 9309 4703 9309 5763 9309 4767 9310 5763 9310 4704 9310 4766 9311 4704 9311 4705 9311 4765 9312 4705 9312 5762 9312 4763 9313 5762 9313 4706 9313 4761 9314 4706 9314 4707 9314 4760 9315 4707 9315 4708 9315 4758 9316 4708 9316 4756 9316 4757 9317 4756 9317 5761 9317 4753 9318 5761 9318 4752 9318 4750 9319 4752 9319 5760 9319 4749 9320 5760 9320 5759 9320 4747 9321 5759 9321 5758 9321 4746 9322 5758 9322 4745 9322 4709 9323 4745 9323 5757 9323 4742 9324 5757 9324 4710 9324 4741 9325 4710 9325 5756 9325 4740 9326 5756 9326 4711 9326 4739 9327 4711 9327 5755 9327 4737 9328 5755 9328 5754 9328 4736 9329 5754 9329 4735 9329 4734 9330 4735 9330 4733 9330 4712 9331 4733 9331 5731 9331 4731 9332 5731 9332 4730 9332 4729 9333 4730 9333 5703 9333 4727 9334 5703 9334 4725 9334 4726 9335 4725 9335 5732 9335 4722 9336 5732 9336 4713 9336 4721 9337 4713 9337 5705 9337 4720 9338 5705 9338 5706 9338 4718 9339 5706 9339 4714 9339 4717 9340 4714 9340 5735 9340 4681 9341 4717 9341 5735 9341 4681 9342 4715 9342 4717 9342 4681 9343 4716 9343 4715 9343 4715 9344 6115 9344 4717 9344 4717 9345 6115 9345 4718 9345 4714 9346 4717 9346 4718 9346 6115 9347 6114 9347 4718 9347 4718 9348 6114 9348 4720 9348 5706 9349 4718 9349 4720 9349 6114 9350 4719 9350 4720 9350 4720 9351 4719 9351 4721 9351 5705 9352 4720 9352 4721 9352 4719 9353 6118 9353 4721 9353 4721 9354 6118 9354 4722 9354 4713 9355 4721 9355 4722 9355 6118 9356 4723 9356 4722 9356 4722 9357 4723 9357 4726 9357 5732 9358 4722 9358 4726 9358 4723 9359 4724 9359 4726 9359 4726 9360 4724 9360 4727 9360 4725 9361 4726 9361 4727 9361 4724 9362 6112 9362 4727 9362 4727 9363 6112 9363 4729 9363 5703 9364 4727 9364 4729 9364 6112 9365 4728 9365 4729 9365 4729 9366 4728 9366 4731 9366 4730 9367 4729 9367 4731 9367 4728 9368 4732 9368 4731 9368 4731 9369 4732 9369 4712 9369 5731 9370 4731 9370 4712 9370 4732 9371 6110 9371 4712 9371 4712 9372 6110 9372 4734 9372 4733 9373 4712 9373 4734 9373 6110 9374 6109 9374 4734 9374 4734 9375 6109 9375 4736 9375 4735 9376 4734 9376 4736 9376 6109 9377 6107 9377 4736 9377 4736 9378 6107 9378 4737 9378 5754 9379 4736 9379 4737 9379 6107 9380 4738 9380 4737 9380 4737 9381 4738 9381 4739 9381 5755 9382 4737 9382 4739 9382 4738 9383 6106 9383 4739 9383 4739 9384 6106 9384 4740 9384 4711 9385 4739 9385 4740 9385 6106 9386 6105 9386 4740 9386 4740 9387 6105 9387 4741 9387 5756 9388 4740 9388 4741 9388 6105 9389 6104 9389 4741 9389 4741 9390 6104 9390 4742 9390 4710 9391 4741 9391 4742 9391 6104 9392 4743 9392 4742 9392 4742 9393 4743 9393 4709 9393 5757 9394 4742 9394 4709 9394 4743 9395 4744 9395 4709 9395 4709 9396 4744 9396 4746 9396 4745 9397 4709 9397 4746 9397 4744 9398 6103 9398 4746 9398 4746 9399 6103 9399 4747 9399 5758 9400 4746 9400 4747 9400 6103 9401 4748 9401 4747 9401 4747 9402 4748 9402 4749 9402 5759 9403 4747 9403 4749 9403 4748 9404 6102 9404 4749 9404 4749 9405 6102 9405 4750 9405 5760 9406 4749 9406 4750 9406 6102 9407 4751 9407 4750 9407 4750 9408 4751 9408 4753 9408 4752 9409 4750 9409 4753 9409 4751 9410 4754 9410 4753 9410 4753 9411 4754 9411 4757 9411 5761 9412 4753 9412 4757 9412 4754 9413 4755 9413 4757 9413 4757 9414 4755 9414 4758 9414 4756 9415 4757 9415 4758 9415 4755 9416 4759 9416 4758 9416 4758 9417 4759 9417 4760 9417 4708 9418 4758 9418 4760 9418 4759 9419 6101 9419 4760 9419 4760 9420 6101 9420 4761 9420 4707 9421 4760 9421 4761 9421 6101 9422 4762 9422 4761 9422 4761 9423 
[Omitted: a large block of raw numeric data — interleaved index pairs that appear to be triangle/vertex index lists from a mesh or model asset included in this changeset. The block carries no human-readable content and lacks diff hunk markers, so only this placeholder is kept.]
11634 5903 11634 5903 11635 61 11635 10 11635 62 11636 5903 11636 10 11636 62 11637 5896 11637 5903 11637 5903 11638 5896 11638 5897 11638 5898 11639 5903 11639 5897 11639 5898 11640 63 11640 5903 11640 5903 11641 63 11641 64 11641 5899 11642 5903 11642 64 11642 5899 11643 5900 11643 5903 11643 5903 11644 5900 11644 13 11644 67 11645 5903 11645 13 11645 67 11646 68 11646 5903 11646 5903 11647 68 11647 69 11647 16 11648 5903 11648 69 11648 16 11649 5901 11649 5903 11649 5903 11650 5901 11650 18 11650 5902 11651 5903 11651 18 11651 5902 11652 70 11652 5903 11652 5903 11653 70 11653 20 11653 5904 11654 5903 11654 20 11654 5904 11655 5905 11655 5903 11655 5903 11656 5905 11656 25 11656 5906 11657 5903 11657 25 11657 5906 11658 5907 11658 5903 11658 5903 11659 5907 11659 5908 11659 71 11660 5903 11660 5908 11660 71 11661 154 11661 5903 11661 71 11662 153 11662 154 11662 71 11663 72 11663 153 11663 153 11664 72 11664 108 11664 108 11665 72 11665 5946 11665 5947 11666 5946 11666 5909 11666 151 11667 5909 11667 73 11667 150 11668 73 11668 74 11668 5948 11669 74 11669 5910 11669 149 11670 5910 11670 75 11670 5949 11671 75 11671 5950 11671 5951 11672 5950 11672 5911 11672 102 11673 5911 11673 5912 11673 5952 11674 5912 11674 5953 11674 5954 11675 5953 11675 5913 11675 5955 11676 5913 11676 79 11676 5956 11677 79 11677 5957 11677 5958 11678 5957 11678 5914 11678 5915 11679 5914 11679 80 11679 5916 11680 80 11680 5959 11680 5917 11681 5959 11681 81 11681 148 11682 81 11682 5918 11682 5960 11683 5918 11683 5919 11683 5961 11684 5919 11684 36 11684 5962 11685 36 11685 5963 11685 100 11686 5963 11686 83 11686 145 11687 83 11687 37 11687 143 11688 37 11688 84 11688 143 11689 145 11689 37 11689 5920 11690 4 11690 348 11690 348 11691 5891 11691 5903 11691 5934 11692 348 11692 5903 11692 5934 11693 347 11693 348 11693 5934 11694 296 11694 347 11694 5934 11695 346 11695 296 11695 5934 11696 294 11696 346 11696 5934 11697 292 11697 294 11697 5934 11698 291 11698 292 11698 5934 11699 290 11699 291 11699 5934 11700 5921 11700 290 11700 5934 11701 343 11701 5921 11701 5934 11702 289 11702 343 11702 5934 11703 5922 11703 289 11703 5934 11704 340 11704 5922 11704 5934 11705 5923 11705 340 11705 5934 11706 287 11706 5923 11706 5934 11707 5924 11707 287 11707 5934 11708 5925 11708 5924 11708 5934 11709 5926 11709 5925 11709 5934 11710 285 11710 5926 11710 5934 11711 5927 11711 285 11711 5934 11712 5928 11712 5927 11712 5934 11713 284 11713 5928 11713 5934 11714 336 11714 284 11714 5934 11715 5929 11715 336 11715 5934 11716 5930 11716 5929 11716 5934 11717 280 11717 5930 11717 5934 11718 5931 11718 280 11718 5934 11719 277 11719 5931 11719 5934 11720 333 11720 277 11720 5934 11721 5932 11721 333 11721 5934 11722 332 11722 5932 11722 5934 11723 331 11723 332 11723 5934 11724 375 11724 331 11724 5934 11725 329 11725 375 11725 5934 11726 5933 11726 329 11726 5934 11727 5935 11727 5933 11727 5934 11728 6030 11728 5935 11728 5935 11729 6030 11729 5936 11729 328 11730 5936 11730 186 11730 6009 11731 186 11731 6010 11731 371 11732 6010 11732 235 11732 327 11733 235 11733 236 11733 5937 11734 236 11734 237 11734 6011 11735 237 11735 238 11735 6012 11736 238 11736 239 11736 5938 11737 239 11737 6013 11737 5939 11738 6013 11738 191 11738 368 11739 191 11739 240 11739 367 11740 240 11740 193 11740 6014 11741 193 11741 194 11741 364 11742 194 11742 5940 11742 322 11743 5940 11743 5941 11743 363 11744 5941 11744 245 11744 6015 11745 245 11745 246 11745 321 11746 246 11746 196 11746 319 11747 196 11747 5942 11747 6016 11748 5942 
11748 6017 11748 6018 11749 6017 11749 6019 11749 6020 11750 6019 11750 247 11750 5943 11751 247 11751 5944 11751 6021 11752 5944 11752 6022 11752 6023 11753 6022 11753 5945 11753 5877 11754 6023 11754 5945 11754 108 11755 5946 11755 5947 11755 5947 11756 5909 11756 151 11756 151 11757 73 11757 150 11757 150 11758 74 11758 5948 11758 5948 11759 5910 11759 149 11759 149 11760 75 11760 5949 11760 5949 11761 5950 11761 5951 11761 5951 11762 5911 11762 102 11762 102 11763 5912 11763 5952 11763 5952 11764 5953 11764 5954 11764 5954 11765 5913 11765 5955 11765 5955 11766 79 11766 5956 11766 5956 11767 5957 11767 5958 11767 5958 11768 5914 11768 5915 11768 5915 11769 80 11769 5916 11769 5916 11770 5959 11770 5917 11770 5917 11771 81 11771 148 11771 148 11772 5918 11772 5960 11772 5960 11773 5919 11773 5961 11773 5961 11774 36 11774 5962 11774 5962 11775 5963 11775 100 11775 100 11776 83 11776 145 11776 84 11777 85 11777 356 11777 85 11778 86 11778 5964 11778 86 11779 87 11779 5965 11779 87 11780 5966 11780 354 11780 5966 11781 43 11781 315 11781 43 11782 88 11782 5876 11782 88 11783 5967 11783 353 11783 5967 11784 90 11784 5968 11784 93 11785 94 11785 5969 11785 94 11786 95 11786 5880 11786 95 11787 5879 11787 5970 11787 5879 11788 5972 11788 5971 11788 5972 11789 5974 11789 5973 11789 5974 11790 97 11790 249 11790 97 11791 141 11791 5878 11791 141 11792 142 11792 204 11792 142 11793 5975 11793 5976 11793 5903 11794 154 11794 5987 11794 5987 11795 154 11795 5977 11795 5978 11796 5987 11796 5977 11796 5978 11797 5979 11797 5987 11797 5987 11798 5979 11798 5981 11798 5980 11799 5987 11799 5981 11799 5980 11800 5982 11800 5987 11800 5987 11801 5982 11801 111 11801 156 11802 5987 11802 111 11802 156 11803 158 11803 5987 11803 5987 11804 158 11804 5983 11804 113 11805 5987 11805 5983 11805 113 11806 114 11806 5987 11806 5987 11807 114 11807 160 11807 116 11808 5987 11808 160 11808 116 11809 118 11809 5987 11809 5987 11810 118 11810 120 11810 5984 11811 5987 11811 120 11811 5984 11812 5985 11812 5987 11812 5987 11813 5985 11813 5986 11813 122 11814 5987 11814 5986 11814 122 11815 123 11815 5987 11815 5987 11816 123 11816 5988 11816 5989 11817 5987 11817 5988 11817 5989 11818 5990 11818 5987 11818 5987 11819 5990 11819 126 11819 5991 11820 5987 11820 126 11820 5991 11821 127 11821 5987 11821 5987 11822 127 11822 165 11822 166 11823 5987 11823 165 11823 166 11824 129 11824 5987 11824 5987 11825 129 11825 167 11825 5992 11826 5987 11826 167 11826 5992 11827 168 11827 5987 11827 5987 11828 168 11828 5993 11828 169 11829 5987 11829 5993 11829 169 11830 260 11830 5987 11830 169 11831 170 11831 260 11831 260 11832 170 11832 259 11832 259 11833 170 11833 5994 11833 5995 11834 5994 11834 171 11834 211 11835 171 11835 257 11835 211 11836 5995 11836 171 11836 259 11837 5994 11837 5995 11837 171 11838 172 11838 257 11838 257 11839 172 11839 5996 11839 5996 11840 172 11840 173 11840 6003 11841 173 11841 174 11841 5997 11842 174 11842 175 11842 208 11843 175 11843 176 11843 5998 11844 176 11844 5999 11844 6004 11845 5999 11845 6000 11845 253 11846 6000 11846 6001 11846 6005 11847 6001 11847 6002 11847 6006 11848 6002 11848 179 11848 207 11849 179 11849 135 11849 206 11850 207 11850 135 11850 5996 11851 173 11851 6003 11851 6003 11852 174 11852 5997 11852 5997 11853 175 11853 208 11853 208 11854 176 11854 5998 11854 5998 11855 5999 11855 6004 11855 6004 11856 6000 11856 253 11856 253 11857 6001 11857 6005 11857 6005 11858 6002 11858 6006 11858 6006 11859 179 11859 207 11859 229 11860 6007 11860 6030 11860 228 11861 
6030 11861 274 11861 228 11862 229 11862 6030 11862 6007 11863 6008 11863 6030 11863 6030 11864 6008 11864 181 11864 183 11865 6030 11865 181 11865 183 11866 231 11866 6030 11866 6030 11867 231 11867 232 11867 233 11868 6030 11868 232 11868 233 11869 5936 11869 6030 11869 5935 11870 5936 11870 328 11870 328 11871 186 11871 6009 11871 6009 11872 6010 11872 371 11872 371 11873 235 11873 327 11873 327 11874 236 11874 5937 11874 5937 11875 237 11875 6011 11875 6011 11876 238 11876 6012 11876 6012 11877 239 11877 5938 11877 5938 11878 6013 11878 5939 11878 5939 11879 191 11879 368 11879 368 11880 240 11880 367 11880 367 11881 193 11881 6014 11881 6014 11882 194 11882 364 11882 364 11883 5940 11883 322 11883 322 11884 5941 11884 363 11884 363 11885 245 11885 6015 11885 6015 11886 246 11886 321 11886 321 11887 196 11887 319 11887 319 11888 5942 11888 6016 11888 6016 11889 6017 11889 6018 11889 6018 11890 6019 11890 6020 11890 6020 11891 247 11891 5943 11891 5943 11892 5944 11892 6021 11892 6021 11893 6022 11893 6023 11893 5987 11894 260 11894 6030 11894 6030 11895 260 11895 215 11895 6024 11896 6030 11896 215 11896 6024 11897 216 11897 6030 11897 6030 11898 216 11898 6025 11898 217 11899 6030 11899 6025 11899 217 11900 6026 11900 6030 11900 6030 11901 6026 11901 264 11901 265 11902 6030 11902 264 11902 265 11903 6027 11903 6030 11903 6030 11904 6027 11904 266 11904 6028 11905 6030 11905 266 11905 6028 11906 220 11906 6030 11906 6030 11907 220 11907 222 11907 6029 11908 6030 11908 222 11908 6029 11909 268 11909 6030 11909 6030 11910 268 11910 6031 11910 223 11911 6030 11911 6031 11911 223 11912 270 11912 6030 11912 6030 11913 270 11913 6032 11913 271 11914 6030 11914 6032 11914 271 11915 6033 11915 6030 11915 6030 11916 6033 11916 227 11916 6034 11917 6030 11917 227 11917 6034 11918 273 11918 6030 11918 6030 11919 273 11919 274 11919 309 11920 6035 11920 1 11920 1 11921 6035 11921 5872 11921 4869 11922 5369 11922 6116 11922 4869 11923 6036 11923 5369 11923 4869 11924 4866 11924 6036 11924 6036 11925 4866 11925 5372 11925 5372 11926 4866 11926 4865 11926 6037 11927 4865 11927 4861 11927 6082 11928 4861 11928 4860 11928 6083 11929 4860 11929 4858 11929 5376 11930 4858 11930 4855 11930 5379 11931 4855 11931 4853 11931 6084 11932 4853 11932 6038 11932 5381 11933 6038 11933 4851 11933 5385 11934 4851 11934 4848 11934 6054 11935 4848 11935 6091 11935 6054 11936 5385 11936 4848 11936 6054 11937 5388 11937 5385 11937 6054 11938 6040 11938 5388 11938 6054 11939 6039 11939 6040 11939 6054 11940 6041 11940 6039 11940 6054 11941 5393 11941 6041 11941 6054 11942 6042 11942 5393 11942 6054 11943 5396 11943 6042 11943 6054 11944 5399 11944 5396 11944 6054 11945 6043 11945 5399 11945 6054 11946 6044 11946 6043 11946 6054 11947 5403 11947 6044 11947 6054 11948 6045 11948 5403 11948 6054 11949 6046 11949 6045 11949 6054 11950 5407 11950 6046 11950 6054 11951 6048 11951 5407 11951 6054 11952 6047 11952 6048 11952 6054 11953 6050 11953 6047 11953 6054 11954 6049 11954 6050 11954 6054 11955 6051 11955 6049 11955 6054 11956 5414 11956 6051 11956 6054 11957 5416 11957 5414 11957 6054 11958 5417 11958 5416 11958 6054 11959 5420 11959 5417 11959 6054 11960 5421 11960 5420 11960 6054 11961 5423 11961 5421 11961 6054 11962 5427 11962 5423 11962 6054 11963 6052 11963 5427 11963 6054 11964 6053 11964 6052 11964 6054 11965 5430 11965 6053 11965 6054 11966 6055 11966 5430 11966 6054 11967 6175 11967 6055 11967 6054 11968 6079 11968 6175 11968 6175 11969 6079 11969 6056 11969 6056 11970 6079 11970 6081 11970 4878 11971 6056 
11971 6081 11971 4878 11972 5065 11972 6056 11972 6056 11973 5065 11973 5067 11973 5070 11974 6056 11974 5067 11974 5070 11975 5073 11975 6056 11975 6056 11976 5073 11976 4876 11976 4875 11977 6056 11977 4876 11977 4875 11978 4873 11978 6056 11978 6056 11979 4873 11979 4935 11979 4933 11980 6056 11980 4935 11980 4933 11981 4932 11981 6056 11981 6056 11982 4932 11982 6057 11982 4929 11983 6056 11983 6057 11983 4929 11984 4928 11984 6056 11984 6056 11985 4928 11985 4944 11985 4946 11986 6056 11986 4944 11986 4946 11987 4926 11987 6056 11987 6056 11988 4926 11988 4925 11988 4924 11989 6056 11989 4925 11989 4924 11990 4923 11990 6056 11990 6056 11991 4923 11991 6058 11991 6059 11992 6056 11992 6058 11992 6059 11993 6060 11993 6056 11993 6056 11994 6060 11994 4921 11994 4962 11995 6056 11995 4921 11995 4962 11996 4966 11996 6056 11996 6056 11997 4966 11997 6061 11997 4970 11998 6056 11998 6061 11998 4970 11999 2386 11999 6056 11999 4970 12000 6062 12000 2386 12000 2386 12001 6062 12001 6063 12001 6064 12002 2386 12002 6063 12002 6064 12003 4919 12003 2386 12003 2386 12004 4919 12004 4918 12004 4978 12005 2386 12005 4918 12005 4978 12006 4916 12006 2386 12006 2386 12007 4916 12007 6065 12007 4915 12008 2386 12008 6065 12008 4915 12009 6066 12009 2386 12009 2386 12010 6066 12010 6067 12010 4914 12011 2386 12011 6067 12011 4914 12012 6068 12012 2386 12012 2386 12013 6068 12013 4992 12013 4913 12014 2386 12014 4992 12014 4913 12015 4912 12015 2386 12015 2386 12016 4912 12016 4910 12016 4909 12017 2386 12017 4910 12017 4909 12018 5001 12018 2386 12018 2386 12019 5001 12019 4907 12019 4905 12020 2386 12020 4907 12020 4905 12021 5004 12021 2386 12021 2386 12022 5004 12022 4902 12022 6069 12023 2386 12023 4902 12023 6069 12024 6070 12024 2386 12024 2386 12025 6070 12025 6071 12025 6072 12026 2386 12026 6071 12026 6072 12027 6079 12027 2386 12027 6072 12028 4899 12028 6079 12028 6079 12029 4899 12029 5013 12029 5014 12030 6079 12030 5013 12030 5014 12031 4897 12031 6079 12031 6079 12032 4897 12032 4895 12032 4893 12033 6079 12033 4895 12033 4893 12034 5020 12034 6079 12034 6079 12035 5020 12035 4892 12035 6073 12036 6079 12036 4892 12036 6073 12037 6074 12037 6079 12037 6079 12038 6074 12038 5029 12038 4891 12039 6079 12039 5029 12039 4891 12040 5032 12040 6079 12040 6079 12041 5032 12041 4890 12041 6075 12042 6079 12042 4890 12042 6075 12043 5039 12043 6079 12043 6079 12044 5039 12044 6076 12044 4888 12045 6079 12045 6076 12045 4888 12046 4887 12046 6079 12046 6079 12047 4887 12047 6077 12047 4886 12048 6079 12048 6077 12048 4886 12049 6078 12049 6079 12049 6079 12050 6078 12050 4885 12050 4884 12051 6079 12051 4885 12051 4884 12052 5052 12052 6079 12052 6079 12053 5052 12053 4881 12053 5055 12054 6079 12054 4881 12054 5055 12055 4880 12055 6079 12055 6079 12056 4880 12056 6080 12056 5059 12057 6079 12057 6080 12057 5059 12058 6081 12058 6079 12058 5372 12059 4865 12059 6037 12059 6037 12060 4861 12060 6082 12060 6082 12061 4860 12061 6083 12061 6083 12062 4858 12062 5376 12062 5376 12063 4855 12063 5379 12063 5379 12064 4853 12064 6084 12064 6084 12065 6038 12065 5381 12065 5381 12066 4851 12066 5385 12066 4848 12067 4846 12067 6091 12067 6091 12068 4846 12068 6085 12068 4841 12069 6091 12069 6085 12069 4841 12070 4838 12070 6091 12070 6091 12071 4838 12071 4837 12071 4835 12072 6091 12072 4837 12072 4835 12073 4832 12073 6091 12073 6091 12074 4832 12074 4831 12074 4830 12075 6091 12075 4831 12075 4830 12076 6086 12076 6091 12076 6091 12077 6086 12077 6087 12077 4822 12078 6091 12078 6087 12078 4822 
12079 4821 12079 6091 12079 6091 12080 4821 12080 6089 12080 6088 12081 6091 12081 6089 12081 6088 12082 4814 12082 6091 12082 6091 12083 4814 12083 4813 12083 4811 12084 6091 12084 4813 12084 4811 12085 6090 12085 6091 12085 6091 12086 6090 12086 4808 12086 6092 12087 6091 12087 4808 12087 6092 12088 4805 12088 6091 12088 6091 12089 4805 12089 4804 12089 4800 12090 6091 12090 4804 12090 4800 12091 6093 12091 6091 12091 6091 12092 6093 12092 4797 12092 4796 12093 6091 12093 4797 12093 4796 12094 6094 12094 6091 12094 6091 12095 6094 12095 4790 12095 4789 12096 6091 12096 4790 12096 4789 12097 6098 12097 6091 12097 4789 12098 4787 12098 6098 12098 6098 12099 4787 12099 4785 12099 4782 12100 6098 12100 4785 12100 4782 12101 4779 12101 6098 12101 6098 12102 4779 12102 4777 12102 6095 12103 6098 12103 4777 12103 6095 12104 6096 12104 6098 12104 6098 12105 6096 12105 6097 12105 4771 12106 6098 12106 6097 12106 4771 12107 4770 12107 6098 12107 6098 12108 4770 12108 4768 12108 6099 12109 6098 12109 4768 12109 6099 12110 4764 12110 6098 12110 6098 12111 4764 12111 6100 12111 4762 12112 6098 12112 6100 12112 4762 12113 6101 12113 6098 12113 6098 12114 6101 12114 4759 12114 4755 12115 6098 12115 4759 12115 4755 12116 4754 12116 6098 12116 6098 12117 4754 12117 4751 12117 6102 12118 6098 12118 4751 12118 6102 12119 4748 12119 6098 12119 6098 12120 4748 12120 6103 12120 4744 12121 6098 12121 6103 12121 4744 12122 4743 12122 6098 12122 6098 12123 4743 12123 6104 12123 6105 12124 6098 12124 6104 12124 6105 12125 6106 12125 6098 12125 6098 12126 6106 12126 4738 12126 6107 12127 6098 12127 4738 12127 6107 12128 5347 12128 6098 12128 6107 12129 6108 12129 5347 12129 6107 12130 6109 12130 6108 12130 6108 12131 6109 12131 6117 12131 6117 12132 6109 12132 6110 12132 5351 12133 6110 12133 4732 12133 5355 12134 4732 12134 4728 12134 5356 12135 4728 12135 6112 12135 6111 12136 6112 12136 4724 12136 6113 12137 4724 12137 4723 12137 5360 12138 4723 12138 6118 12138 6119 12139 6118 12139 4719 12139 6120 12140 4719 12140 6114 12140 6121 12141 6114 12141 6115 12141 5367 12142 6115 12142 4715 12142 6122 12143 4715 12143 4716 12143 6123 12144 4716 12144 4679 12144 6124 12145 4679 12145 6116 12145 5369 12146 6124 12146 6116 12146 6117 12147 6110 12147 5351 12147 5351 12148 4732 12148 5355 12148 5355 12149 4728 12149 5356 12149 5356 12150 6112 12150 6111 12150 6111 12151 4724 12151 6113 12151 6113 12152 4723 12152 5360 12152 5360 12153 6118 12153 6119 12153 6119 12154 4719 12154 6120 12154 6120 12155 6114 12155 6121 12155 6121 12156 6115 12156 5367 12156 5367 12157 4715 12157 6122 12157 6122 12158 4716 12158 6123 12158 6123 12159 4679 12159 6124 12159 6128 12160 6125 12160 6166 12160 6126 12161 6166 12161 6127 12161 6126 12162 6128 12162 6166 12162 6125 12163 5103 12163 6166 12163 6166 12164 5103 12164 5101 12164 5190 12165 6166 12165 5101 12165 5190 12166 6129 12166 6166 12166 6166 12167 6129 12167 5193 12167 5194 12168 6166 12168 5193 12168 5194 12169 5198 12169 6166 12169 6166 12170 5198 12170 5201 12170 6130 12171 6166 12171 5201 12171 6130 12172 6131 12172 6166 12172 6166 12173 6131 12173 5098 12173 5097 12174 6166 12174 5098 12174 5097 12175 5096 12175 6166 12175 6166 12176 5096 12176 5095 12176 6132 12177 6166 12177 5095 12177 6132 12178 5214 12178 6166 12178 6166 12179 5214 12179 5093 12179 6133 12180 6166 12180 5093 12180 6133 12181 5090 12181 6166 12181 6166 12182 5090 12182 5220 12182 6135 12183 5220 12183 6134 12183 5088 12184 6135 12184 6134 12184 5088 12185 6136 12185 6135 12185 6135 12186 6136 12186 6137 
12186 5227 12187 6135 12187 6137 12187 5227 12188 6138 12188 6135 12188 6135 12189 6138 12189 5086 12189 6139 12190 6135 12190 5086 12190 6139 12191 6140 12191 6135 12191 6135 12192 6140 12192 6141 12192 6142 12193 6135 12193 6141 12193 6142 12194 5240 12194 6135 12194 6135 12195 5240 12195 5242 12195 6143 12196 6135 12196 5242 12196 6143 12197 6144 12197 6135 12197 6135 12198 6144 12198 6145 12198 6146 12199 6135 12199 6145 12199 6146 12200 6147 12200 6135 12200 6135 12201 6147 12201 5250 12201 6148 12202 6135 12202 5250 12202 6148 12203 5082 12203 6135 12203 6135 12204 5082 12204 5081 12204 6149 12205 6135 12205 5081 12205 6149 12206 6150 12206 6135 12206 6135 12207 6150 12207 6151 12207 6152 12208 6135 12208 6151 12208 6152 12209 5129 12209 6135 12209 6135 12210 5129 12210 5128 12210 6159 12211 5128 12211 5132 12211 5135 12212 6159 12212 5132 12212 5135 12213 5127 12213 6159 12213 6159 12214 5127 12214 6153 12214 6154 12215 6159 12215 6153 12215 6154 12216 6155 12216 6159 12216 6159 12217 6155 12217 6156 12217 5125 12218 6159 12218 6156 12218 5125 12219 6157 12219 6159 12219 6159 12220 6157 12220 5124 12220 6158 12221 6159 12221 5124 12221 6158 12222 6160 12222 6159 12222 6159 12223 6160 12223 5122 12223 5120 12224 6159 12224 5122 12224 5120 12225 6161 12225 6159 12225 6159 12226 6161 12226 5155 12226 6162 12227 6159 12227 5155 12227 6162 12228 5156 12228 6159 12228 6159 12229 5156 12229 6163 12229 5116 12230 6159 12230 6163 12230 5116 12231 5115 12231 6159 12231 6159 12232 5115 12232 5113 12232 5164 12233 6159 12233 5113 12233 5164 12234 5112 12234 6159 12234 6159 12235 5112 12235 5110 12235 5109 12236 6159 12236 5110 12236 5109 12237 6164 12237 6159 12237 6159 12238 6164 12238 6165 12238 5108 12239 6159 12239 6165 12239 5108 12240 5106 12240 6159 12240 6159 12241 5106 12241 6168 12241 6166 12242 6168 12242 5176 12242 6167 12243 6166 12243 5176 12243 6167 12244 5104 12244 6166 12244 6166 12245 5104 12245 6127 12245 6166 12246 5220 12246 6135 12246 6135 12247 5128 12247 6159 12247 6098 12248 6159 12248 6091 12248 6098 12249 6135 12249 6159 12249 6159 12250 6168 12250 6166 12250 5258 12251 5433 12251 6175 12251 6169 12252 6175 12252 6180 12252 6169 12253 5258 12253 6175 12253 5433 12254 6170 12254 6175 12254 6175 12255 6170 12255 6055 12255 6098 12256 5347 12256 6175 12256 6175 12257 5347 12257 5346 12257 5345 12258 6175 12258 5346 12258 5345 12259 6171 12259 6175 12259 6175 12260 6171 12260 5339 12260 5338 12261 6175 12261 5339 12261 5338 12262 6172 12262 6175 12262 6175 12263 6172 12263 5336 12263 5334 12264 6175 12264 5336 12264 5334 12265 5333 12265 6175 12265 6175 12266 5333 12266 6173 12266 5328 12267 6175 12267 6173 12267 5328 12268 5326 12268 6175 12268 6175 12269 5326 12269 5324 12269 6174 12270 6175 12270 5324 12270 6174 12271 5322 12271 6175 12271 6175 12272 5322 12272 6176 12272 5320 12273 6175 12273 6176 12273 5320 12274 6177 12274 6175 12274 6175 12275 6177 12275 5315 12275 5313 12276 6175 12276 5315 12276 5313 12277 5312 12277 6175 12277 6175 12278 5312 12278 6178 12278 6179 12279 6175 12279 6178 12279 6179 12280 5308 12280 6175 12280 6175 12281 5308 12281 6180 12281 2510 12282 6182 12282 6479 12282 6181 12283 6479 12283 2558 12283 6181 12284 2510 12284 6479 12284 6182 12285 6183 12285 6479 12285 6479 12286 6183 12286 2689 12286 6187 12287 2689 12287 2688 12287 2687 12288 6187 12288 2688 12288 2687 12289 2684 12289 6187 12289 6187 12290 2684 12290 2682 12290 6184 12291 6187 12291 2682 12291 6184 12292 6185 12292 6187 12292 6187 12293 6185 12293 2679 12293 6186 12294 6187 
12294 2679 12294 6186 12295 2674 12295 6187 12295 6187 12296 2674 12296 6188 12296 2670 12297 6187 12297 6188 12297 2670 12298 2668 12298 6187 12298 6187 12299 2668 12299 2666 12299 2665 12300 6187 12300 2666 12300 2665 12301 2664 12301 6187 12301 6187 12302 2664 12302 2661 12302 2659 12303 6187 12303 2661 12303 2659 12304 2658 12304 6187 12304 6187 12305 2658 12305 2656 12305 6189 12306 6187 12306 2656 12306 6189 12307 2653 12307 6187 12307 6187 12308 2653 12308 6190 12308 6191 12309 6187 12309 6190 12309 6191 12310 6192 12310 6187 12310 6187 12311 6192 12311 6193 12311 2647 12312 6187 12312 6193 12312 2647 12313 2646 12313 6187 12313 6187 12314 2646 12314 2644 12314 2642 12315 6187 12315 2644 12315 2642 12316 2639 12316 6187 12316 6187 12317 2639 12317 3619 12317 6204 12318 3619 12318 3724 12318 6194 12319 6204 12319 3724 12319 6194 12320 6195 12320 6204 12320 6204 12321 6195 12321 6196 12321 3618 12322 6204 12322 6196 12322 3618 12323 6197 12323 6204 12323 6204 12324 6197 12324 3616 12324 3615 12325 6204 12325 3616 12325 3615 12326 3614 12326 6204 12326 6204 12327 3614 12327 6198 12327 6199 12328 6204 12328 6198 12328 6199 12329 3741 12329 6204 12329 6204 12330 3741 12330 3744 12330 3613 12331 6204 12331 3744 12331 3613 12332 3612 12332 6204 12332 6204 12333 3612 12333 3611 12333 6200 12334 6204 12334 3611 12334 6200 12335 6201 12335 6204 12335 6204 12336 6201 12336 6202 12336 3610 12337 6204 12337 6202 12337 3610 12338 3756 12338 6204 12338 6204 12339 3756 12339 3757 12339 6203 12340 6204 12340 3757 12340 6203 12341 3608 12341 6204 12341 6204 12342 3608 12342 6205 12342 3607 12343 6204 12343 6205 12343 3607 12344 6206 12344 6204 12344 6204 12345 6206 12345 3768 12345 3606 12346 6204 12346 3768 12346 3606 12347 3605 12347 6204 12347 6204 12348 3605 12348 6282 12348 6211 12349 6282 12349 6241 12349 3098 12350 6241 12350 3057 12350 3098 12351 6211 12351 6241 12351 3098 12352 6207 12352 6211 12352 6211 12353 6207 12353 3097 12353 6208 12354 6211 12354 3097 12354 6208 12355 3109 12355 6211 12355 6211 12356 3109 12356 3096 12356 6209 12357 6211 12357 3096 12357 6209 12358 3094 12358 6211 12358 6211 12359 3094 12359 3114 12359 3093 12360 6211 12360 3114 12360 3093 12361 6210 12361 6211 12361 6211 12362 6210 12362 3092 12362 6212 12363 6211 12363 3092 12363 6212 12364 3091 12364 6211 12364 6211 12365 3091 12365 6213 12365 6214 12366 6211 12366 6213 12366 6214 12367 6215 12367 6211 12367 6211 12368 6215 12368 6216 12368 3090 12369 6211 12369 6216 12369 3090 12370 3134 12370 6211 12370 6211 12371 3134 12371 6217 12371 3087 12372 6211 12372 6217 12372 3087 12373 6218 12373 6211 12373 6211 12374 6218 12374 3139 12374 3143 12375 6211 12375 3139 12375 3143 12376 6219 12376 6211 12376 6211 12377 6219 12377 6220 12377 6221 12378 6211 12378 6220 12378 6221 12379 3085 12379 6211 12379 6211 12380 3085 12380 6222 12380 3084 12381 6211 12381 6222 12381 3084 12382 6223 12382 6211 12382 6211 12383 6223 12383 4042 12383 4042 12384 6223 12384 3083 12384 3157 12385 4042 12385 3083 12385 3157 12386 3082 12386 4042 12386 4042 12387 3082 12387 3159 12387 6224 12388 4042 12388 3159 12388 6224 12389 3161 12389 4042 12389 4042 12390 3161 12390 3162 12390 6225 12391 4042 12391 3162 12391 6225 12392 6226 12392 4042 12392 4042 12393 6226 12393 3167 12393 3169 12394 4042 12394 3167 12394 3169 12395 3172 12395 4042 12395 4042 12396 3172 12396 6228 12396 6227 12397 4042 12397 6228 12397 6227 12398 6229 12398 4042 12398 4042 12399 6229 12399 6230 12399 3078 12400 4042 12400 6230 12400 3078 12401 3077 12401 4042 12401 4042 
12402 3077 12402 6231 12402 6232 12403 4042 12403 6231 12403 6232 12404 3187 12404 4042 12404 4042 12405 3187 12405 6233 12405 6234 12406 4042 12406 6233 12406 6234 12407 3075 12407 4042 12407 4042 12408 3075 12408 3074 12408 3073 12409 4042 12409 3074 12409 3073 12410 6235 12410 4042 12410 4042 12411 6235 12411 6241 12411 6241 12412 6235 12412 3072 12412 3201 12413 6241 12413 3072 12413 3201 12414 3203 12414 6241 12414 6241 12415 3203 12415 3206 12415 3071 12416 6241 12416 3206 12416 3071 12417 3070 12417 6241 12417 6241 12418 3070 12418 3069 12418 3068 12419 6241 12419 3069 12419 3068 12420 6236 12420 6241 12420 6241 12421 6236 12421 6237 12421 3220 12422 6241 12422 6237 12422 3220 12423 3067 12423 6241 12423 6241 12424 3067 12424 3066 12424 3064 12425 6241 12425 3066 12425 3064 12426 3224 12426 6241 12426 6241 12427 3224 12427 6238 12427 6239 12428 6241 12428 6238 12428 6239 12429 3063 12429 6241 12429 6241 12430 3063 12430 3229 12430 3061 12431 6241 12431 3229 12431 3061 12432 6240 12432 6241 12432 6241 12433 6240 12433 3060 12433 3235 12434 6241 12434 3060 12434 3235 12435 3059 12435 6241 12435 6241 12436 3059 12436 6242 12436 3058 12437 6241 12437 6242 12437 3058 12438 3057 12438 6241 12438 6479 12439 2689 12439 6187 12439 6256 12440 6479 12440 6187 12440 6256 12441 6323 12441 6479 12441 6256 12442 3249 12442 6323 12442 6256 12443 3250 12443 3249 12443 6256 12444 3251 12444 3250 12444 6256 12445 3399 12445 3251 12445 6256 12446 3254 12446 3399 12446 6256 12447 3255 12447 3254 12447 6256 12448 3256 12448 3255 12448 6256 12449 3394 12449 3256 12449 6256 12450 6243 12450 3394 12450 6256 12451 3258 12451 6243 12451 6256 12452 3391 12452 3258 12452 6256 12453 6244 12453 3391 12453 6256 12454 6245 12454 6244 12454 6256 12455 3390 12455 6245 12455 6256 12456 6247 12456 3390 12456 6256 12457 6246 12457 6247 12457 6256 12458 6248 12458 6246 12458 6256 12459 3265 12459 6248 12459 6256 12460 3383 12460 3265 12460 6256 12461 3382 12461 3383 12461 6256 12462 6250 12462 3382 12462 6256 12463 6249 12463 6250 12463 6256 12464 6251 12464 6249 12464 6256 12465 6253 12465 6251 12465 6256 12466 6252 12466 6253 12466 6256 12467 6254 12467 6252 12467 6256 12468 6255 12468 6254 12468 6256 12469 3369 12469 6255 12469 6256 12470 3367 12470 3369 12470 6256 12471 3364 12471 3367 12471 6256 12472 3363 12472 3364 12472 6256 12473 6257 12473 3363 12473 6256 12474 6265 12474 6257 12474 6257 12475 6265 12475 3361 12475 3361 12476 6265 12476 3269 12476 3269 12477 6265 12477 6258 12477 6258 12478 6265 12478 6259 12478 6259 12479 6265 12479 6260 12479 6260 12480 6265 12480 6261 12480 6261 12481 6265 12481 6262 12481 6262 12482 6265 12482 3271 12482 3271 12483 6265 12483 3349 12483 3349 12484 6265 12484 3272 12484 3272 12485 6265 12485 6263 12485 6263 12486 6265 12486 6264 12486 6264 12487 6265 12487 3274 12487 3274 12488 6265 12488 3342 12488 3342 12489 6265 12489 3275 12489 3275 12490 6265 12490 3338 12490 3338 12491 6265 12491 3276 12491 3276 12492 6265 12492 6266 12492 6266 12493 6265 12493 3334 12493 3334 12494 6265 12494 6267 12494 6267 12495 6265 12495 6268 12495 6268 12496 6265 12496 3279 12496 3279 12497 6265 12497 3280 12497 3280 12498 6265 12498 6269 12498 6269 12499 6265 12499 3327 12499 3327 12500 6265 12500 3283 12500 3283 12501 6265 12501 6270 12501 6270 12502 6265 12502 6323 12502 6271 12503 6323 12503 3284 12503 6271 12504 6270 12504 6323 12504 3619 12505 2639 12505 3718 12505 3718 12506 2639 12506 6272 12506 3620 12507 6272 12507 6273 12507 3622 12508 6273 12508 2637 12508 3623 12509 2637 12509 2635 
12509 3624 12510 2635 12510 2633 12510 6299 12511 2633 12511 6275 12511 6274 12512 6275 12512 2630 12512 3709 12513 2630 12513 6276 12513 3706 12514 6276 12514 6300 12514 3703 12515 6300 12515 6277 12515 3625 12516 6277 12516 6278 12516 3627 12517 6278 12517 2622 12517 6301 12518 2622 12518 6302 12518 3628 12519 6302 12519 2620 12519 6303 12520 2620 12520 2619 12520 3631 12521 2619 12521 2618 12521 6304 12522 2618 12522 2617 12522 6305 12523 2617 12523 2615 12523 6306 12524 2615 12524 2611 12524 3633 12525 2611 12525 6279 12525 6307 12526 6279 12526 6308 12526 6280 12527 6308 12527 2608 12527 6309 12528 2608 12528 2606 12528 6310 12529 2606 12529 6311 12529 3682 12530 6311 12530 2603 12530 6281 12531 2603 12531 6312 12531 6282 12532 6312 12532 6479 12532 6282 12533 6281 12533 6312 12533 6282 12534 3678 12534 6281 12534 6282 12535 6284 12535 3678 12535 6282 12536 6283 12536 6284 12536 6282 12537 6285 12537 6283 12537 6282 12538 3671 12538 6285 12538 6282 12539 6286 12539 3671 12539 6282 12540 3635 12540 6286 12540 6282 12541 6288 12541 3635 12541 6282 12542 6287 12542 6288 12542 6282 12543 3637 12543 6287 12543 6282 12544 3661 12544 3637 12544 6282 12545 3638 12545 3661 12545 6282 12546 3639 12546 3638 12546 6282 12547 6289 12547 3639 12547 6282 12548 3640 12548 6289 12548 6282 12549 3642 12549 3640 12549 6282 12550 6290 12550 3642 12550 6282 12551 3652 12551 6290 12551 6282 12552 6291 12552 3652 12552 6282 12553 6292 12553 6291 12553 6282 12554 6293 12554 6292 12554 6282 12555 3646 12555 6293 12555 6282 12556 6295 12556 3646 12556 6282 12557 6294 12557 6295 12557 6282 12558 3601 12558 6294 12558 6282 12559 6296 12559 3601 12559 6282 12560 6297 12560 6296 12560 6282 12561 3603 12561 6297 12561 6282 12562 6298 12562 3603 12562 6282 12563 3605 12563 6298 12563 3718 12564 6272 12564 3620 12564 3620 12565 6273 12565 3622 12565 3622 12566 2637 12566 3623 12566 3623 12567 2635 12567 3624 12567 3624 12568 2633 12568 6299 12568 6299 12569 6275 12569 6274 12569 6274 12570 2630 12570 3709 12570 3709 12571 6276 12571 3706 12571 3706 12572 6300 12572 3703 12572 3703 12573 6277 12573 3625 12573 3625 12574 6278 12574 3627 12574 3627 12575 2622 12575 6301 12575 6301 12576 6302 12576 3628 12576 3628 12577 2620 12577 6303 12577 6303 12578 2619 12578 3631 12578 3631 12579 2618 12579 6304 12579 6304 12580 2617 12580 6305 12580 6305 12581 2615 12581 6306 12581 6306 12582 2611 12582 3633 12582 3633 12583 6279 12583 6307 12583 6307 12584 6308 12584 6280 12584 6280 12585 2608 12585 6309 12585 6309 12586 2606 12586 6310 12586 6310 12587 6311 12587 3682 12587 3682 12588 2603 12588 6281 12588 6312 12589 6313 12589 6479 12589 6479 12590 6313 12590 6314 12590 2597 12591 6479 12591 6314 12591 2597 12592 2596 12592 6479 12592 6479 12593 2596 12593 2593 12593 2590 12594 6479 12594 2593 12594 2590 12595 6315 12595 6479 12595 6479 12596 6315 12596 6316 12596 2585 12597 6479 12597 6316 12597 2585 12598 2583 12598 6479 12598 6479 12599 2583 12599 6317 12599 2579 12600 6479 12600 6317 12600 2579 12601 2578 12601 6479 12601 6479 12602 2578 12602 2573 12602 2570 12603 6479 12603 2573 12603 2570 12604 2568 12604 6479 12604 6479 12605 2568 12605 6318 12605 6319 12606 6479 12606 6318 12606 6319 12607 2564 12607 6479 12607 6479 12608 2564 12608 2562 12608 6320 12609 6479 12609 2562 12609 6320 12610 6321 12610 6479 12610 6479 12611 6321 12611 2560 12611 6322 12612 6479 12612 2560 12612 6322 12613 2558 12613 6479 12613 3245 12614 3412 12614 6323 12614 6324 12615 6323 12615 3246 12615 6324 12616 3245 12616 6323 12616 3412 12617 3244 
12617 6323 12617 6323 12618 3244 12618 6325 12618 3241 12619 6323 12619 6325 12619 3241 12620 3240 12620 6323 12620 6323 12621 3240 12621 6326 12621 6327 12622 6323 12622 6326 12622 6327 12623 3301 12623 6323 12623 6323 12624 3301 12624 3293 12624 3292 12625 6323 12625 3293 12625 3292 12626 6328 12626 6323 12626 6323 12627 6328 12627 6329 12627 6330 12628 6323 12628 6329 12628 6330 12629 6331 12629 6323 12629 6323 12630 6331 12630 3310 12630 6332 12631 6323 12631 3310 12631 6332 12632 3313 12632 6323 12632 6323 12633 3313 12633 3288 12633 3286 12634 6323 12634 3288 12634 3286 12635 6333 12635 6323 12635 6323 12636 6333 12636 3285 12636 3284 12637 6323 12637 3285 12637 3249 12638 3247 12638 6323 12638 6323 12639 3247 12639 3246 12639 6187 12640 3619 12640 6204 12640 6211 12641 6204 12641 6282 12641 4229 12642 4231 12642 6556 12642 4048 12643 6556 12643 4049 12643 4048 12644 4229 12644 6556 12644 4231 12645 6334 12645 6556 12645 6556 12646 6334 12646 4046 12646 4043 12647 6556 12647 4046 12647 4043 12648 6335 12648 6556 12648 6556 12649 6335 12649 6337 12649 6336 12650 6556 12650 6337 12650 6336 12651 4093 12651 6556 12651 6556 12652 4093 12652 6338 12652 4102 12653 6556 12653 6338 12653 4102 12654 4091 12654 6556 12654 6556 12655 4091 12655 4090 12655 6339 12656 6556 12656 4090 12656 6339 12657 4088 12657 6556 12657 6556 12658 4088 12658 6340 12658 6341 12659 6556 12659 6340 12659 6341 12660 4087 12660 6556 12660 6556 12661 4087 12661 4086 12661 4115 12662 6556 12662 4086 12662 4115 12663 4117 12663 6556 12663 6556 12664 4117 12664 4085 12664 6342 12665 6556 12665 4085 12665 6342 12666 4124 12666 6556 12666 6556 12667 4124 12667 6372 12667 6353 12668 6372 12668 4084 12668 6343 12669 6353 12669 4084 12669 6343 12670 4082 12670 6353 12670 6353 12671 4082 12671 4134 12671 4081 12672 6353 12672 4134 12672 4081 12673 6344 12673 6353 12673 6353 12674 6344 12674 6345 12674 6346 12675 6353 12675 6345 12675 6346 12676 4142 12676 6353 12676 6353 12677 4142 12677 6347 12677 4079 12678 6353 12678 6347 12678 4079 12679 4148 12679 6353 12679 6353 12680 4148 12680 4078 12680 4076 12681 6353 12681 4078 12681 4076 12682 4074 12682 6353 12682 6353 12683 4074 12683 6348 12683 4073 12684 6353 12684 6348 12684 4073 12685 4072 12685 6353 12685 6353 12686 4072 12686 6349 12686 4071 12687 6353 12687 6349 12687 4071 12688 6350 12688 6353 12688 6353 12689 6350 12689 4163 12689 4070 12690 6353 12690 4163 12690 4070 12691 4069 12691 6353 12691 6353 12692 4069 12692 6351 12692 6352 12693 6353 12693 6351 12693 6352 12694 4170 12694 6353 12694 6353 12695 4170 12695 6368 12695 6368 12696 4170 12696 4173 12696 4067 12697 6368 12697 4173 12697 4067 12698 4066 12698 6368 12698 6368 12699 4066 12699 6354 12699 4064 12700 6368 12700 6354 12700 4064 12701 4179 12701 6368 12701 6368 12702 4179 12702 6355 12702 6356 12703 6368 12703 6355 12703 6356 12704 4186 12704 6368 12704 6368 12705 4186 12705 6357 12705 6358 12706 6368 12706 6357 12706 6358 12707 6359 12707 6368 12707 6368 12708 6359 12708 6360 12708 6361 12709 6368 12709 6360 12709 6361 12710 6362 12710 6368 12710 6368 12711 6362 12711 4198 12711 6363 12712 6368 12712 4198 12712 6363 12713 4060 12713 6368 12713 6368 12714 4060 12714 4059 12714 4057 12715 6368 12715 4059 12715 4057 12716 6364 12716 6368 12716 6368 12717 6364 12717 6365 12717 4056 12718 6368 12718 6365 12718 4056 12719 6366 12719 6368 12719 6368 12720 6366 12720 4055 12720 4214 12721 6368 12721 4055 12721 4214 12722 4053 12722 6368 12722 6368 12723 4053 12723 6367 12723 6369 12724 6368 12724 6367 12724 6369 
12725 6370 12725 6368 12725 6368 12726 6370 12726 4050 12726 6556 12727 4050 12727 6371 12727 4049 12728 6556 12728 6371 12728 6556 12729 6372 12729 6353 12729 6368 12730 4050 12730 6556 12730 6557 12731 6368 12731 6556 12731 6557 12732 835 12732 6368 12732 6557 12733 6405 12733 835 12733 6557 12734 4605 12734 6405 12734 6557 12735 4609 12735 4605 12735 6557 12736 4610 12736 4609 12736 6557 12737 6373 12737 4610 12737 6557 12738 4612 12738 6373 12738 6557 12739 4617 12739 4612 12739 6557 12740 4619 12740 4617 12740 6557 12741 4621 12741 4619 12741 6557 12742 6374 12742 4621 12742 6557 12743 6375 12743 6374 12743 6557 12744 4623 12744 6375 12744 6557 12745 4625 12745 4623 12745 6557 12746 4628 12746 4625 12746 6557 12747 4629 12747 4628 12747 6557 12748 6376 12748 4629 12748 6557 12749 6377 12749 6376 12749 6557 12750 6378 12750 6377 12750 6557 12751 4633 12751 6378 12751 6557 12752 4634 12752 4633 12752 6557 12753 4636 12753 4634 12753 6557 12754 4637 12754 4636 12754 6557 12755 6379 12755 4637 12755 6557 12756 6380 12756 6379 12756 6557 12757 4644 12757 6380 12757 6557 12758 4645 12758 4644 12758 6557 12759 4649 12759 4645 12759 6557 12760 4650 12760 4649 12760 6557 12761 4654 12761 4650 12761 6557 12762 6381 12762 4654 12762 6557 12763 4657 12763 6381 12763 6557 12764 6438 12764 4657 12764 4657 12765 6438 12765 5577 12765 4658 12766 5577 12766 5574 12766 4662 12767 5574 12767 5573 12767 6382 12768 5573 12768 6383 12768 6445 12769 6383 12769 5571 12769 4666 12770 5571 12770 5569 12770 4668 12771 5569 12771 6384 12771 4669 12772 6384 12772 5565 12772 6385 12773 5565 12773 6444 12773 4671 12774 6444 12774 6386 12774 4672 12775 6386 12775 5560 12775 6443 12776 5560 12776 5557 12776 4674 12777 5557 12777 5556 12777 4675 12778 5556 12778 6388 12778 6387 12779 6388 12779 5551 12779 4677 12780 5551 12780 6453 12780 6452 12781 6453 12781 5548 12781 6451 12782 5548 12782 6450 12782 6389 12783 6450 12783 5545 12783 6390 12784 5545 12784 6449 12784 4548 12785 6449 12785 5541 12785 6391 12786 5541 12786 6448 12786 4550 12787 6448 12787 5538 12787 6447 12788 5538 12788 5536 12788 4552 12789 5536 12789 6392 12789 6446 12790 6392 12790 6393 12790 6394 12791 6393 12791 6454 12791 835 12792 6454 12792 6463 12792 835 12793 6394 12793 6454 12793 835 12794 4558 12794 6394 12794 835 12795 6395 12795 4558 12795 835 12796 4559 12796 6395 12796 835 12797 4563 12797 4559 12797 835 12798 6396 12798 4563 12798 835 12799 4565 12799 6396 12799 835 12800 4568 12800 4565 12800 835 12801 4570 12801 4568 12801 835 12802 6397 12802 4570 12802 835 12803 4572 12803 6397 12803 835 12804 4574 12804 4572 12804 835 12805 6398 12805 4574 12805 835 12806 4577 12806 6398 12806 835 12807 6399 12807 4577 12807 835 12808 6400 12808 6399 12808 835 12809 4582 12809 6400 12809 835 12810 4583 12810 4582 12810 835 12811 6401 12811 4583 12811 835 12812 4586 12812 6401 12812 835 12813 4589 12813 4586 12813 835 12814 6402 12814 4589 12814 835 12815 4591 12815 6402 12815 835 12816 6403 12816 4591 12816 835 12817 4594 12817 6403 12817 835 12818 4598 12818 4594 12818 835 12819 4599 12819 4598 12819 835 12820 6404 12820 4599 12820 835 12821 4601 12821 6404 12821 835 12822 6406 12822 4601 12822 835 12823 6405 12823 6406 12823 6412 12824 6474 12824 6438 12824 6412 12825 6407 12825 6474 12825 6412 12826 4318 12826 6407 12826 6412 12827 6409 12827 4318 12827 6412 12828 6408 12828 6409 12828 6412 12829 4271 12829 6408 12829 6412 12830 4272 12830 4271 12830 6412 12831 6411 12831 4272 12831 6412 12832 6410 12832 6411 12832 6412 12833 4445 12833 6410 
12833 6412 12834 4273 12834 4445 12834 6412 12835 4274 12835 4273 12835 6412 12836 4276 12836 4274 12836 6412 12837 4438 12837 4276 12837 6412 12838 6414 12838 4438 12838 6412 12839 6413 12839 6414 12839 6412 12840 6415 12840 6413 12840 6412 12841 4433 12841 6415 12841 6412 12842 4277 12842 4433 12842 6412 12843 4279 12843 4277 12843 6412 12844 4280 12844 4279 12844 6412 12845 4281 12845 4280 12845 6412 12846 6416 12846 4281 12846 6412 12847 6417 12847 6416 12847 6412 12848 4422 12848 6417 12848 6412 12849 6418 12849 4422 12849 6412 12850 6419 12850 6418 12850 6412 12851 4282 12851 6419 12851 6412 12852 6420 12852 4282 12852 6412 12853 6421 12853 6420 12853 6412 12854 6424 12854 6421 12854 6421 12855 6424 12855 4413 12855 4413 12856 6424 12856 4410 12856 4410 12857 6424 12857 4409 12857 4409 12858 6424 12858 4284 12858 4284 12859 6424 12859 4405 12859 4405 12860 6424 12860 6422 12860 6422 12861 6424 12861 4286 12861 4286 12862 6424 12862 6423 12862 6423 12863 6424 12863 4287 12863 4287 12864 6424 12864 4288 12864 4288 12865 6424 12865 4394 12865 4394 12866 6424 12866 4290 12866 4290 12867 6424 12867 4291 12867 4291 12868 6424 12868 6425 12868 6425 12869 6424 12869 4293 12869 4293 12870 6424 12870 4390 12870 4390 12871 6424 12871 4294 12871 4294 12872 6424 12872 6426 12872 6426 12873 6424 12873 4295 12873 4295 12874 6424 12874 4296 12874 4296 12875 6424 12875 4382 12875 4382 12876 6424 12876 6427 12876 6427 12877 6424 12877 6428 12877 6428 12878 6424 12878 4378 12878 4378 12879 6424 12879 4298 12879 4298 12880 6424 12880 6429 12880 6429 12881 6424 12881 4372 12881 4372 12882 6424 12882 6474 12882 4369 12883 6474 12883 6430 12883 4369 12884 4372 12884 6474 12884 6474 12885 6463 12885 6438 12885 6438 12886 6463 12886 5438 12886 5437 12887 6438 12887 5438 12887 5437 12888 6431 12888 6438 12888 6438 12889 6431 12889 5620 12889 5617 12890 6438 12890 5620 12890 5617 12891 5615 12891 6438 12891 6438 12892 5615 12892 5612 12892 5611 12893 6438 12893 5612 12893 5611 12894 6432 12894 6438 12894 6438 12895 6432 12895 6433 12895 5607 12896 6438 12896 6433 12896 5607 12897 6434 12897 6438 12897 6438 12898 6434 12898 5604 12898 5602 12899 6438 12899 5604 12899 5602 12900 5600 12900 6438 12900 6438 12901 5600 12901 5598 12901 5596 12902 6438 12902 5598 12902 5596 12903 6435 12903 6438 12903 6438 12904 6435 12904 5594 12904 6436 12905 6438 12905 5594 12905 6436 12906 5591 12906 6438 12906 6438 12907 5591 12907 6437 12907 5587 12908 6438 12908 6437 12908 5587 12909 6439 12909 6438 12909 6438 12910 6439 12910 5586 12910 5583 12911 6438 12911 5586 12911 5583 12912 6440 12912 6438 12912 6438 12913 6440 12913 5581 12913 6441 12914 6438 12914 5581 12914 6441 12915 6442 12915 6438 12915 6438 12916 6442 12916 5577 12916 4677 12917 6387 12917 5551 12917 6387 12918 4675 12918 6388 12918 4675 12919 4674 12919 5556 12919 4674 12920 6443 12920 5557 12920 6443 12921 4672 12921 5560 12921 4672 12922 4671 12922 6386 12922 4671 12923 6385 12923 6444 12923 6385 12924 4669 12924 5565 12924 4669 12925 4668 12925 6384 12925 4668 12926 4666 12926 5569 12926 4666 12927 6445 12927 5571 12927 6445 12928 6382 12928 6383 12928 6382 12929 4662 12929 5573 12929 4662 12930 4658 12930 5574 12930 4658 12931 4657 12931 5577 12931 6394 12932 6446 12932 6393 12932 6446 12933 4552 12933 6392 12933 4552 12934 6447 12934 5536 12934 6447 12935 4550 12935 5538 12935 4550 12936 6391 12936 6448 12936 6391 12937 4548 12937 5541 12937 4548 12938 6390 12938 6449 12938 6390 12939 6389 12939 5545 12939 6389 12940 6451 12940 6450 12940 6451 12941 6452 
12941 5548 12941 6452 12942 4677 12942 6453 12942 6454 12943 5532 12943 6463 12943 6463 12944 5532 12944 6455 12944 5530 12945 6463 12945 6455 12945 5530 12946 5529 12946 6463 12946 6463 12947 5529 12947 6456 12947 5526 12948 6463 12948 6456 12948 5526 12949 5523 12949 6463 12949 6463 12950 5523 12950 6457 12950 6458 12951 6463 12951 6457 12951 6458 12952 6459 12952 6463 12952 6463 12953 6459 12953 5517 12953 6460 12954 6463 12954 5517 12954 6460 12955 6461 12955 6463 12955 6463 12956 6461 12956 5515 12956 5513 12957 6463 12957 5515 12957 5513 12958 5511 12958 6463 12958 6463 12959 5511 12959 5510 12959 5508 12960 6463 12960 5510 12960 5508 12961 5505 12961 6463 12961 6463 12962 5505 12962 6462 12962 5503 12963 6463 12963 6462 12963 5503 12964 6464 12964 6463 12964 6463 12965 6464 12965 5500 12965 5497 12966 6463 12966 5500 12966 5497 12967 5495 12967 6463 12967 6463 12968 5495 12968 6465 12968 6466 12969 6463 12969 6465 12969 6466 12970 5491 12970 6463 12970 6463 12971 5491 12971 5490 12971 5438 12972 6463 12972 5490 12972 6407 12973 4316 12973 6474 12973 6474 12974 4316 12974 4323 12974 6467 12975 6474 12975 4323 12975 6467 12976 6468 12976 6474 12976 6474 12977 6468 12977 6469 12977 4314 12978 6474 12978 6469 12978 4314 12979 4334 12979 6474 12979 6474 12980 4334 12980 6470 12980 4313 12981 6474 12981 6470 12981 4313 12982 4338 12982 6474 12982 6474 12983 4338 12983 4311 12983 4310 12984 6474 12984 4311 12984 4310 12985 4342 12985 6474 12985 6474 12986 4342 12986 4345 12986 6471 12987 6474 12987 4345 12987 6471 12988 6472 12988 6474 12988 6474 12989 6472 12989 6473 12989 4308 12990 6474 12990 6473 12990 4308 12991 4307 12991 6474 12991 6474 12992 4307 12992 4356 12992 4306 12993 6474 12993 4356 12993 4306 12994 4305 12994 6474 12994 6474 12995 4305 12995 6475 12995 4304 12996 6474 12996 6475 12996 4304 12997 4303 12997 6474 12997 6474 12998 4303 12998 4301 12998 4300 12999 6474 12999 4301 12999 4300 13000 6476 13000 6474 13000 6474 13001 6476 13001 6430 13001 5855 13002 6477 13002 6478 13002 6478 13003 6477 13003 6438 13003 6478 13004 6479 13004 5855 13004 5855 13005 6479 13005 5856 13005 5859 13006 5856 13006 6323 13006 6323 13007 5856 13007 6479 13007 6552 13008 6482 13008 5843 13008 5843 13009 6482 13009 6438 13009 6557 13010 5843 13010 6438 13010 6557 13011 5771 13011 5843 13011 6557 13012 1391 13012 5771 13012 6557 13013 618 13013 1391 13013 6557 13014 6494 13014 618 13014 6480 13015 6481 13015 6482 13015 6482 13016 6481 13016 6479 13016 6438 13017 6479 13017 6478 13017 6438 13018 6482 13018 6479 13018 6479 13019 6481 13019 6282 13019 6282 13020 6481 13020 4027 13020 6483 13021 4027 13021 3943 13021 6483 13022 6282 13022 4027 13022 6483 13023 6175 13023 6282 13023 6483 13024 5850 13024 6175 13024 6483 13025 6484 13025 5850 13025 5850 13026 5764 13026 6175 13026 6175 13027 5764 13027 6098 13027 6098 13028 5764 13028 6485 13028 576 13029 6485 13029 3870 13029 6486 13030 576 13030 3870 13030 6486 13031 618 13031 576 13031 6486 13032 1391 13032 618 13032 6486 13033 1401 13033 1391 13033 5764 13034 6487 13034 6485 13034 6485 13035 6551 13035 3870 13035 1391 13036 832 13036 5771 13036 6490 13037 6098 13037 576 13037 576 13038 6098 13038 6485 13038 6175 13039 6488 13039 6282 13039 6488 13040 6175 13040 5862 13040 5862 13041 6175 13041 6489 13041 5862 13042 5858 13042 6488 13042 6488 13043 5858 13043 6282 13043 5853 13044 6489 13044 6056 13044 6056 13045 6489 13045 6175 13045 5864 13046 5865 13046 6490 13046 6490 13047 5865 13047 6098 13047 6490 13048 576 13048 5864 13048 5864 13049 576 
diff --git a/pyrevolve/SDF/joint.py b/pyrevolve/SDF/joint.py index 220ebec0f5..d8a834349e 100644 --- a/pyrevolve/SDF/joint.py +++ b/pyrevolve/SDF/joint.py @@ -44,6 +44,7 @@ def to_robot_config_sdf(self): servomotor = xml.etree.ElementTree.Element('rv:servomotor', { 'type': 'position', + # 'type': 'velocity', 'id': "{}__rotate".format(self._id), 'part_id': self._id, 'part_name': self._name, @@ -57,9 +58,9 @@ def to_robot_config_sdf(self): servomotor.attrib['coordinates'] = ';'.join(str(i) for i in self._coordinates) pid = xml.etree.ElementTree.SubElement(servomotor, 'rv:pid') - SDF.sub_element_text(pid, 'rv:p', 0.9) + SDF.sub_element_text(pid, 'rv:p', 1) SDF.sub_element_text(pid, 'rv:i', 0.0) - SDF.sub_element_text(pid, 'rv:d', 0.0) + SDF.sub_element_text(pid, 'rv:d', 0.00) SDF.sub_element_text(pid, 'rv:i_max', 0.0) SDF.sub_element_text(pid, 'rv:i_min', 0.0) # SDF.sub_element_text(pid, 'rv:cmd_max', 0.0) @@ -77,10 +78,10 @@ def __init__(self, axis: SDF.math.Vector3): limit = xml.etree.ElementTree.SubElement(self, 'limit') # TODO calibrate this (load from configuration?) - SDF.sub_element_text(limit, 'lower', -7.853982e-01) - SDF.sub_element_text(limit, 'upper', 7.853982e-01) - SDF.sub_element_text(limit, 'effort', 1.765800e-01) - SDF.sub_element_text(limit, 'velocity', 5.235988e+00) + SDF.sub_element_text(limit, 'lower', -1) #-7.853982e-01 + SDF.sub_element_text(limit, 'upper', 1) + SDF.sub_element_text(limit, 'effort', 9.4*9.8e-02) + SDF.sub_element_text(limit, 'velocity', 5.235988e-00) def set_xyz(self, xyz: SDF.math.Vector3): self.xyz.text = '{:e} {:e} {:e}'.format(xyz[0], xyz[1], xyz[2]) diff --git a/pyrevolve/SDF/link.py b/pyrevolve/SDF/link.py index e1f9a3e78d..82f6c08c69 100644 --- a/pyrevolve/SDF/link.py +++ b/pyrevolve/SDF/link.py @@ -3,7 +3,7 @@ from pyrevolve import SDF from pyrevolve.SDF.inertial import transform_inertia_tensor -from ..custom_logging.logger import logger +from pyrevolve.custom_logging.logger import logger class Link(SDF.Posable): diff --git a/pyrevolve/SDF/pose.py b/pyrevolve/SDF/pose.py index 46c4d40beb..5433f8cbfe 100644 --- a/pyrevolve/SDF/pose.py +++ b/pyrevolve/SDF/pose.py @@ -3,7 +3,7 @@ from pyrevolve import SDF import pyrevolve.SDF.math -from ..custom_logging.logger import logger +from pyrevolve.custom_logging.logger import logger class Pose(xml.etree.ElementTree.Element): diff --git a/pyrevolve/SDF/revolve_bot_sdf_builder.py b/pyrevolve/SDF/revolve_bot_sdf_builder.py index 8167c19993..08daa9ada6 100644 --- a/pyrevolve/SDF/revolve_bot_sdf_builder.py +++ b/pyrevolve/SDF/revolve_bot_sdf_builder.py @@ -291,6 +291,14 @@ def _sdf_brain_plugin_conf( else: robot_brain_sdf.append(robot_controller) + try: + robot_IMC = robot_brain.IMC.controller_sdf() + except: + print("IMC in-active!") + xml.etree.ElementTree.SubElement(robot_brain_sdf, 'rv:IMC', {'active': 'false'}) + else: + robot_brain_sdf.append(robot_IMC) + # sensors sensors_elem = xml.etree.ElementTree.SubElement(robot_brain_sdf, 'rv:sensors') for sensor in sensors: diff --git a/pyrevolve/angle/manage/world.py b/pyrevolve/angle/manage/world.py index 31c8fd9a9b..1dadc0414f 100644 --- a/pyrevolve/angle/manage/world.py +++ b/pyrevolve/angle/manage/world.py @@ -38,6 +38,7 @@ def __init__( world_address=None, output_directory=None, state_update_frequency=None, + listen_to_contacts=False, restore=None, _private=None ): @@ -87,6 +88,8 @@ def __init__( self.do_restore = None + self._listen_to_contacts = listen_to_contacts + # Sorry Matteo if False: #output_directory: if not restore: @@ -165,19 +168,21 @@ def 
poses_header(): async def create( cls, world_address=("127.0.0.1", 11345), - pose_update_frequency=10 + pose_update_frequency=10, + listen_to_contacts=False, ): """ Coroutine to instantiate a Revolve.Angle WorldManager :param pose_update_frequency: :param world_address: - :param analyzer_address: + :param listen_to_contacts: :return: """ self = cls( _private=cls._PRIVATE, world_address=world_address, - state_update_frequency=pose_update_frequency + state_update_frequency=pose_update_frequency, + listen_to_contacts=listen_to_contacts, ) await self._init(builder=None, generator=None) return self @@ -199,7 +204,7 @@ async def _init(self): if self.manager is not None: return - await (super(WorldManager, self)._init()) + await (super()._init()) # Subscribe to pose updates self.pose_subscriber = await self.manager.subscribe( @@ -208,11 +213,12 @@ async def _init(self): self._update_states ) - self.contact_subscriber = await self.manager.subscribe( - '/gazebo/default/physics/contacts', - 'gazebo.msgs.Contacts', - self._update_contacts - ) + if self._listen_to_contacts: + self.contact_subscriber = await self.manager.subscribe( + '/gazebo/default/physics/contacts', + 'gazebo.msgs.Contacts', + self._update_contacts + ) # Awaiting this immediately will lock the program update_state_future = self.set_state_update_frequency( @@ -230,7 +236,8 @@ async def _init(self): # Wait for connections await self.pose_subscriber.wait_for_connection() - await self.contact_subscriber.wait_for_connection() + if self._listen_to_contacts: + await self.contact_subscriber.wait_for_connection() await update_state_future if self.do_restore: diff --git a/pyrevolve/data_analisys/visualize_robot.py b/pyrevolve/data_analisys/visualize_robot.py index 09a0b7879d..9dfe211f32 100644 --- a/pyrevolve/data_analisys/visualize_robot.py +++ b/pyrevolve/data_analisys/visualize_robot.py @@ -7,9 +7,8 @@ from pyrevolve.custom_logging import logger from pyrevolve.revolve_bot import RevolveBot from pyrevolve.SDF.math import Vector3 -from pyrevolve.tol.manage import World +from pyrevolve.tol.manage.single_robot_world import SingleRobotWorld from pyrevolve.util.supervisor.supervisor_multi import DynamicSimSupervisor -from pyrevolve.evolution import fitness async def test_robot_run(robot_file_path: str): @@ -37,8 +36,10 @@ async def test_robot_run(robot_file_path: str): await asyncio.sleep(0.1) # Connect to the simulator and pause - connection = await World.create(settings, world_address=('127.0.0.1', settings.port_start)) + connection = await SingleRobotWorld.create(settings, world_address=('127.0.0.1', settings.port_start)) await asyncio.sleep(1) + await connection.pause(True) + await connection.reset(True) # init finished @@ -47,16 +48,13 @@ async def test_robot_run(robot_file_path: str): robot.update_substrate() robot.save_file(f'{robot_file_path}.sdf', conf_type='sdf') - await connection.pause(False) robot_manager = await connection.insert_robot(robot, Vector3(0, 0, 0.25), life_timeout=None) - await asyncio.sleep(1.0) + await connection.pause(False) # Start the main life loop while True: # Print robot fitness every second status = 'dead' if robot_manager.dead else 'alive' - print(f"Robot fitness ({status}) is \n" - f" OLD: {fitness.online_old_revolve(robot_manager)}\n" - f" DISPLAC: {fitness.displacement(robot_manager, robot)}\n" - f" DIS_VEL: {fitness.displacement_velocity(robot_manager, robot)}") + best_fitness = None if robot_manager.best_evaluation is None else robot_manager.best_evaluation.fitness + log.info(f"status: {status} - 
fitness: {best_fitness}") await asyncio.sleep(1.0) diff --git a/pyrevolve/examples/analyze_body.py b/pyrevolve/examples/analyze_body.py deleted file mode 100644 index 636ceabf22..0000000000 --- a/pyrevolve/examples/analyze_body.py +++ /dev/null @@ -1,68 +0,0 @@ -""" -Generates a bot using the code in `generated_sdf`, -and sends it to the body analyzer to have it analyzed. - -If the analysis is accepted, it outputs the bot, otherwise -it generates a new one. Writes the final bot's contents to -stdout, statistics are written to stderr. -""" -from __future__ import absolute_import -from __future__ import print_function - -import sys -import random - -from pyrevolve.sdfbuilder.math import Vector3 -from .generated_sdf import generate_robot, builder, robot_to_sdf -from ..gazebo import get_analysis_robot, BodyAnalyzer -from ..custom_logging.logger import logger - -import asyncio - - -if len(sys.argv) > 1: - seed = int(sys.argv[1]) -else: - seed = random.randint(0, 10000) - -random.seed(seed) -logger.info("Seed: {}".format(seed)) - - -async def analysis_func(): - analyzer = await (BodyAnalyzer.create(address=("127.0.0.1", 11346))) - - # Try a maximum of 100 times - for _ in range(100): - # Generate a new robot - robot = generate_robot() - - sdf = get_analysis_robot(robot, builder) - - # Find out its intersections and bounding box - intersections, bbox = await ( - analyzer.analyze_robot(robot, builder=builder)) - - if intersections: - logger.info("Invalid model - intersections detected.", file=sys.stderr) - else: - logger.info("No model intersections detected!", file=sys.stderr) - if bbox: - # Translate the model in z direction so it stands directly on - # the ground - logger.info("Model bounding box: ({}, {}, {}), ({}, {}, {})".format( - bbox.min.x, - bbox.min.y, - bbox.min.z, - bbox.max.x, - bbox.max.y, - bbox.max.z - ), file=sys.stderr) - model = sdf.elements[0] - model.translate(Vector3(0, 0, -bbox.min.z)) - - logger.info(str(robot_to_sdf(robot, "test_bot", "controllerplugin.so"))) - break - -loop = asyncio.get_event_loop() -loop.run_until_complete(analysis_func()) diff --git a/pyrevolve/examples/from_yaml.py b/pyrevolve/examples/from_yaml.py deleted file mode 100644 index 67851e8234..0000000000 --- a/pyrevolve/examples/from_yaml.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import absolute_import -from __future__ import print_function - -from pyrevolve.build.sdf import RobotBuilder -from pyrevolve.build.sdf import BodyBuilder -from pyrevolve.build.sdf import NeuralNetBuilder -from pyrevolve.convert import yaml_to_proto -from pyrevolve.sdfbuilder import SDF -from pyrevolve.sdfbuilder.math import Vector3 - -from .generated_sdf import body_spec, brain_spec - -from ..custom_logging.logger import logger - -bot_yaml = ''' ---- -body: - id : Core - type : Core - children: - 1: - id: Hinge - type: Hinge - params: - length: 0.5 - red: 1.0 - green: 0.0 - blue: 0.0 - 4: - id: Wheel - type: Wheel - params: - red: 0.0 - green: 1.0 - blue: 0.0 - 5: - id: Wheel2 - type: Wheel - params: - red: 0.0 - green: 1.0 - blue: 0.0 -brain: - params: - Wheel-out-0: - type: Oscillator - period: 3 - Wheel2-out-0: - type: Oscillator - period: 3 -''' - -bot = yaml_to_proto(body_spec, brain_spec, bot_yaml) -builder = RobotBuilder(BodyBuilder(body_spec), NeuralNetBuilder(brain_spec)) -model = builder.sdf_robot(bot, "libRobotControlPlugin.so") -model.translate(Vector3(0, 0, 0.5)) -sdf = SDF() -sdf.add_element(model) -logger.info(str(sdf)) diff --git a/pyrevolve/examples/generated_sdf.py 
b/pyrevolve/examples/generated_sdf.py deleted file mode 100644 index f00d3f4993..0000000000 --- a/pyrevolve/examples/generated_sdf.py +++ /dev/null @@ -1,393 +0,0 @@ -""" -Demonstrates creating a simple SDF bot from a spec and a YAML file. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import math - -from pyrevolve.sdfbuilder.sensor import Sensor as SdfSensor -from pyrevolve.sdfbuilder.math import Vector3 -from pyrevolve.sdfbuilder import SDF, Limit -from pyrevolve.sdfbuilder.structure import Box as BoxGeom - -# Module imports -from pyrevolve.generate import BodyGenerator -from pyrevolve.generate import NeuralNetworkGenerator - -from pyrevolve.spec import BodyImplementation -from pyrevolve.spec import default_neural_net -from pyrevolve.spec import PartSpec -from pyrevolve.spec import ParamSpec -from pyrevolve.spec import Robot - -from pyrevolve.build.sdf.body import Box -from pyrevolve.build.sdf.body import Cylinder -from pyrevolve.build.sdf.body import ComponentJoint as Joint - -from pyrevolve.build.sdf import RobotBuilder -from pyrevolve.build.sdf import BodyBuilder -from pyrevolve.build.sdf import NeuralNetBuilder -from pyrevolve.build.sdf import VelocityMotor -from pyrevolve.build.sdf import PID - -from ..custom_logging.logger import logger - -# Some configuration -# This is the number of times per second we will call our -# robot's brain update (in the default controller). We'll -# also use it as -UPDATE_RATE = 5 - - -# A utility function to generate color properties -channel_func = lambda channel: ParamSpec(channel, min_value=0, max_value=1, default=0.5) -color_params = [ - channel_func("red"), channel_func("green"), channel_func("blue") -] - - -class ColorMixin(object): - """ - Mixin class for "color" parts. Needs to be mixed - in with a body part, or it won't work. - """ - - def apply_color(self): - """ - Applies the "red", "green" and "blue" arguments - to all links in this body part. - """ - self.make_color( - self.part_params["red"], - self.part_params["green"], - self.part_params["blue"]) - - -# Below, we define some body parts -# The first is a simple box that serves as a root component -# We have the box include an IMU Sensor that registers the -# component's acceleration -class Core(Box, ColorMixin): - X = 0.5 - Y = 0.8 - Z = 0.5 - MASS = 1.0 - - def _initialize(self, **kwargs): - """ - We override the default box's initialize method to - include the color of the box. - """ - # Don't forget to call super when a parent class actually - # does something (Box, in this case). - super(Core, self)._initialize(**kwargs) - - # Now we will add the IMU sensor. First, we create the - # sensor as we would like to have it in SDF... - imu = SdfSensor("imu_sensor", "imu", update_rate=UPDATE_RATE) - - # .. we then add this to a specific component using `add_sensor`. - # The second argument to this function allows us to override - # the name of the sensor handler that will be loaded. It defaults - # to the sensor type, so specifying "imu" here has the same - # result as leaving it empty. - self.component.add_sensor(imu, "imu") - - # Apply generated color - self.apply_color() - - -# Next, we illustrate a more complex body part, read -# the class details. -class PassiveHinge(Box, ColorMixin): - """ - A passive joint (i.e. it can move but isn't actuated) with - two attached blocks. One of these blocks is of fixed size, - the other has an x-size determined by a parameter. 
- """ - X = 0.5 - Y = 0.3 - Z = 0.3 - MASS = 0.1 - - # Offset of the joint from the box edges - JOINT_OFFSET = 0.1 - - def _initialize(self, **kwargs): - """ - Initialize method to generate the hinge. Inheriting from - "box" makes sure there is a box of the given size in - `self.link`. - """ - super(PassiveHinge, self)._initialize(**kwargs) - - # Create the variable size block. `create_link` is the recommended - # way of doing this, because it sets some non-default link properties - # (such as self_collide) which you generally need. - length = kwargs["length"] - self.var_block = self.create_component( - BoxGeom(length, self.y, self.z, 0.1), "var-block") - - # We move the block in the x-direction so that it - # just about overlaps with the other block (to - # make it seem like a somewhat realistic joint) - self.var_block.translate( - Vector3(0.5 * (length + self.x) - self.JOINT_OFFSET, 0, 0)) - - # Now create a revolute joint at this same position. The - # joint axis is in the y-direction. - axis = Vector3(0, 1, 0) - passive_joint = Joint( - joint_type="revolute", - parent=self.component, - child=self.var_block, - axis=axis) - - # Set some movement limits on the joint - passive_joint.axis.limit = Limit( - lower=math.radians(-45), - upper=math.radians(45), - effort=1.0) - - # Set the joint position - in the child frame! - passive_joint.set_position( - Vector3(-0.5 * length + self.JOINT_OFFSET, 0, 0)) - - # Don't forget to add the joint and the link - self.add_joint(passive_joint) - - # Apply the generated color - self.apply_color() - - def get_slot(self, slot): - """ - This method should return the SDF link corresponding to a - certain slot. - """ - # Throw a clear error if the slot doesn't exist - self.check_slot(slot) - - # Slot 0 is the fixed box, slot 1 is the variable sized block - return self.component if slot == 0 else self.var_block - - def get_slot_position(self, slot): - """ - Returns the attachment position of each of the slots. - """ - # Again, prevent errors - self.check_slot(slot) - - # The constructor of `BodyPart` stores the initialization's kwargs - # parameters in `self.part_params`. - length = self.part_params["length"] - - # The center of the base box lies at (0, 0), move 1/2 x to the - # left to get that slot, or move 1/2 x to the right, plus the - # variable length minus the offset to get the other. - return Vector3(-0.5 * self.x, 0, 0) if slot == 0 \ - else Vector3(0.5 * self.x + length - self.JOINT_OFFSET, 0, 0) - - def get_slot_normal(self, slot): - """ - Return the slot normal vectors; in this case they are equal - to the position vectors except for their length. - - Actually, it is not strictly required for normal and tangent - vectors to be normalized, but it is good practice to do so. - """ - return self.get_slot_position(slot).normalized() - - def get_slot_tangent(self, slot): - """ - The tangent vectors determine the "zero orientation", meaning - if a body part has an orientation of 0 it will have its tangent - vector aligned with its parent. The tangent vector has to be - orthogonal to the slot normal. - - We have no specific orientation requirements, so we simply - always return one tangent vector which is orthogonal to both - slot normals, i.e. the vector (0, 0, 1). - """ - self.check_slot(slot) - return Vector3(0, 0, 1) - - -# The second body part is a motorized wheel. For this, we start -# with a cylinder, and extend it to include a thin -# connecting block and a motor. 
-class Wheel(Cylinder, ColorMixin): - RADIUS = 0.6 - MASS = 0.5 - LENGTH = 0.2 - MOTOR_SIZE = 0.1 - - def _initialize(self, **kwargs): - """ - :param kwargs: - :return: - """ - # Call super to initialize the cylinder part of the wheel - super(Wheel, self)._initialize(**kwargs) - - # Create the small box that serves as the motor - box_size = self.MOTOR_SIZE - self.attachment = self.create_component( - BoxGeom(box_size, box_size, box_size, 0.01), "cylinder-attach") - - # Get attachment position and axis of the motor joint - anchor = Vector3(0, 0, 0.5 * self.LENGTH) - axis = Vector3(0, 0, 1) - - # Size and position the box - self.attachment.set_position(anchor + Vector3(0, 0, 0.5 * box_size)) - - # Create revolute joint. Remember: joint position is in child frame - motor_joint = Joint( - joint_type="revolute", - parent=self.component, - child=self.attachment, - axis=axis) - motor_joint.set_position(Vector3(0, 0, -0.5 * box_size)) - - # Set a force limit on the joint - motor_joint.axis.limit = Limit(effort=0.01) - self.add_joint(motor_joint) - - # Register a joint motor with a maximum velocity of - # 50 rounds per minute (the speed is in radians per second) - # We also give it a simple PID controller - pid = PID(proportional_gain=1.0, integral_gain=0.1) - max_speed = 2 * math.pi * 50.0 / 60 - self.motors.append(VelocityMotor( - part_id=self.id, - motor_id="rotate", - joint=motor_joint, - pid=pid, - min_velocity=-max_speed, - max_velocity=max_speed)) - self.apply_color() - - def get_slot(self, slot_id): - """ - Override get_slot, because we should return the attachment. - :param slot_id: - :return: - """ - return self.attachment - - def get_slot_position(self, slot_id): - """ - Modify `get_slot_position` to return the attachment of the - motor instead. - :param slot_id: - :return: - """ - v = super(Wheel, self).get_slot_position(slot_id) - return v + Vector3(0, 0, self.MOTOR_SIZE) - - -body_spec = BodyImplementation( - { - ("Core", "C"): PartSpec( - body_part=Core, - arity=6, - inputs=6, - params=color_params - ), - ("Wheel", "W"): PartSpec( - body_part=Wheel, - arity=1, - outputs=1, - - # Add color parameters to this part - params=color_params - ), - "Hinge": PartSpec( - body_part=PassiveHinge, - arity=2, - params=color_params + [ - ParamSpec( - name="length", - min_value=0.1, - max_value=1, - default=0.5) - ] - ) - } -) - -# For the brain, we use the default neural network -brain_spec = default_neural_net() - -# Specify a body generator for the specification -body_gen = BodyGenerator( - body_spec, - - # List all parts that can serve as the robot root - root_parts=["Core"], - - # List all parts that can be attached - attach_parts=["Wheel", "Hinge"], - - # Set the maximum number of used parts. The - # actual number will be determined by a - # random pick and some input / output constraints. - max_parts=15, - - # The maximum number of input (i.e. sensory) values - # our robot may have. - max_inputs=8, - - # The maximum number ouf output (i.e. motory) values - # our robot may have. 
- max_outputs=12 -) - -# Also get a brain generator -brain_gen = NeuralNetworkGenerator( - brain_spec, - max_hidden=10 -) - -# Create a builder to convert the protobuf to SDF -builder = RobotBuilder(BodyBuilder(body_spec), NeuralNetBuilder(brain_spec)) - - -def generate_robot(robot_id=0): - # Create a protobuf robot - robot = Robot() - robot.id = robot_id - - # Generate a body - body = body_gen.generate() - robot.body.CopyFrom(body) - - # The neural network generator can get the interface from the body - brain = brain_gen.generate_from_body(body, body_spec) - robot.brain.CopyFrom(brain) - - return robot - - -def robot_to_sdf(robot, name="test_bot", plugin_controller=None): - model = builder.sdf_robot( - robot=robot, - controller_plugin=plugin_controller, - update_rate=UPDATE_RATE, - name=name) - model_sdf = SDF() - model_sdf.add_element(model) - return model_sdf - - -def generate_sdf_robot(robot_id=0, plugin_controller=None, name="test_bot"): - robot = generate_robot(robot_id) - return robot_to_sdf(robot, name, plugin_controller) - - -if __name__ == "__main__": - # Create SDF and output - sdf = generate_sdf_robot() - logger.info(str(sdf)) diff --git a/pyrevolve/revolve_bot/brain/IMC.py b/pyrevolve/revolve_bot/brain/IMC.py new file mode 100644 index 0000000000..c38753257a --- /dev/null +++ b/pyrevolve/revolve_bot/brain/IMC.py @@ -0,0 +1,64 @@ +""" +Note: Parameters are not set in this file. They are imported from the robot yaml-file, containing the +physical properties of the robot, as well as the brain (learner and controller) and the corresponding +parameters. +""" + +import xml.etree.ElementTree +from .base import Brain + + +class BrainIMC(Brain): + + def __init__(self): + # CPG hyper-parameters + self.active = None + self.restore_checkpoint = None + self.save_checkpoint = None + self.learning_rate = None + self.beta1 = None + self.beta2 = None + self.weight_decay = None + + @staticmethod + def from_yaml(yaml_object): + BIMC = BrainIMC() + try: + for key, value in yaml_object.items(): + try: + setattr(BIMC, key, value) + except: + print(f"Couldn't set {key}, {value}") + except: + print("No IMC") + + for key in vars(BIMC): + if getattr(BIMC, key) is None: + print(f"Didn't load IMC param {key}") + raise RuntimeError(f"Didn't load IMC paramater: {key}") + + return BIMC + + def to_yaml(self): + return { + 'IMC': { + 'active': self.active, + 'restore_checkpoint': self.restore_checkpoint, + 'save_checkpoint': self.save_checkpoint, + 'learning_rate': self.learning_rate, + 'beta1': self.beta1, + 'beta2': self.beta2, + 'weight_decay': self.weight_decay + } + } + + def controller_sdf(self): + return xml.etree.ElementTree.Element('rv:IMC', { + 'active': str(self.active), + 'restore_checkpoint': str(self.restore_checkpoint), + 'save_checkpoint': str(self.save_checkpoint), + 'learning_rate': str(self.learning_rate), + 'beta1': str(self.beta1), + 'beta2': str(self.beta2), + 'weight_decay': str(self.weight_decay) + }) diff --git a/pyrevolve/revolve_bot/brain/__init__.py b/pyrevolve/revolve_bot/brain/__init__.py index aad5efdbfe..a60eb56136 100644 --- a/pyrevolve/revolve_bot/brain/__init__.py +++ b/pyrevolve/revolve_bot/brain/__init__.py @@ -4,3 +4,4 @@ from .bo_cpg import BrainCPGBO from .cpg import BrainCPG from .cppn_cpg import BrainCPPNCPG +from .IMC import BrainIMC diff --git a/pyrevolve/revolve_bot/brain/base.py b/pyrevolve/revolve_bot/brain/base.py index e5e715c212..c16548f96a 100644 --- a/pyrevolve/revolve_bot/brain/base.py +++ b/pyrevolve/revolve_bot/brain/base.py @@ -1,31 +1,45 @@ -import 
pyrevolve.revolve_bot.brain +from pyrevolve.revolve_bot import brain as brains +from pyrevolve.revolve_bot.brain.learner import Learner class Brain(object): + TYPE = 'NONE' + + def __init__(self): + self.learner = None @staticmethod def from_yaml(yaml_brain): brain_type = yaml_brain['type'] - if brain_type == pyrevolve.revolve_bot.brain.BrainNN.TYPE: - return pyrevolve.revolve_bot.brain.BrainNN.from_yaml(yaml_brain) - elif brain_type == pyrevolve.revolve_bot.brain.BrainRLPowerSplines.TYPE: - return pyrevolve.revolve_bot.brain.BrainRLPowerSplines.from_yaml(yaml_brain) - elif brain_type == pyrevolve.revolve_bot.brain.BrainCPGBO.TYPE: - return pyrevolve.revolve_bot.brain.BrainCPGBO.from_yaml(yaml_brain) - elif brain_type == pyrevolve.revolve_bot.brain.BrainCPG.TYPE: - return pyrevolve.revolve_bot.brain.BrainCPG.from_yaml(yaml_brain) - elif brain_type == pyrevolve.revolve_bot.brain.BrainCPPNCPG.TYPE: - return pyrevolve.revolve_bot.brain.BrainCPPNCPG.from_yaml(yaml_brain) + brain = None + if brain_type == brains.BrainNN.TYPE: + brain = brains.BrainNN.from_yaml(yaml_brain) + elif brain_type == brains.BrainRLPowerSplines.TYPE: + brain = brains.BrainRLPowerSplines.from_yaml(yaml_brain) + elif brain_type == brains.BrainCPGBO.TYPE: + brain = brains.BrainCPGBO.from_yaml(yaml_brain) + elif brain_type == brains.BrainCPG.TYPE: + brain = brains.BrainCPG.from_yaml(yaml_brain) + elif brain_type == brains.BrainCPPNCPG.TYPE: + brain = brains.BrainCPPNCPG.from_yaml(yaml_brain) else: print("No matching brain type defined in yaml file.") - return Brain() + brain = Brain() + + brain.learner = Learner.from_yaml(yaml_brain['learner']) + if 'IMC' in yaml_brain: + brain.IMC = brains.BrainIMC.from_yaml(yaml_brain['IMC']) + return brain def to_yaml(self): - return {} + return { + 'type': self.TYPE, + 'learner': self.learner.to_yaml() + } def learner_sdf(self): - return None + return self.learner.learner_sdf() def controller_sdf(self): return None diff --git a/pyrevolve/revolve_bot/brain/cpg.py b/pyrevolve/revolve_bot/brain/cpg.py index e74123b7f4..d594eb85fb 100644 --- a/pyrevolve/revolve_bot/brain/cpg.py +++ b/pyrevolve/revolve_bot/brain/cpg.py @@ -39,16 +39,15 @@ def __init__(self): @staticmethod def from_yaml(yaml_object): BCPG = BrainCPG() - for my_type in ["controller", "learner"]: #, "meta"]: - try: - my_object = yaml_object[my_type] - for key, value in my_object.items(): - try: - setattr(BCPG, key, value) - except: - print("Couldn't set {}, {}", format(key, value)) - except: - print("Didn't load {} parameters".format(my_type)) + try: + my_object = yaml_object["controller"] + for key, value in my_object.items(): + try: + setattr(BCPG, key, value) + except: + print(f"Couldn't set {key}, {value}") + except: + print("Didn't load \"controller\" parameters") return BCPG @@ -76,11 +75,6 @@ def to_yaml(self): } } - def learner_sdf(self): - return xml.etree.ElementTree.Element('rv:learner', { - 'type': 'offline', - }) - def controller_sdf(self): return xml.etree.ElementTree.Element('rv:controller', { 'type': 'cpg', diff --git a/pyrevolve/revolve_bot/brain/cppn_cpg.py b/pyrevolve/revolve_bot/brain/cppn_cpg.py index 86c4fb07fe..fa9981ab50 100644 --- a/pyrevolve/revolve_bot/brain/cppn_cpg.py +++ b/pyrevolve/revolve_bot/brain/cppn_cpg.py @@ -22,7 +22,9 @@ def to_yaml(self): @staticmethod def from_yaml(yaml_object): cppn_genome = multineat.Genome() - cppn_genome.Deserialize(yaml_object['controller']['cppn'].replace('inf', str(sys.float_info.max))) + cppn_genome_str = yaml_object['controller']['cppn'] + cppn_genome_str_fixed 
= cppn_genome_str.replace('inf', str(sys.float_info.max)) + cppn_genome.Deserialize(cppn_genome_str_fixed) del yaml_object['controller']['cppn'] BCPG = BrainCPPNCPG(cppn_genome) diff --git a/pyrevolve/revolve_bot/brain/learner/__init__.py b/pyrevolve/revolve_bot/brain/learner/__init__.py new file mode 100644 index 0000000000..41df767a59 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/__init__.py @@ -0,0 +1,5 @@ +from .base import Learner +from .bo import BOLearner +from .hyperneat import HyperNEATLearner +from .nipes import NIPESLearner +from .de import DELearner diff --git a/pyrevolve/revolve_bot/brain/learner/base.py b/pyrevolve/revolve_bot/brain/learner/base.py new file mode 100644 index 0000000000..f7cb5d8cb9 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/base.py @@ -0,0 +1,32 @@ +import xml.etree.ElementTree +from pyrevolve.revolve_bot.brain import learner as learners + + +class Learner(object): + TYPE = 'offline' + + @staticmethod + def from_yaml(yaml_learner): + brain_type = yaml_learner['type'] + + if brain_type == learners.bo.BOLearner.TYPE: + return learners.bo.BOLearner.from_yaml(yaml_learner) + if brain_type == learners.hyperneat.HyperNEATLearner.TYPE: + return learners.hyperneat.HyperNEATLearner.from_yaml(yaml_learner) + if brain_type == learners.nipes.NIPESLearner.TYPE: + return learners.nipes.NIPESLearner.from_yaml(yaml_learner) + if brain_type == learners.de.DELearner.TYPE: + return learners.de.DELearner.from_yaml(yaml_learner) + else: + print("No matching brain/learner type defined in yaml file.") + return Learner() + + def to_yaml(self): + return { + 'type': self.TYPE + } + + def learner_sdf(self): + return xml.etree.ElementTree.Element('rv:learner', { + 'type': 'offline', + }) diff --git a/pyrevolve/revolve_bot/brain/learner/bo.py b/pyrevolve/revolve_bot/brain/learner/bo.py new file mode 100644 index 0000000000..a8687bc278 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/bo.py @@ -0,0 +1,51 @@ +import xml.etree.ElementTree + +from .base import Learner + + +class BOLearner(Learner): + TYPE = 'bo' + + def __init__(self): + self.n_init_samples = 1 + self.n_learning_iterations = 100 + self.evaluation_rate = 15.0 + self.init_method = 'LHS' + self.kernel_noise = 0.00000001 + self.kernel_optimize_noise = False + self.kernel_sigma_sq = 0.222 + self.kernel_l = 0.55 + self.kernel_squared_exp_ard_k = 4 + self.acqui_gpucb_delta = 0.5 + self.acqui_ucb_alpha = 0.44 + self.acqui_ei_jitter = 0.0 + self.acquisition_function = "UCB" + + @staticmethod + def from_yaml(yaml_learner): + # TODO read BO params + return BOLearner() + + # def to_yaml(self): + # #TODO save BO params + # return { + # type: self.TYPE + # } + + def learner_sdf(self): + return xml.etree.ElementTree.Element('rv:learner', { + 'type': 'bo', + 'n_init_samples': str(self.n_init_samples), + 'n_learning_iterations': str(self.n_learning_iterations), + 'evaluation_rate': str(self.evaluation_rate), + 'init_method': str(self.init_method), + 'kernel_noise': str(self.kernel_noise), + 'kernel_optimize_noise': str(self.kernel_optimize_noise), + 'kernel_sigma_sq': str(self.kernel_sigma_sq), + 'kernel_l': str(self.kernel_l), + 'kernel_squared_exp_ard_k': str(self.kernel_squared_exp_ard_k), + 'acqui_gpucb_delta': str(self.acqui_gpucb_delta), + 'acqui_ucb_alpha': str(self.acqui_ucb_alpha), + 'acqui_ei_jitter': str(self.acqui_ei_jitter), + 'acquisition_function': str(self.acquisition_function), + }) diff --git a/pyrevolve/revolve_bot/brain/learner/de.py b/pyrevolve/revolve_bot/brain/learner/de.py new file mode 
100644 index 0000000000..0618d98269 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/de.py @@ -0,0 +1,48 @@ +import xml.etree.ElementTree + +from .base import Learner + + +class DELearner(Learner): + TYPE = 'de' + + def __init__(self): + self.subtype = None + self.CR = 0.9 + self.F = 0.3 + self.n_parents = 3 + + self.verbose = False + self.population_size = 10 + self.max_eval = 300 + + @staticmethod + def from_yaml(yaml_learner): + LDE = DELearner() + try: + for key, value in yaml_learner.items(): + try: + setattr(LDE, key, value) + except: + print(f"Couldn't set {key}, {value}") + except: + print("No DE") + + for key in vars(LDE): + if getattr(LDE, key) is None: + raise RuntimeError(f"Didn't load {LDE.TYPE} param {key}") + + return LDE + + def learner_sdf(self): + return xml.etree.ElementTree.Element('rv:learner', { + 'type': 'de', + 'subtype': str(self.subtype), + 'CR': str(self.CR), + 'F': str(self.F), + 'n_parents': str(self.n_parents), + + 'verbose': str(self.verbose), + 'population_size': str(self.population_size), + 'max_eval': str(self.max_eval), + }) diff --git a/pyrevolve/revolve_bot/brain/learner/hyperneat.py b/pyrevolve/revolve_bot/brain/learner/hyperneat.py new file mode 100644 index 0000000000..7e0d5e1806 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/hyperneat.py @@ -0,0 +1,88 @@ +import sys +import xml.etree.ElementTree +import multineat +from .base import Learner + +class HyperNEATLearner(Learner): + TYPE = 'hyperneat' + + def __init__(self): + self.params = multineat.Parameters() + # self.to_yaml() + + + + def to_yaml(self): + obj = super().to_yaml() + obj['learner']['hyperneat'] = self.params + return obj + + @staticmethod + def from_yaml(yaml_object): + HyperNL = HyperNEATLearner() + for yaml_params in ["params"]: + try: + my_object = yaml_object[yaml_params] + for key, value in my_object.items(): + try: + setattr(HyperNL.params, key, value) + except: + print("Couldn't set {}, {}", format(key, value)) + except: + print("Didn't load {} parameters".format(yaml_params)) + + return HyperNL + + def learner_sdf(self): + learner = xml.etree.ElementTree.Element('rv:learner', { + 'type': 'hyperneat', + }) + learner.append(self.params_sdf()) + return learner + + def params_sdf(self): + assert(self.params is not None) + + element = xml.etree.ElementTree.Element('rv:params', { + 'PopulationSize': str(self.params.PopulationSize), + 'DynamicCompatibility': str(self.params.DynamicCompatibility), + 'NormalizeGenomeSize': str(self.params.NormalizeGenomeSize), + 'WeightDiffCoeff': str(self.params.WeightDiffCoeff), + 'CompatTreshold': str(self.params.CompatTreshold), + 'YoungAgeTreshold': str(self.params.YoungAgeTreshold), + # 'SpeciesMaxStagnation ': str(self.params.SpeciesMaxStagnation), # Not present in multineat.Population()? 
+ 'OldAgeTreshold': str(self.params.OldAgeTreshold), + 'MinSpecies': str(self.params.MinSpecies), + 'MaxSpecies': str(self.params.MaxSpecies), + 'RouletteWheelSelection': str(self.params.RouletteWheelSelection), + 'RecurrentProb': str(self.params.RecurrentProb), + 'OverallMutationRate': str(self.params.OverallMutationRate), + 'ArchiveEnforcement': str(self.params.ArchiveEnforcement), + 'MutateWeightsProb': str(self.params.MutateWeightsProb), + 'WeightMutationMaxPower': str(self.params.WeightMutationMaxPower), + 'WeightReplacementMaxPower': str(self.params.WeightReplacementMaxPower), + 'MutateWeightsSevereProb': str(self.params.MutateWeightsSevereProb), + 'WeightMutationRate': str(self.params.WeightMutationRate), + 'WeightReplacementRate': str(self.params.WeightReplacementRate), + 'MaxWeight': str(self.params.MaxWeight), + 'MutateAddNeuronProb': str(self.params.MutateAddNeuronProb), + 'MutateAddLinkProb': str(self.params.MutateAddLinkProb), + 'MutateRemLinkProb': str(self.params.MutateRemLinkProb), + 'MinActivationA': str(self.params.MinActivationA), + 'MaxActivationA': str(self.params.MaxActivationA), + 'ActivationFunction_SignedSigmoid_Prob': str(self.params.ActivationFunction_SignedSigmoid_Prob), + 'ActivationFunction_UnsignedSigmoid_Prob': str(self.params.ActivationFunction_UnsignedSigmoid_Prob), + 'ActivationFunction_Tanh_Prob': str(self.params.ActivationFunction_Tanh_Prob), + 'ActivationFunction_SignedStep_Prob': str(self.params.ActivationFunction_SignedStep_Prob), + 'CrossoverRate': str(self.params.CrossoverRate), + 'MultipointCrossoverRate': str(self.params.MultipointCrossoverRate), + 'SurvivalRate': str(self.params.SurvivalRate), + 'MutateNeuronTraitsProb': str(self.params.MutateNeuronTraitsProb), + 'MutateLinkTraitsProb': str(self.params.MutateLinkTraitsProb), + 'AllowLoops': str(self.params.AllowLoops), + 'AllowClones': str(self.params.AllowClones), + }) + return element + + + diff --git a/pyrevolve/revolve_bot/brain/learner/nipes.py b/pyrevolve/revolve_bot/brain/learner/nipes.py new file mode 100644 index 0000000000..20eb27af07 --- /dev/null +++ b/pyrevolve/revolve_bot/brain/learner/nipes.py @@ -0,0 +1,62 @@ +import xml.etree.ElementTree + +from .base import Learner + + +class NIPESLearner(Learner): + TYPE = 'nipes' + + def __init__(self): + self.stagnation_length = 20 + self.elitist_restart = True + self.CMAES_step = 1.0 + self.novelty_k_value = 15 + self.novelty_ratio = 1. 
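+        # These defaults can be overridden from the robot YAML via from_yaml() below;
+        # learner_sdf() then emits every value verbatim as a string attribute of the
+        # <rv:learner type="nipes"> SDF element consumed by the simulator side.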
+ self.novelty_decrement = 0.05 + self.novelty_threshold = 0.9 + self.novelty_archive_probability = 0.4 + self.population_stagnation_threshold = 0.05 + self.restart = True + self.incremental_population = True + + self.verbose = False + self.population_size = 10 + self.max_eval = 300 + + @staticmethod + def from_yaml(yaml_learner): + LNIPES = NIPESLearner() + try: + for key, value in yaml_learner.items(): + try: + setattr(LNIPES, key, value) + except: + print(f"Couldn't set {key}, {value}") + except: + print("No NIPES") + + for key in vars(LNIPES): + if getattr(LNIPES, key) is None: + raise RuntimeError(f"Didn't load {LNIPES.TYPE} param {key}") + + return LNIPES + + def learner_sdf(self): + return xml.etree.ElementTree.Element('rv:learner', { + 'type': 'nipes', + 'stagnation_length': str(self.stagnation_length), + 'elitist_restart': str(self.elitist_restart), + 'CMAES_step': str(self.CMAES_step), + 'novelty_k_value': str(self.novelty_k_value), + 'novelty_ratio': str(self.novelty_ratio), + 'novelty_decrement': str(self.novelty_decrement), + 'novelty_threshold': str(self.novelty_threshold), + 'novelty_archive_probability': str(self.novelty_archive_probability), + 'population_stagnation_threshold': str(self.population_stagnation_threshold), + 'restart': str(self.restart), + 'incremental_population': str(self.incremental_population), + + 'verbose': str(self.verbose), + 'population_size': str(self.population_size), + 'max_eval': str(self.max_eval), + }) diff --git a/pyrevolve/revolve_bot/revolve_module.py b/pyrevolve/revolve_bot/revolve_module.py index dd21849f8c..7e2601adfc 100644 --- a/pyrevolve/revolve_bot/revolve_module.py +++ b/pyrevolve/revolve_bot/revolve_module.py @@ -245,8 +245,8 @@ class CoreModule(RevolveModule): TYPE = "CoreComponent" VISUAL_MESH = 'model://rg_robot/meshes/CoreComponent.dae' SLOT_COORDINATES = 0.089 / 2.0 - COLLISION_BOX = (0.089, 0.089, 0.045) - MASS = grams(90) + COLLISION_BOX = (0.089, 0.089, 0.0603) + MASS = grams(250) def __init__(self): super().__init__() @@ -273,15 +273,16 @@ class ActiveHingeModule(RevolveModule): TYPE = 'ActiveHinge' VISUAL_MESH_FRAME = 'model://rg_robot/meshes/ActiveHinge_Frame.dae' VISUAL_MESH_SERVO = 'model://rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae' - COLLISION_BOX_FRAME = (2.20e-02, 3.575e-02, 1.0e-02) - COLLISION_BOX_SERVO = (2.45e-02, 2.575e-02, 1.5e-02) - COLLISION_BOX_SERVO_2 = (1.0e-3, 3.4e-2, 3.4e-02) + + COLLISION_BOX_FRAME = (4.525e-02, 5.3e-02, 1.65891e-02) + COLLISION_BOX_SERVO = (5.83e-02, 5.12e-02, 2.0e-02) + COLLISION_BOX_SERVO_2 = (2.0e-3, 5.3e-2, 5.3e-02) COLLISION_BOX_SERVO_OFFSET = ( SDF.math.Vector3(0, 0, 0), - SDF.math.Vector3(-0.0091, 0, 0), + SDF.math.Vector3(-1.0e-02, 0, 0), ) - MASS_FRAME = grams(1.7) - MASS_SERVO = grams(9) + MASS_FRAME = grams(11) + MASS_SERVO = grams(58) def __init__(self): super().__init__() @@ -318,12 +319,12 @@ def to_sdf(self, tree_depth='', parent_link=None, child_link=None): visual_servo.append(geometry) collision_servo = SDF.Collision(name_servo, self.MASS_SERVO) - collision_servo.translate(SDF.math.Vector3(0.002375, 0, 0)) + collision_servo.translate(SDF.math.Vector3(-0.018, 0, 0)) geometry = SDF.BoxGeometry(self.COLLISION_BOX_SERVO) collision_servo.append(geometry) collision_servo_2 = SDF.Collision(name_servo2, 0) - collision_servo_2.translate(SDF.math.Vector3(0.01175, 0.001, 0)) + collision_servo_2.translate(SDF.math.Vector3(0.02815, 0, 0)) geometry = SDF.BoxGeometry(self.COLLISION_BOX_SERVO_2) collision_servo_2.append(geometry) @@ -335,7 +336,7 @@ def to_sdf(self, 
tree_depth='', parent_link=None, child_link=None): coordinates=self.substrate_coordinates, motorized=True) - joint.set_position(SDF.math.Vector3(-0.0085, 0, 0)) + joint.set_position(SDF.math.Vector3(-0.0299, 0, 0)) return visual_frame, \ [collision_frame], \ @@ -389,9 +390,9 @@ class BrickModule(RevolveModule): """ TYPE = "FixedBrick" VISUAL_MESH = 'model://rg_robot/meshes/FixedBrick.dae' - SLOT_COORDINATES = 3.8e-2 / 2.0 - COLLISION_BOX = (4.1e-2, 4.1e-2, 3.55e-02) - MASS = grams(10.2) + SLOT_COORDINATES = 0.06288625/ 2.0 + COLLISION_BOX = (0.06288625, 0.06288625, 0.0603) + MASS = grams(30) def __init__(self): super().__init__() diff --git a/pyrevolve/revolve_bot/revolve_moduleV1.py b/pyrevolve/revolve_bot/revolve_moduleV1.py new file mode 100644 index 0000000000..e5522aea21 --- /dev/null +++ b/pyrevolve/revolve_bot/revolve_moduleV1.py @@ -0,0 +1,526 @@ +""" +Class containing the body parts to compose a Robogen robot +""" +from collections import OrderedDict +from enum import Enum + +from pyrevolve import SDF + + +# MEASUREMENT CONVERSION +def mm(x): + return x / 1000.0 + + +def cm(x): + return x / 100.0 + + +def grams(x): + return x / 1000.0 + + +# Module Orientation +class Orientation(Enum): + SOUTH = 0 + NORTH = 1 + EAST = 2 + WEST = 3 + + def short_repr(self): + if self == self.SOUTH: + return 'S' + elif self == self.NORTH: + return 'N' + elif self == self.EAST: + return 'E' + elif self == self.WEST: + return 'W' + else: + assert False + + +class RevolveModule: + """ + Base class allowing for constructing Robogen components in an overviewable manner + """ + DEFAULT_COLOR = (0.5, 0.5, 0.5) + TYPE = None + VISUAL_MESH = None + COLLISION_BOX = None + MASS = None + INERTIA = None + + def __init__(self): + self.id = None + self.orientation = None + self.rgb = None # RevolveModule.DEFAULT_COLOR + self.substrate_coordinates = None + self.children = [None, None, None, None] + self.info = None + + def color(self): + return self.rgb if self.rgb is not None else self.DEFAULT_COLOR + + @staticmethod + def FromYaml(yaml_object): + """ + From a yaml object, creates a data struture of interconnected body modules. 
+ Standard names for modules are: + CoreComponent + ActiveHinge + FixedBrick + FixedBrickSensor + """ + mod_type = yaml_object['type'] + if mod_type == 'CoreComponent' or mod_type == 'Core': + module = CoreModule() + elif mod_type == 'ActiveHinge': + module = ActiveHingeModule() + elif mod_type == 'FixedBrick': + module = BrickModule() + elif mod_type == 'FixedBrickSensor': + module = BrickSensorModule() + elif mod_type == 'TouchSensor': + module = TouchSensorModule() + else: + raise NotImplementedError('"{}" module not yet implemented'.format(mod_type)) + + module.id = yaml_object['id'] + + try: + module.orientation = yaml_object['orientation'] + except KeyError: + module.orientation = 0 + + try: + module.rgb = ( + yaml_object['params']['red'], + yaml_object['params']['green'], + yaml_object['params']['blue'], + ) + except KeyError: + pass + + if 'children' in yaml_object: + for parent_slot in yaml_object['children']: + module.children[parent_slot] = RevolveModule.FromYaml( + yaml_object=yaml_object['children'][parent_slot]) + + return module + + def to_yaml(self): + if self.TYPE is None: + raise RuntimeError('Module TYPE is not implemented for "{}",' + ' this should be defined.'.format(self.__class__)) + + yaml_dict_object = OrderedDict() + yaml_dict_object['id'] = self.id + yaml_dict_object['type'] = self.TYPE + yaml_dict_object['orientation'] = self.orientation + + if self.rgb is not None: + yaml_dict_object['params'] = { + 'red': self.rgb[0], + 'green': self.rgb[1], + 'blue': self.rgb[2], + } + + children = self._generate_yaml_children() + if children is not None: + yaml_dict_object['children'] = children + + return yaml_dict_object + + def iter_children(self): + return enumerate(self.children) + + def _generate_yaml_children(self): + has_children = False + + children = {} + for i, child in self.iter_children(): + if child is not None: + children[i] = child.to_yaml() + has_children = True + + return children if has_children else None + + def validate(self): + """ + Tests if the robot tree is valid (recursively) + :return: True if the robot tree is valid + """ + raise RuntimeError("Robot tree validation not yet implemented") + + def to_sdf(self, tree_depth='', parent_link=None, child_link=None): + """ + Transform the module in sdf elements. + + IMPORTANT: It does not append VISUAL and COLLISION elements to the parent link + automatically. It does append automatically the SENSOR element. + TODO: make the append automatic for VISUAL AND COLLISION AS WELL. + + :param tree_depth: current tree depth as string (for naming) + :param parent_link: link of the parent (may be needed for certain modules) + :param child_link: link of the child (may be needed for certain modules, like hinges) + :return: visual SDF element, collision SDF element, sensor SDF element. + Sensor SDF element may be None. 
+ """ + name = 'component_{}_{}__box'.format(tree_depth, self.TYPE) + visual = SDF.Visual(name, self.rgb) + geometry = SDF.MeshGeometry(self.VISUAL_MESH) + visual.append(geometry) + + collision = SDF.Collision(name, self.MASS) + geometry = SDF.BoxGeometry(self.COLLISION_BOX) + collision.append(geometry) + + return visual, collision, None + + def boxslot(self, orientation=None): + orientation = Orientation.SOUTH if orientation is None else orientation + return BoxSlot(self.possible_slots(), orientation) + + def possible_slots(self): + box_geometry = self.COLLISION_BOX + return ( + (box_geometry[0] / -2.0, box_geometry[0] / 2.0), # X + (box_geometry[1] / -2.0, box_geometry[1] / 2.0), # Y + (box_geometry[2] / -2.0, box_geometry[2] / 2.0), # Z + ) + + def has_children(self): + """ + Check wheter module has children + :return: True if module has children + """ + has_children = False + + if self.children == {1: None}: return False + + for i, child in enumerate(self.children): + if child is not None: + has_children = True + + return has_children + + +class CoreModule(RevolveModule): + """ + Inherits class RevolveModule. Creates Robogen core module + """ + TYPE = "CoreComponent" + VISUAL_MESH = 'model://rg_robot/meshes/CoreComponent.dae' + SLOT_COORDINATES = 0.089 / 2.0 + COLLISION_BOX = (0.089, 0.089, 0.045) + MASS = grams(253) + + def __init__(self): + super().__init__() + self.substrate_coordinates = (0, 0) + + def possible_slots(self): + return ( + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # X + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # Y + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # Z + ) + + def to_sdf(self, tree_depth='', parent_link=None, child_link=None): + imu_sensor = SDF.IMUSensor('core-imu_sensor', parent_link, self) + visual, collision, _ = super().to_sdf(tree_depth, parent_link, child_link) + parent_link.append(imu_sensor) + return visual, collision, imu_sensor + + +class ActiveHingeModule(RevolveModule): + """ + Inherits class RevolveModule. 
Creates Robogen joint module + """ + TYPE = 'ActiveHinge' + VISUAL_MESH_FRAME = 'model://rg_robot/meshes/ActiveHinge_Frame.dae' + VISUAL_MESH_SERVO = 'model://rg_robot/meshes/ActiveCardanHinge_Servo_Holder.dae' + COLLISION_BOX_FRAME = (2.20e-02, 3.575e-02, 1.0e-02) + COLLISION_BOX_SERVO = (2.45e-02, 2.575e-02, 1.5e-02) + COLLISION_BOX_SERVO_2 = (1.0e-3, 3.4e-2, 3.4e-02) + COLLISION_BOX_SERVO_OFFSET = ( + SDF.math.Vector3(0, 0, 0), + SDF.math.Vector3(-0.0091, 0, 0), + ) + MASS_FRAME = grams(8) + MASS_SERVO = grams(13) + + def __init__(self): + super().__init__() + self.children = {1: None} + + def iter_children(self): + return self.children.items() + + def _generate_yaml_children(self): + child = self.children[1] + if child is None: + return None + else: + return {1: child.to_yaml()} + + def to_sdf(self, tree_depth='', parent_link=None, child_link=None): + assert(parent_link is not None) + assert(child_link is not None) + name_frame = 'component_{}_{}__frame'.format(tree_depth, self.TYPE) + name_joint = 'component_{}_{}__joint'.format(tree_depth, self.TYPE) + name_servo = 'component_{}_{}__servo'.format(tree_depth, self.TYPE) + name_servo2 = 'component_{}_{}__servo2'.format(tree_depth, self.TYPE) + + visual_frame = SDF.Visual(name_frame, self.rgb) + geometry = SDF.MeshGeometry(self.VISUAL_MESH_FRAME) + visual_frame.append(geometry) + + collision_frame = SDF.Collision(name_frame, self.MASS_FRAME) + geometry = SDF.BoxGeometry(self.COLLISION_BOX_FRAME) + collision_frame.append(geometry) + + visual_servo = SDF.Visual(name_servo, self.rgb) + geometry = SDF.MeshGeometry(self.VISUAL_MESH_SERVO) + visual_servo.append(geometry) + + collision_servo = SDF.Collision(name_servo, self.MASS_SERVO) + collision_servo.translate(SDF.math.Vector3(0.002375, 0, 0)) + geometry = SDF.BoxGeometry(self.COLLISION_BOX_SERVO) + collision_servo.append(geometry) + + collision_servo_2 = SDF.Collision(name_servo2, 0) + collision_servo_2.translate(SDF.math.Vector3(0.01175, 0.001, 0)) + geometry = SDF.BoxGeometry(self.COLLISION_BOX_SERVO_2) + collision_servo_2.append(geometry) + + joint = SDF.Joint(self.id, + name_joint, + parent_link, + child_link, + axis=SDF.math.Vector3(0, 1, 0), + coordinates=self.substrate_coordinates, + motorized=True) + + joint.set_position(SDF.math.Vector3(-0.0085, 0, 0)) + + return visual_frame, \ + [collision_frame], \ + visual_servo, \ + [collision_servo, collision_servo_2], \ + joint + + def possible_slots_frame(self): + box_geometry = self.COLLISION_BOX_FRAME + return ( + (box_geometry[0] / -2.0, box_geometry[0] / 2.0 - 0.001), # X + (0, 0), # Y + (0, 0), # Z + ) + + def possible_slots_servo(self): + box_geometry = self.COLLISION_BOX_SERVO + return ( + (box_geometry[0] / -2.0, box_geometry[0] / 2.0), # X + (0, 0), # Y + (0, 0), # Z + ) + + def boxslot_frame(self, orientation=None): + orientation = Orientation.SOUTH if orientation is None else orientation + boundaries = self.possible_slots_frame() + return BoxSlotJoints( + boundaries, + orientation, + self.COLLISION_BOX_SERVO_OFFSET + ) + + def boxslot_servo(self, orientation=None): + orientation = Orientation.SOUTH if orientation is None else orientation + boundaries = self.possible_slots_servo() + return BoxSlotJoints(boundaries, orientation) + + def boxslot(self, orientation=None): + orientation = Orientation.SOUTH if orientation is None else orientation + if orientation is Orientation.SOUTH: + return self.boxslot_frame(orientation) + elif orientation is Orientation.NORTH: + return self.boxslot_servo(orientation) + else: + raise 
RuntimeError("Invalid orientation") + + +class BrickModule(RevolveModule): + """ + Inherits class RevolveModule. Creates Robogen brick module + """ + TYPE = "FixedBrick" + VISUAL_MESH = 'model://rg_robot/meshes/FixedBrick.dae' + SLOT_COORDINATES = 3.8e-2 / 2.0 + COLLISION_BOX = (4.1e-2, 4.1e-2, 3.55e-02) + MASS = grams(12.0) + + def __init__(self): + super().__init__() + + def possible_slots(self): + return ( + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # X + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # Y + (-self.SLOT_COORDINATES, self.SLOT_COORDINATES), # Z + ) + + +class BrickSensorModule(RevolveModule): + """ + TODO not finished + Inherits class RevolveModule. Creates Robogen brick sensor module + """ + TYPE = "FixedBrickSensor" + VISUAL_MESH = 'model://rg_robot/meshes/FixedBrick.dae' + COLLISION_BOX = (4.1e-2, 4.1e-2, 3.55e-02) + + def __init__(self): + super().__init__() + raise RuntimeError("Not implemented yet") + + +class TouchSensorModule(RevolveModule): + """ + Inherits class RevolveModule. Creates Robogen sensor module + """ + TYPE = "TouchSensor" + VISUAL_MESH = 'model://rg_robot/meshes/TouchSensor.dae' + SLOT_COORDINATES = 1e-2 / 2.0 + COLLISION_BOX = (4.1e-3, 3.1e-2, 1.55e-02) + MASS = grams(3) + + def __init__(self): + super().__init__() + self.children = {} + + def boxslot(self, orientation=None): + orientation = Orientation.SOUTH if orientation is None else orientation + assert (orientation is Orientation.SOUTH) + return BoxSlotTouchSensor(self.possible_slots()) + + def possible_slots(self): + return ( + (-self.SLOT_COORDINATES, 0), # X + (0, 0), # Y + (0, 0), # Z + ) + + def to_sdf(self, tree_depth='', parent_link=None, child_link=None): + assert(parent_link is not None) + name = 'component_{}_{}'.format(tree_depth, self.TYPE) + name_sensor = 'sensor_{}_{}'.format(tree_depth, self.TYPE) + + visual = SDF.Visual(name, self.rgb) + geometry = SDF.MeshGeometry(self.VISUAL_MESH) + visual.append(geometry) + + collision = SDF.Collision(name, self.MASS) + geometry = SDF.BoxGeometry(self.COLLISION_BOX) + # collision.translate(SDF.math.Vector3(0.01175, 0.001, 0)) + collision.append(geometry) + + sensor = SDF.TouchSensor(name_sensor, collision, parent_link, self) + parent_link.append(sensor) + + return visual, collision, sensor + + +class BoxSlot: + """ + Helper class for modules connection slots + """ + def __init__(self, boundaries, orientation: Orientation): + self.orientation = orientation + self.pos = self._calculate_box_slot_pos(boundaries, orientation) + self.normal = self.pos.normalized() + self.tangent = self._calculate_box_slot_tangent(orientation) + + def _calculate_box_slot_pos(self, boundaries, slot: Orientation): + # boundaries = collision_elem.boundaries + if slot == Orientation.SOUTH: + return SDF.math.Vector3(0, boundaries[1][0], 0) + elif slot == Orientation.NORTH: + return SDF.math.Vector3(0, boundaries[1][1], 0) + elif slot == Orientation.EAST: + return SDF.math.Vector3(boundaries[0][1], 0, 0) + elif slot == Orientation.WEST: + return SDF.math.Vector3(boundaries[0][0], 0, 0) + else: + raise RuntimeError('invalid module orientation: {}'.format(slot)) + + @staticmethod + def _calculate_box_slot_tangent(slot: Orientation): + """ + Return slot tangent + """ + if slot == Orientation.SOUTH: + return SDF.math.Vector3(0, 0, 1) + elif slot == Orientation.NORTH: + return SDF.math.Vector3(0, 0, 1) + elif slot == Orientation.EAST: + return SDF.math.Vector3(0, 0, 1) + elif slot == Orientation.WEST: + return SDF.math.Vector3(0, 0, 1) + # elif slot == 4: + # # Right 
face tangent: back face + # return SDF.math.Vector3(0, 1, 0) + # elif slot == 5: + # # Left face tangent: back face + # return SDF.math.Vector3(0, 1, 0) + else: + raise RuntimeError("Invalid orientation") + + +class BoxSlotJoints(BoxSlot): + + def __init__(self, boundaries, orientation: Orientation, offset=(SDF.math.Vector3(), SDF.math.Vector3())): + self.offset = offset + super().__init__(boundaries, orientation) + + def _calculate_box_slot_pos(self, boundaries, slot: Orientation): + if slot == Orientation.SOUTH: + return SDF.math.Vector3(boundaries[0][0], 0, 0) + self.offset[0] + elif slot == Orientation.NORTH: + return SDF.math.Vector3(boundaries[0][1], 0, 0) + self.offset[1] + else: + raise RuntimeError('invalid module orientation: {}'.format(slot)) + + @staticmethod + def _calculate_box_slot_tangent(slot: Orientation): + """ + Return slot tangent + """ + if slot == Orientation.SOUTH: + return SDF.math.Vector3(0, 0, 1) + elif slot == Orientation.NORTH: + return SDF.math.Vector3(0, 0, 1) + else: + raise RuntimeError("Invalid orientation") + + +class BoxSlotTouchSensor(BoxSlot): + def __init__(self, boundaries): + super().__init__(boundaries, Orientation.SOUTH) + + def _calculate_box_slot_pos(self, boundaries, slot: Orientation): + if slot == Orientation.SOUTH: + return SDF.math.Vector3(boundaries[0][0], 0, 0) + else: + raise RuntimeError('invalid module orientation: {}'.format(slot)) + + @staticmethod + def _calculate_box_slot_tangent(slot: Orientation): + """ + Return slot tangent + """ + if slot == Orientation.SOUTH: + return SDF.math.Vector3(0, 1, 0) + else: + raise RuntimeError("Invalid orientation") diff --git a/pyrevolve/spec/msgs/__init__.py b/pyrevolve/spec/msgs/__init__.py index 643ad4453a..a15c2f35b3 100644 --- a/pyrevolve/spec/msgs/__init__.py +++ b/pyrevolve/spec/msgs/__init__.py @@ -5,3 +5,4 @@ from .sdf_body_analyze_pb2 import * from .model_inserted_pb2 import * from .robot_states_pb2 import * +from .robot_states_learning_pb2 import * diff --git a/pyrevolve/spec/msgs/robot_states_learning_pb2.py b/pyrevolve/spec/msgs/robot_states_learning_pb2.py new file mode 100644 index 0000000000..fc699f9d67 --- /dev/null +++ b/pyrevolve/spec/msgs/robot_states_learning_pb2.py @@ -0,0 +1,149 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: robot_states_learning.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from pygazebo.msg import time_pb2 as time__pb2 +from pygazebo.msg import pose_pb2 as pose__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='robot_states_learning.proto', + package='revolve.msgs', + syntax='proto2', + serialized_options=None, + serialized_pb=_b('\n\x1brobot_states_learning.proto\x12\x0crevolve.msgs\x1a\ntime.proto\x1a\npose.proto\"Q\n\rBehaviourData\x12\x1f\n\x04time\x18\x01 \x02(\x0b\x32\x11.gazebo.msgs.Time\x12\x1f\n\x04pose\x18\x02 \x01(\x0b\x32\x11.gazebo.msgs.Pose\"~\n\x13LearningRobotStates\x12\n\n\x02id\x18\x01 \x02(\t\x12\x0c\n\x04\x65val\x18\x02 \x02(\r\x12\x0c\n\x04\x64\x65\x61\x64\x18\x03 \x01(\x08\x12\x0f\n\x07\x66itness\x18\x04 \x02(\x01\x12.\n\tbehaviour\x18\x05 \x03(\x0b\x32\x1b.revolve.msgs.BehaviourData') + , + dependencies=[time__pb2.DESCRIPTOR,pose__pb2.DESCRIPTOR,]) + + + + +_BEHAVIOURDATA = _descriptor.Descriptor( + name='BehaviourData', + full_name='revolve.msgs.BehaviourData', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='time', full_name='revolve.msgs.BehaviourData.time', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pose', full_name='revolve.msgs.BehaviourData.pose', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=69, + serialized_end=150, +) + + +_LEARNINGROBOTSTATES = _descriptor.Descriptor( + name='LearningRobotStates', + full_name='revolve.msgs.LearningRobotStates', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='revolve.msgs.LearningRobotStates.id', index=0, + number=1, type=9, cpp_type=9, label=2, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='eval', full_name='revolve.msgs.LearningRobotStates.eval', index=1, + number=2, type=13, cpp_type=3, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dead', full_name='revolve.msgs.LearningRobotStates.dead', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='fitness', full_name='revolve.msgs.LearningRobotStates.fitness', index=3, + number=4, type=1, cpp_type=5, label=2, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='behaviour', full_name='revolve.msgs.LearningRobotStates.behaviour', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=152, + serialized_end=278, +) + +_BEHAVIOURDATA.fields_by_name['time'].message_type = time__pb2._TIME +_BEHAVIOURDATA.fields_by_name['pose'].message_type = pose__pb2._POSE +_LEARNINGROBOTSTATES.fields_by_name['behaviour'].message_type = _BEHAVIOURDATA +DESCRIPTOR.message_types_by_name['BehaviourData'] = _BEHAVIOURDATA +DESCRIPTOR.message_types_by_name['LearningRobotStates'] = _LEARNINGROBOTSTATES +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +BehaviourData = _reflection.GeneratedProtocolMessageType('BehaviourData', (_message.Message,), dict( + DESCRIPTOR = _BEHAVIOURDATA, + __module__ = 'robot_states_learning_pb2' + # @@protoc_insertion_point(class_scope:revolve.msgs.BehaviourData) + )) +_sym_db.RegisterMessage(BehaviourData) + +LearningRobotStates = _reflection.GeneratedProtocolMessageType('LearningRobotStates', (_message.Message,), dict( + DESCRIPTOR = _LEARNINGROBOTSTATES, + __module__ = 'robot_states_learning_pb2' + # @@protoc_insertion_point(class_scope:revolve.msgs.LearningRobotStates) + )) +_sym_db.RegisterMessage(LearningRobotStates) + + +# @@protoc_insertion_point(module_scope) diff --git a/pyrevolve/tol/manage/learningrobotmanager.py b/pyrevolve/tol/manage/learningrobotmanager.py new file mode 100644 index 0000000000..fca112bcc7 --- /dev/null +++ b/pyrevolve/tol/manage/learningrobotmanager.py @@ -0,0 +1,80 @@ +from pyrevolve.util import Time +from pyrevolve.SDF.math import Vector3 +from pyrevolve.spec.msgs.robot_states_learning_pb2 import LearningRobotStates, BehaviourData + + +class Evaluation: + def __init__(self, eval_n: int, fitness: float, behaviour: list): + """ + :param eval_n: + :param fitness: + :param behaviour: + :type behaviour: list(BehaviourData) + """ + self.eval_n = eval_n + self.fitness = fitness + self.behaviour_data = behaviour + + def times(self): + def extract_time(step: BehaviourData): + return step.time + return map(extract_time, self.behaviour_data) + + def poses(self): + def extract_poses(step: BehaviourData): + return step.pose + return map(extract_poses, self.behaviour_data) + + +class LearningRobotManager(object): + """ + Class to manage a single robot + """ + + def __init__( + self, + program_arguments, + robot, + start_position: Vector3, + inserted_time, + ): + """ + :param program_arguments: + :param robot: RevolveBot + :param inserted_time: + :type inserted_time: Time + :return: + """ + + self.robot = robot + self.inserted_time = inserted_time + self.start_position = start_position + self.program_arguments = program_arguments + self.size = robot.size() + + self.dead = False + self.last_fitness = None + self.evaluations = [] + + 
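The two messages above define the learning report protocol: BehaviourData pairs a required gazebo.msgs.Time with an optional gazebo.msgs.Pose, and LearningRobotStates carries the robot id, the evaluation counter, an optional dead flag, the fitness and the repeated behaviour steps. A minimal sketch (not part of the patch; the id and values are invented) of filling in and round-tripping such a report:

from pyrevolve.spec.msgs.robot_states_learning_pb2 import LearningRobotStates

report = LearningRobotStates()
report.id = 'example_robot'      # required string (hypothetical id)
report.eval = 3                  # required uint32: evaluation counter
report.fitness = 0.42            # required double
report.dead = False              # optional bool
step = report.behaviour.add()    # one repeated BehaviourData entry
step.time.sec = 12               # gazebo.msgs.Time is required per step
step.time.nsec = 500000000       # the optional pose field is left unset here

payload = report.SerializeToString()   # bytes as published on the report topic
parsed = LearningRobotStates()
parsed.ParseFromString(payload)
assert parsed.fitness == report.fitness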
self.best_evaluation = None + + @property + def name(self): + return str(self.robot.id) + + def learning_step_done(self, report: LearningRobotStates): + """ + :param report: report message sent from the simulator (routed from the WorldManager) + :type report: LearningRobotStates + """ + self.dead = self.dead or report.dead + self.last_fitness = report.fitness + evaluation = Evaluation( + eval_n=report.eval, + fitness=report.fitness, + behaviour=report.behaviour, + ) + self.evaluations.append(evaluation) + + if self.best_evaluation is None or report.fitness > self.best_evaluation.fitness: + self.best_evaluation = evaluation diff --git a/pyrevolve/tol/manage/single_robot_world.py b/pyrevolve/tol/manage/single_robot_world.py new file mode 100644 index 0000000000..426945f36a --- /dev/null +++ b/pyrevolve/tol/manage/single_robot_world.py @@ -0,0 +1,208 @@ +import numbers +import os + +import time + +from pyrevolve.SDF.math import Vector3 +from pyrevolve.gazebo.manage import WorldManager +from .learningrobotmanager import LearningRobotManager +from pyrevolve.spec.msgs import LearningRobotStates +from pyrevolve.spec.msgs import ModelInserted +from pyrevolve.util import Time +from pyrevolve.custom_logging.logger import logger + +# Construct a message base from the time. This should make it unique enough +# for consecutive use when the script is restarted. +_a = time.time() +MSG_BASE = int(_a - 14e8 + (_a - int(_a)) * 1e5) + + +class SingleRobotWorld(WorldManager): + """ + A class that is used to manage the world, meaning it provides methods to + insert / remove robots and request information about where they are. + + The world class contains a number of coroutines, usually from a + request/response perspective. These methods thus work with two futures - + one for the request to complete, one for the response to arrive. The + convention for these methods is to always yield the first future, because it + has proven problematic to send multiple messages over the same channel, + so a request is always sent until completion. The methods then return the + future that resolves when the response is delivered. + """ + + def __init__(self, program_arguments, _private, world_address): + """ + :param program_arguments: + """ + world_address = ("127.0.0.1", 11345) if world_address is None else world_address + + super().__init__( + _private=_private, + world_address=world_address, + ) + + self.robot_managers = {} + self.program_arguments = program_arguments + + @classmethod + async def create(cls, program_arguments, world_address=None): + """ + Coroutine to instantiate a Revolve.Angle WorldManager + :param program_arguments: + :param world_address: + :return: + """ + self = cls(_private=cls._PRIVATE, program_arguments=program_arguments, world_address=world_address) + await self._init() + return self + + async def _init(self): + if self.manager is not None: + return + + await (super()._init()) + + # Subscribe to learning states update + self.learning_states_subscriber = await self.manager.subscribe( + '/gazebo/default/revolve/robot_reports', + 'revolve.msgs.LearningRobotStates', + self._process_learning_state + ) + + # There will be no connection until the first learner starts, so waiting here will result in a deadlock + # await self.learning_states_subscriber.wait_for_connection() + + async def insert_robot( + self, + revolve_bot, + pose=None, + life_timeout=None, + ): + """ + Inserts a robot into the world. 
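The pieces above fit together as follows: reports arriving on /gazebo/default/revolve/robot_reports are parsed into LearningRobotStates and handed to the matching robot's LearningRobotManager, whose learning_step_done appends an Evaluation and tracks the best one seen so far. A rough sketch of that bookkeeping, driven directly without a simulator (FakeRobot and the fitness values are invented for illustration):

from pyrevolve.spec.msgs.robot_states_learning_pb2 import LearningRobotStates
from pyrevolve.tol.manage.learningrobotmanager import LearningRobotManager


class FakeRobot:                       # hypothetical stand-in for a RevolveBot
    id = 'example_robot'

    def size(self):
        return 0


manager = LearningRobotManager(
    program_arguments=None,            # not needed for the bookkeeping below
    robot=FakeRobot(),
    start_position=None,
    inserted_time=None,
)

for eval_n, fitness in enumerate([0.1, 0.7, 0.4]):    # invented fitness values
    report = LearningRobotStates(id='example_robot', eval=eval_n, fitness=fitness)
    manager.learning_step_done(report)

assert len(manager.evaluations) == 3
assert manager.best_evaluation.fitness == 0.7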
This consists of two steps: + + - Sending the insert request message + - Receiving a ModelInfo response + + This method is a coroutine because of the first step, writing + the message must be yielded since PyGazebo doesn't appear to + support writing multiple messages simultaneously. For the response, + i.e. the message that confirms the robot has been inserted, a + future is returned. + + :param revolve_bot: + :type revolve_bot: RevolveBot + :param pose: Insertion pose of a robot + :type pose: Pose|Vector3 + :param life_timeout: Life span of the robot + :type life_timeout: float|None + :return: A future that resolves with the created `Robot` object. + """ + if pose is None: + pose = Vector3(0, 0, self.program_arguments.z_start) + elif isinstance(pose, numbers.Number): + pose = Vector3(0, 0, pose) + elif isinstance(pose, Vector3): + pass + elif hasattr(pose, '__iter__'): + pose = Vector3(pose) + else: + raise RuntimeError(f"pose can only be Vector3 or number, instead {type(pose)} was found") + + # if the ID is digit, when removing the robot, the simulation will try to remove random stuff from the + # environment and give weird crash errors + assert(not str(revolve_bot.id).isdigit()) + + sdf_bot = revolve_bot.to_sdf(pose) + + # To debug and save all SDF files, you can uncomment the following code + # self.output_directory = '/tmp' + # if self.output_directory: + # robot_file_path = os.path.join( + # self.output_directory, + # 'robot_{}.sdf'.format(revolve_bot.id) + # ) + # with open(robot_file_path, 'w') as f: + # f.write(sdf_bot) + + response = await self.insert_model(sdf_bot, life_timeout) + robot_manager = self._robot_inserted( + robot=revolve_bot, + msg=response + ) + return robot_manager + + def _robot_inserted( + self, + robot, + msg + ): + """ + Registers a newly inserted robot and marks the insertion + message response as handled. + + :param robot: RevolveBot + :param msg: + :type msg: pygazebo.msgs.response_pb2.Response + :return: + """ + inserted = ModelInserted() + inserted.ParseFromString(msg.serialized_data) + model = inserted.model + inserted_time = Time(msg=inserted.time) + p = model.pose.position + position = Vector3(p.x, p.y, p.z) + + robot_manager = self.create_robot_manager( + robot, + position, + inserted_time + ) + self.register_robot(robot_manager) + return robot_manager + + def register_robot(self, robot_manager): + """ + Registers a robot with its Gazebo ID in the local array. + :param robot_manager: + :type robot_manager: RobotManager + """ + logger.info("Registering robot {}.".format(robot_manager.name)) + + if robot_manager.name in self.robot_managers: + raise ValueError("Duplicate robot: {}".format(robot_manager.name)) + + self.robot_managers[robot_manager.name] = robot_manager + + def _process_learning_state(self, msg): + """ + Handles the pose info message by updating robot positions. + :param msg: + :return: + """ + report = LearningRobotStates() + report.ParseFromString(msg) + + robot_name = report.id + robot_manager = self.robot_managers[robot_name] + robot_manager.learning_step_done(report) + + def create_robot_manager( + self, + robot, + start_position, + inserted_time, + ): + """ + Overriding with robot manager with more capabilities. 
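insert_robot above normalises the pose argument before generating the SDF: None becomes Vector3(0, 0, z_start), a bare number becomes the z coordinate, and any iterable is passed to Vector3. A few hypothetical calls showing the accepted forms (world, bot and the coordinates are assumed, not taken from the patch; the robot id must not be purely numeric, as the assert in insert_robot enforces):

from pyrevolve.SDF.math import Vector3

async def insertion_examples(world, bot):
    # world is assumed to be a SingleRobotWorld, bot a RevolveBot; values invented.
    await world.insert_robot(bot)                          # None  -> Vector3(0, 0, z_start)
    await world.insert_robot(bot, pose=0.25)               # float -> Vector3(0, 0, 0.25)
    await world.insert_robot(bot, pose=Vector3(1, 2, 0.25))
    await world.insert_robot(bot, pose=(1, 2, 0.25))       # iterable -> Vector3(pose)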
+ :param robot: + :param inserted_time: + :return: + """ + return LearningRobotManager( + program_arguments=self.program_arguments, + robot=robot, + start_position=start_position, + inserted_time=inserted_time, + ) diff --git a/pyrevolve/tol/manage/world.py b/pyrevolve/tol/manage/world.py index 651ab01553..ab737c80db 100644 --- a/pyrevolve/tol/manage/world.py +++ b/pyrevolve/tol/manage/world.py @@ -37,7 +37,7 @@ class World(WorldManager): future that resolves when the response is delivered. """ - def __init__(self, conf, _private, world_address): + def __init__(self, conf, _private, world_address, listen_to_contacts): """ :param conf: """ @@ -52,7 +52,8 @@ def __init__(self, conf, _private, world_address): builder=None, state_update_frequency=conf.pose_update_frequency, generator=None, - restore=conf.restore_directory + restore=conf.restore_directory, + listen_to_contacts=listen_to_contacts, ) self.conf = conf @@ -83,14 +84,15 @@ def __init__(self, conf, _private, world_address): ) @classmethod - async def create(cls, conf, world_address=None): + async def create(cls, conf, world_address=None, listen_to_contacts=False): """ Coroutine to instantiate a Revolve.Angle WorldManager :param conf: :param world_address: + :param listen_to_contacts: :return: """ - self = cls(_private=cls._PRIVATE, conf=conf, world_address=world_address) + self = cls(_private=cls._PRIVATE, conf=conf, world_address=world_address, listen_to_contacts=listen_to_contacts) await self._init() return self diff --git a/pyrevolve/util/supervisor/supervisor_multi.py b/pyrevolve/util/supervisor/supervisor_multi.py index 7b27ade29a..658e3e5432 100644 --- a/pyrevolve/util/supervisor/supervisor_multi.py +++ b/pyrevolve/util/supervisor/supervisor_multi.py @@ -304,18 +304,23 @@ class SimulatorEnded(Exception): async def read_stdout(): while not ready_str_found.done(): - if process.returncode is None: + if process.returncode is not None: ready_str_found.set_exception(SimulatorEnded()) - out = await stdout.readline() - self._logger.info(f'[starting] {out}') - if ready_str in out: - ready_str_found.set_result(None) + return + out_stream = asyncio.create_task(stdout.readline()) + done, pending = await asyncio.wait({out_stream, ready_str_found}, return_when='FIRST_COMPLETED') + if out_stream in done: + out = out_stream.result() + self._logger.info(f'[starting] {out}') + if ready_str in out: + ready_str_found.set_result(None) async def read_stderr(): while not ready_str_found.done() and process.returncode is None: - err = await stderr.readline() - if err: - self._logger.error(f'[starting] {err}') + err_stream = asyncio.create_task(stderr.readline()) + done, pending = await asyncio.wait({err_stream, ready_str_found}, return_when='FIRST_COMPLETED') + if err_stream in done: + self._logger.error(f'[starting] {err_stream.result()}') stdout_async = asyncio.ensure_future(read_stdout()) stderr_async = asyncio.ensure_future(read_stderr()) diff --git a/src/pygazebo/pygazebo b/src/pygazebo/pygazebo deleted file mode 160000 index e6f03ce47d..0000000000 --- a/src/pygazebo/pygazebo +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e6f03ce47d8e60a2fbf73a142c3182e33a54c7f8 diff --git a/thirdparty/MultiNEAT b/thirdparty/MultiNEAT index a3c32cff53..422bda125a 160000 --- a/thirdparty/MultiNEAT +++ b/thirdparty/MultiNEAT @@ -1 +1 @@ -Subproject commit a3c32cff53abadda74ab28722312a4361309fb3f +Subproject commit 422bda125a427a8c492cf3a40a054a935fa5bff8 diff --git a/thirdparty/PIGPIO/pigpiod_if.c b/thirdparty/PIGPIO/pigpiod_if.c index 7802e57962..fc73aa7255 100644 
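The supervisor change above replaces the bare await stdout.readline() calls, which could block forever once the ready string had been seen on the other stream or the simulator had exited, with a race between the readline task and the shared ready_str_found future. A self-contained sketch of that pattern (the function name and the print are illustrative, not the supervisor's API):

import asyncio

async def read_until_ready(stream: asyncio.StreamReader,
                           ready_str: bytes,
                           ready_found: asyncio.Future) -> None:
    # Race each readline against the shared future, as read_stdout does above.
    while not ready_found.done():
        line_task = asyncio.create_task(stream.readline())
        done, _pending = await asyncio.wait({line_task, ready_found},
                                            return_when=asyncio.FIRST_COMPLETED)
        if line_task in done:
            line = line_task.result()
            if not line:                      # EOF: the process closed the stream
                break
            print('[starting]', line)
            if ready_str in line and not ready_found.done():
                ready_found.set_result(None)
        else:
            line_task.cancel()                # resolved elsewhere; drop the pending read

Both the stdout and stderr readers share the one future, so whichever stream produces the ready string first unblocks both.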
--- a/thirdparty/PIGPIO/pigpiod_if.c +++ b/thirdparty/PIGPIO/pigpiod_if.c @@ -161,7 +161,7 @@ static int pigpio_command_ext return cmd.res; } -static int pigpioOpenSocket(char *addr, char *port) +static int pigpioOpenSocket(const char *addr, const char *port) { int sock, err, opt; struct addrinfo hints, *res, *rp; diff --git a/thirdparty/PIGPIO/pigpiod_if2.c b/thirdparty/PIGPIO/pigpiod_if2.c index 93efdb08cf..2a16650bea 100644 --- a/thirdparty/PIGPIO/pigpiod_if2.c +++ b/thirdparty/PIGPIO/pigpiod_if2.c @@ -234,7 +234,7 @@ static int pigpio_command_ext return cmd.res; } -static int pigpioOpenSocket(char *addr, char *port) +static int pigpioOpenSocket(const char *addr, const char *port) { int sock, err, opt; struct addrinfo hints, *res, *rp; diff --git a/thirdparty/libtorch/build-hash b/thirdparty/libtorch/build-hash new file mode 100644 index 0000000000..0c079cfcfe --- /dev/null +++ b/thirdparty/libtorch/build-hash @@ -0,0 +1,2 @@ +/pytorch /pytorch ~/project +7f73f1d591afba823daa4a99a939217fb54d7688 diff --git a/thirdparty/libtorch/build-version b/thirdparty/libtorch/build-version new file mode 100644 index 0000000000..a9ca3122a9 --- /dev/null +++ b/thirdparty/libtorch/build-version @@ -0,0 +1 @@ +1.4.0+cpu diff --git a/thirdparty/libtorch/include/ATen/ATen.h b/thirdparty/libtorch/include/ATen/ATen.h new file mode 100644 index 0000000000..669e8a661e --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ATen.h @@ -0,0 +1,27 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include diff --git a/thirdparty/libtorch/include/ATen/AccumulateType.h b/thirdparty/libtorch/include/ATen/AccumulateType.h new file mode 100644 index 0000000000..9f91bcdcdc --- /dev/null +++ b/thirdparty/libtorch/include/ATen/AccumulateType.h @@ -0,0 +1,48 @@ +#pragma once +#include +#include +#include + +// Defines the accumulation type for a scalar type. 
+// Example: +// using accscalar_t = acc_type; + +#if defined(__CUDACC__) +#include +#include +#elif defined(__HIPCC__) +#include +#include +#endif + +namespace at { + +template +struct AccumulateType { }; + +#if defined(__CUDACC__) || defined(__HIPCC__) +template <> struct AccumulateType { using type = float; }; +#endif +template <> struct AccumulateType { using type = float; }; +template <> struct AccumulateType { using type = float; }; +template <> struct AccumulateType { using type = double; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = float; }; +template <> struct AccumulateType { using type = double; }; +template <> struct AccumulateType { using type = double; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; +template <> struct AccumulateType { using type = int64_t; }; + +template +using acc_type = typename AccumulateType::type; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/ArrayRef.h b/thirdparty/libtorch/include/ATen/ArrayRef.h new file mode 100644 index 0000000000..0461d5953e --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ArrayRef.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/Backend.h b/thirdparty/libtorch/include/ATen/Backend.h new file mode 100644 index 0000000000..9651469e19 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Backend.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/Backtrace.h b/thirdparty/libtorch/include/ATen/Backtrace.h new file mode 100644 index 0000000000..bdef9f4a9d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Backtrace.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/CPUApplyUtils.h b/thirdparty/libtorch/include/ATen/CPUApplyUtils.h new file mode 100644 index 0000000000..7f5dabea8d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CPUApplyUtils.h @@ -0,0 +1,439 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace at { + +/* +[collapse dims] Updates sizes, and strides to reflect a "collapse" of +the info, possibly excluding the optional excludeDim. A "collapsed" version +of the info is the fewest dims that order the tensor's elements in the same +way as the original info. If excludeDim is specified, the collapse is the +fewest dims that order the tensor's elements as the original and preserve the +excluded dimension, unless the tensor collapses to a point. + +This function returns a pair of values. + +1) The (new) index of the preserved dimension if excludeDim is +specified. 0 if the tensor is collapsed to a point. -1 +otherwise. + +2) The new number of dimensions. +*/ +template +inline std::pair collapse_dims( + T* sizes, + T* strides, + int64_t dims, + const int excludeDim = -1) { + TORCH_CHECK( + excludeDim >= -1 && excludeDim < dims, + "expected excluded dim between -1 and dims - 1"); + + int64_t stopDim = (excludeDim == -1) ? 
dims : excludeDim; + int64_t newIndex = -1; + int64_t oldIndex = 0; + int64_t remappedExcludedDim = -1; + + while (oldIndex < dims) { + // Finds a dimension to collapse into + for (; oldIndex < stopDim; ++oldIndex) { + if (sizes[oldIndex] == 1) { + continue; + } + + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + ++oldIndex; + break; + } + + // Collapses dims + for (; oldIndex < stopDim; ++oldIndex) { + if (sizes[oldIndex] == 1) { + continue; + } + + if (strides[newIndex] == sizes[oldIndex] * strides[oldIndex]) { + sizes[newIndex] *= sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + } else { + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + } + } + + // Handles excludeDim being set (oldIndex == excludeDim) + if (oldIndex != dims) { + // Preserves excluded dimension + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + remappedExcludedDim = newIndex; + + // Restarts iteration after excludeDim + ++oldIndex; + stopDim = dims; + } + } + + // Handles special case of all dims size 1 + if (newIndex == -1 || (newIndex == 0 && sizes[0] == 1)) { + dims = 1; + sizes[0] = 1; + strides[0] = 1; + + return std::pair(0, 1); + } + + dims = newIndex + 1; + return std::pair(remappedExcludedDim, dims); +} + +/* + * The basic strategy for apply is as follows: + * + * 1. Starting with the outermost index, loop until we reach a dimension where + * the data is no longer contiguous, i.e. the stride at that dimension is not + * equal to the size of the tensor defined by the outer dimensions. Let's call + * this outer (contiguous) tensor A. Note that if the Tensor is contiguous, then + * A is equal to the entire Tensor. Let's call the inner tensor B. + * + * 2. We loop through the indices in B, starting at its outermost dimension. For + * example, if B is a 2x2 matrix, then we do: + * + * B[0][0] + * B[0][1] + * B[1][0] + * B[1][1] + * + * We set the offset into the underlying storage as (storageOffset + stride_B * + * index_B), i.e. basically we compute the offset into the storage as we would + * normally for a Tensor. But because we are guaranteed the subsequent data is + * contiguous in memory, we can simply loop for sizeof(A) iterations and perform + * the operation, without having to follow the order described by the strides of + * A. + * + * 3. As an optimization, we merge dimensions of A that are contiguous in + * memory. For example, if A is a 3x3x3x3 tensor narrowed from a 3x3x4x3 tensor, + * then the first two dimensions can be merged for the purposes of APPLY, + * reducing the number of nested loops. 
+ */ + +inline Tensor sort_strides(Tensor& tensor_) { + IntArrayRef strides = tensor_.strides(); + std::vector indices; + indices.reserve(tensor_.ndimension()); + for (int64_t i = 0; i < tensor_.ndimension(); i++) { + indices.push_back(i); + } + std::sort(indices.begin(), indices.end(), [&strides](int64_t i1, int64_t i2) { + return strides[i1] > strides[i2]; + }); + Tensor tensor = tensor_.permute(indices); + return tensor; +} + +template +struct strided_tensor_iter_fixed { + public: + T* data_ = NULL; + int64_t dim_ = 0; + + int64_t counter_[N] = {0}; + int64_t sizes_[N] = {0}; + int64_t strides_[N] = {0}; + + strided_tensor_iter_fixed(strided_tensor_iter_fixed const&) = delete; + void operator=(strided_tensor_iter_fixed const& x) = delete; + strided_tensor_iter_fixed(strided_tensor_iter_fixed&&) = default; + strided_tensor_iter_fixed(Tensor& tensor, bool sort_strides = false) + : data_(tensor.data_ptr()) { + std::memset(counter_, 0, sizeof(int64_t) * N); + if (tensor.dim() > 0) { + std::memcpy( + sizes_, tensor.sizes().data(), tensor.dim() * sizeof(int64_t)); + std::memcpy( + strides_, + tensor.strides().data(), + tensor.dim() * sizeof(int64_t)); + } + dim_ = std::get<1>(collapse_dims(sizes_, strides_, tensor.ndimension())); + } +}; + +template +struct strided_tensor_iter { + private: + public: + T* data_ = NULL; + int64_t dim_; + + std::vector counter_; + std::vector sizes_; + std::vector strides_; + + strided_tensor_iter(strided_tensor_iter const&) = delete; + void operator=(strided_tensor_iter const& x) = delete; + strided_tensor_iter(strided_tensor_iter&&) = default; + strided_tensor_iter(Tensor& tensor) + : data_(tensor.data_ptr()), + dim_(tensor.ndimension()), + counter_(dim_, 0), + sizes_(tensor.sizes().vec()), + strides_(tensor.strides().vec()) { + dim_ = std::get<1>(collapse_dims(sizes_.data(), strides_.data(), dim_)); + } +}; + +inline bool _all_equal_numel(at::ArrayRef tensors) { + if (tensors.size() == 0) + return true; + int64_t all_numel = tensors[0].numel(); + for (size_t i = 1; i < tensors.size(); i++) { + if (tensors[i].numel() != all_numel) + return false; + } + return true; +} + +inline std::string _all_equal_numel_error(at::ArrayRef tensors) { + std::ostringstream oss; + oss << "inconsistent tensor size, expected "; + for (size_t i = 0; i < tensors.size() - 1; i++) { + oss << tensors[i].sizes() << ", "; + } + oss << "and " << tensors[tensors.size() - 1].sizes() + << " to have the same number of elements, but got "; + for (size_t i = 0; i < tensors.size() - 1; i++) { + oss << tensors[i].numel() << ", "; + } + oss << "and " << tensors[tensors.size() - 1].numel() + << " elements respectively"; + return oss.str(); +} + +inline bool _apply_preamble(ArrayRef tensors) { + checkDeviceType("CPU_tensor_apply", tensors, kCPU); + checkLayout("CPU_tensor_apply", tensors, kStrided); + if (!_all_equal_numel(tensors)) + AT_ERROR(_all_equal_numel_error(tensors)); + // An empty tensor has no elements + for (auto& t : tensors) + if (t.numel() == 0) + return false; + return true; +} + +inline int64_t _max_dim_tensors(ArrayRef tensors) { + int64_t dim = 0; + for (auto& t : tensors) + dim = std::max(dim, t.ndimension()); + return dim; +} + +inline void iterate(int64_t size){}; + +template +inline void iterate(int64_t size, Arg& iter, Args&... 
iter_tail) { + iter.counter_[iter.dim_ - 1] += size; + iter.data_ = iter.data_ + size * iter.strides_[iter.dim_ - 1]; + iterate(size, iter_tail...); +} + +inline bool iterate_continue() { + return true; +}; + +template +inline bool iterate_continue(Arg& iter, Args&... iter_tail) { + return iter.counter_[iter.dim_ - 1] < iter.sizes_[iter.dim_ - 1] && + iterate_continue(iter_tail...); +} + +inline int64_t max_iterate_size() { + return std::numeric_limits::max(); +}; + +template +inline int64_t max_iterate_size(Arg& iter, Args&... iter_tail) { + return std::min( + (iter.sizes_[iter.dim_ - 1] - iter.counter_[iter.dim_ - 1]), + max_iterate_size(iter_tail...)); +} + +inline void iterate_overflow(){}; + +template +inline void iterate_overflow(Arg& iter, Args&... iter_tail) { + if (iter.counter_[iter.dim_ - 1] == iter.sizes_[iter.dim_ - 1]) { + for (int64_t i = iter.dim_ - 1; i > 0; i--) { + if (iter.counter_[i] == iter.sizes_[i]) { + iter.counter_[i] = 0; + iter.counter_[i - 1]++; + iter.data_ = iter.data_ - (iter.sizes_[i] * iter.strides_[i]) + + iter.strides_[i - 1]; + } + } + } + iterate_overflow(iter_tail...); +} + +inline void forward(int64_t offset){}; + +template +inline void forward(int64_t offset, Arg& iter, Args&... iter_tail) { + int64_t multi = offset; + for (int64_t i = iter.dim_ - 1; i >= 0; i--) { + int64_t inc = multi % iter.sizes_[i]; + multi = multi / iter.sizes_[i]; + iter.data_ = iter.data_ + inc * iter.strides_[i]; + iter.counter_[i] += inc; + } + forward(offset, iter_tail...); +} + +inline int64_t max_dim() { + return 0; +} + +template +inline int64_t max_dim(Arg& iter, Args&... iter_tail) { + return std::max(iter.dim_, max_dim(iter_tail...)); +} + +inline void apply_op(){}; + +template +inline void +apply_op(int64_t numel, int64_t offset, const Op& op, Args... iters) { + // For 0-dim tensors + if (numel == 1 && max_dim(iters...) == 0) { + op(*iters.data_...); + return; + } + if (offset > 0) + forward(offset, iters...); + // Splitting this into chunks helps the compiler create faster assembly + for (int64_t i = 0; i < numel;) { + for (; iterate_continue(iters...) && i < numel;) { + op(*iters.data_...); + iterate(1, iters...); + i++; + } + iterate_overflow(iters...); + } +} + +/* + Apply a pointwise operator to sequence of tensors + + The calling convention for op is a function/functor that takes the same + number of pointers of type scalar as the number of given tensors. 
For example, + to compute a = b * c, op would be of the form: + [](scalar* a_val, const scalar* b_val, const scalar* c_val) { a_val[0] = + b_val[0] * c_val[0]; }; +*/ + +template +inline void CPU_tensor_apply1(Tensor tensor1, const Op op) { + if (!_apply_preamble({tensor1})) + return; + if (tensor1.ndimension() < 8) { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter_fixed(tensor1, true)); + } else { + apply_op(tensor1.numel(), 0, op, strided_tensor_iter(tensor1)); + } +} + +template +inline void CPU_tensor_apply2(Tensor tensor1, Tensor tensor2, const Op op) { + if (!_apply_preamble({tensor1, tensor2})) + return; + if (_max_dim_tensors({tensor1, tensor2}) <= 8) { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter_fixed(tensor1), + strided_tensor_iter_fixed(tensor2)); + } else { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter(tensor1), + strided_tensor_iter(tensor2)); + } +} + +template +inline void +CPU_tensor_apply3(Tensor tensor1, Tensor tensor2, Tensor tensor3, const Op op) { + if (!_apply_preamble({tensor1, tensor2, tensor3})) + return; + if (_max_dim_tensors({tensor1, tensor2, tensor3}) <= 8) { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter_fixed(tensor1), + strided_tensor_iter_fixed(tensor2), + strided_tensor_iter_fixed(tensor3)); + } else { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter(tensor1), + strided_tensor_iter(tensor2), + strided_tensor_iter(tensor3)); + } +} + +template < + typename scalar1, + typename scalar2, + typename scalar3, + typename scalar4, + typename Op> +inline void CPU_tensor_apply4( + Tensor tensor1, + Tensor tensor2, + Tensor tensor3, + Tensor tensor4, + const Op op) { + if (!_apply_preamble({tensor1, tensor2, tensor3, tensor4})) + return; + if (_max_dim_tensors({tensor1, tensor2, tensor3, tensor4}) <= 8) { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter_fixed(tensor1), + strided_tensor_iter_fixed(tensor2), + strided_tensor_iter_fixed(tensor3), + strided_tensor_iter_fixed(tensor4)); + } else { + apply_op( + tensor1.numel(), + 0, + op, + strided_tensor_iter(tensor1), + strided_tensor_iter(tensor2), + strided_tensor_iter(tensor3), + strided_tensor_iter(tensor4)); + } +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/CPUFixedAllocator.h b/thirdparty/libtorch/include/ATen/CPUFixedAllocator.h new file mode 100644 index 0000000000..bc0918a90d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CPUFixedAllocator.h @@ -0,0 +1,31 @@ +#pragma once + +#include +#include + +// This file creates a fake allocator that just throws exceptions if +// it is actually used. 
+ +// state passed to the allocator is the std::function called +// when the blob is release by ATen + +namespace at { + +static cpu_fixed_malloc(void *, ptrdiff_t) { + AT_ERROR("attempting to resize a tensor view of an external blob"); +} + +static cpu_fixed_realloc(void *, void*, ptrdiff_t) { + AT_ERROR("attempting to resize a tensor view of an external blob"); +} + +static cpu_fixed_free(void * state, void * allocation) { + auto on_release = static_cast*>(state); + (*on_release)(allocation); + delete on_release; +} + +static Allocator CPU_fixed_allocator = + { cpu_fixed_malloc, cpu_fixed_realloc, cpu_fixed_free }; + +} diff --git a/thirdparty/libtorch/include/ATen/CPUGenerator.h b/thirdparty/libtorch/include/ATen/CPUGenerator.h new file mode 100644 index 0000000000..f41142a395 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CPUGenerator.h @@ -0,0 +1,44 @@ +#pragma once + +#include +#include +#include +#include + +namespace at { + +struct CAFFE2_API CPUGenerator : public Generator { + // Constructors + CPUGenerator(uint64_t seed_in = default_rng_seed_val); + ~CPUGenerator() = default; + + // CPUGenerator methods + std::shared_ptr clone() const; + void set_current_seed(uint64_t seed) override; + uint64_t current_seed() const override; + uint64_t seed() override; + static DeviceType device_type(); + uint32_t random(); + uint64_t random64(); + c10::optional next_float_normal_sample(); + c10::optional next_double_normal_sample(); + void set_next_float_normal_sample(c10::optional randn); + void set_next_double_normal_sample(c10::optional randn); + at::mt19937 engine(); + void set_engine(at::mt19937 engine); + +private: + CPUGenerator* clone_impl() const override; + at::mt19937 engine_; + c10::optional next_float_normal_sample_; + c10::optional next_double_normal_sample_; +}; + +namespace detail { + +CAFFE2_API CPUGenerator* getDefaultCPUGenerator(); +CAFFE2_API std::shared_ptr createCPUGenerator(uint64_t seed_val = default_rng_seed_val); + +} // namespace detail + +} diff --git a/thirdparty/libtorch/include/ATen/CPUType.h b/thirdparty/libtorch/include/ATen/CPUType.h new file mode 100644 index 0000000000..efda52bf9d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CPUType.h @@ -0,0 +1,579 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +namespace c10 { +struct Storage; +} + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +class Context; +struct Generator; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +#ifdef USE_STATIC_DISPATCH +namespace CPUType { + Tensor & angle_out(Tensor & out, const Tensor & self); + Tensor & real_out(Tensor & out, const Tensor & self); + Tensor & imag_out(Tensor & out, const Tensor & self); + Tensor & conj_out(Tensor & out, const Tensor & self); + Tensor add(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & addmv_out(Tensor & out, const Tensor & self, const Tensor & mat, 
const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & arange_out(Tensor & out, Scalar start, Scalar end, Scalar step); + Tensor as_strided(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset); + Tensor & atan_(Tensor & self); + Tensor & atan_out(Tensor & out, const Tensor & self); + Tensor baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & baddbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & baddbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & bernoulli_(Tensor & self, const Tensor & p, Generator * generator); + Tensor & bernoulli_(Tensor & self, double p, Generator * generator); + Tensor bincount(const Tensor & self, const Tensor & weights, int64_t minlength); + Tensor & bitwise_not_out(Tensor & out, const Tensor & self); + Tensor & logical_not_out(Tensor & out, const Tensor & self); + Tensor & logical_xor_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor bmm(const Tensor & self, const Tensor & mat2); + Tensor & bmm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor & ceil_out(Tensor & out, const Tensor & self); + Tensor & clamp_(Tensor & self, c10::optional min, c10::optional max); + Tensor & clamp_out(Tensor & out, const Tensor & self, c10::optional min, c10::optional max); + Tensor & clamp_max_(Tensor & self, Scalar max); + Tensor & clamp_max_out(Tensor & out, const Tensor & self, Scalar max); + Tensor & clamp_min_(Tensor & self, Scalar min); + Tensor & clamp_min_out(Tensor & out, const Tensor & self, Scalar min); + Tensor & cos_(Tensor & self); + Tensor & cos_out(Tensor & out, const Tensor & self); + Tensor & cosh_(Tensor & self); + Tensor & cosh_out(Tensor & out, const Tensor & self); + std::tuple _ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool zero_infinity); + Tensor _ctc_loss_backward(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity); + Tensor div(const Tensor & self, const Tensor & other); + Tensor & div_(Tensor & self, const Tensor & other); + Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor dot(const Tensor & self, const Tensor & tensor); + Tensor embedding_dense_backward(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); + Tensor & embedding_renorm_(Tensor & self, const Tensor & indices, double max_norm, double norm_type); + std::tuple _embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); + Tensor _embedding_bag_dense_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); + Tensor _embedding_bag_per_sample_weights_backward(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode); + Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional 
memory_format); + Tensor _empty_affine_quantized(IntArrayRef size, const TensorOptions & options, double scale, int64_t zero_point, c10::optional memory_format); + Tensor _empty_per_channel_affine_quantized(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options, c10::optional memory_format); + Tensor & resize_(Tensor & self, IntArrayRef size, c10::optional memory_format); + Tensor empty_strided(IntArrayRef size, IntArrayRef stride, const TensorOptions & options); + Tensor & erf_(Tensor & self); + Tensor & erf_out(Tensor & out, const Tensor & self); + Tensor & erfc_(Tensor & self); + Tensor & erfc_out(Tensor & out, const Tensor & self); + Tensor & exp_(Tensor & self); + Tensor & exp_out(Tensor & out, const Tensor & self); + Tensor & expm1_out(Tensor & out, const Tensor & self); + Tensor & eye_out(Tensor & out, int64_t n); + Tensor & eye_out(Tensor & out, int64_t n, int64_t m); + Tensor & floor_out(Tensor & out, const Tensor & self); + Tensor from_file(std::string filename, c10::optional shared, c10::optional size, const TensorOptions & options); + Tensor grid_sampler_2d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + std::tuple grid_sampler_2d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + Tensor grid_sampler_3d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + std::tuple grid_sampler_3d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + Tensor ger(const Tensor & self, const Tensor & vec2); + Tensor & ger_out(Tensor & out, const Tensor & self, const Tensor & vec2); + Tensor _fft_with_size(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes); + Tensor _inverse_helper(const Tensor & self); + Tensor kl_div_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool keepdim); + std::tuple native_layer_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps); + std::tuple native_layer_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask); + Tensor & linspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps); + Tensor & log_out(Tensor & out, const Tensor & self); + Tensor & log10_out(Tensor & out, const Tensor & self); + Tensor & log1p_(Tensor & self); + Tensor & log1p_out(Tensor & out, const Tensor & self); + Tensor & log2_out(Tensor & out, const Tensor & self); + Tensor & logspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps, double base); + Tensor _log_softmax(const Tensor & self, int64_t dim, bool half_to_float); + Tensor _log_softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); + Tensor mean(const Tensor & self, c10::optional dtype); + Tensor mean(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + Tensor & mean_out(Tensor & out, const Tensor & self, 
IntArrayRef dim, bool keepdim, c10::optional dtype); + Tensor mm(const Tensor & self, const Tensor & mat2); + Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor mul(const Tensor & self, const Tensor & other); + Tensor & mul_(Tensor & self, const Tensor & other); + Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor mv(const Tensor & self, const Tensor & vec); + Tensor & mv_out(Tensor & out, const Tensor & self, const Tensor & vec); + Tensor narrow_copy(const Tensor & self, int64_t dim, int64_t start, int64_t length); + std::tuple native_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); + std::tuple native_batch_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask); + std::tuple batch_norm_update_stats(const Tensor & input, const Tensor & running_mean, const Tensor & running_var, double momentum); + Tensor & randperm_out(Tensor & out, int64_t n, Generator * generator); + Tensor & range_out(Tensor & out, Scalar start, Scalar end, Scalar step); + Tensor & reciprocal_(Tensor & self); + Tensor & reciprocal_out(Tensor & out, const Tensor & self); + Tensor & neg_out(Tensor & out, const Tensor & self); + Tensor repeat_interleave(const Tensor & repeats); + Tensor & round_out(Tensor & out, const Tensor & self); + Tensor relu(const Tensor & self); + Tensor & relu_(Tensor & self); + Tensor prelu(const Tensor & self, const Tensor & weight); + std::tuple prelu_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight); + Tensor gelu(const Tensor & self); + Tensor gelu_backward(const Tensor & grad, const Tensor & self); + Tensor hardshrink(const Tensor & self, Scalar lambd); + Tensor hardshrink_backward(const Tensor & grad_out, const Tensor & self, Scalar lambd); + Tensor & rsqrt_out(Tensor & out, const Tensor & self); + Tensor sigmoid(const Tensor & self); + Tensor & sigmoid_(Tensor & self); + Tensor & sin_out(Tensor & out, const Tensor & self); + Tensor _softmax(const Tensor & self, int64_t dim, bool half_to_float); + Tensor _softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); + Tensor & sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & tan_(Tensor & self); + Tensor & tan_out(Tensor & out, const Tensor & self); + Tensor & tanh_(Tensor & self); + Tensor & tanh_out(Tensor & out, const Tensor & self); + Tensor flip(const Tensor & self, IntArrayRef dims); + Tensor roll(const Tensor & self, IntArrayRef shifts, IntArrayRef dims); + Tensor & trunc_out(Tensor & out, const Tensor & self); + std::tuple _unique(const Tensor & self, bool sorted, bool return_inverse); + std::tuple unique_dim(const Tensor & self, int64_t dim, bool sorted, bool return_inverse, bool return_counts); + std::tuple unique_consecutive(const Tensor & self, bool return_inverse, bool return_counts, c10::optional dim); + std::tuple unique_dim_consecutive(const Tensor & self, int64_t dim, bool return_inverse, bool return_counts); + std::tuple _unique2(const Tensor & self, bool sorted, bool return_inverse, bool return_counts); + Tensor _s_where(const Tensor & condition, const Tensor & self, const Tensor & other); + 
Tensor _standard_gamma_grad(const Tensor & self, const Tensor & output); + Tensor _standard_gamma(const Tensor & self, Generator * generator); + Tensor _dirichlet_grad(const Tensor & x, const Tensor & alpha, const Tensor & total); + Tensor _sample_dirichlet(const Tensor & self, Generator * generator); + Tensor poisson(const Tensor & self, Generator * generator); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); + Tensor pow(const Tensor & self, Scalar exponent); + Tensor & zero_(Tensor & self); + Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & sub_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor to_sparse(const Tensor & self, int64_t sparse_dim); + Tensor to_sparse(const Tensor & self); + Tensor to_mkldnn(const Tensor & self); + Tensor quantize_per_tensor(const Tensor & self, double scale, int64_t zero_point, ScalarType dtype); + Tensor quantize_per_channel(const Tensor & self, const Tensor & scales, const Tensor & zero_points, int64_t axis, ScalarType dtype); + Tensor _make_per_tensor_quantized_tensor(const Tensor & self, double scale, int64_t zero_point); + Tensor _make_per_channel_quantized_tensor(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis); + Tensor fake_quantize_per_tensor_affine(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_tensor_affine_backward(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_channel_affine(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_channel_affine_backward(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); + Scalar _local_scalar_dense(const Tensor & self); + Tensor & set_(Tensor & self, Storage source); + Tensor & set_(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride); + Tensor & set_(Tensor & self, const Tensor & source); + Tensor & set_(Tensor & self); + bool is_set_to(const Tensor & self, const Tensor & tensor); + Tensor & masked_fill_(Tensor & self, const Tensor & mask, Scalar value); + Tensor & masked_fill_(Tensor & self, const Tensor & mask, const Tensor & value); + Tensor & masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source); + Tensor view(const Tensor & self, IntArrayRef size); + Tensor & put_(Tensor & self, const Tensor & index, const Tensor & source, bool accumulate); + Tensor & index_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + Tensor & index_fill_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); + Tensor & index_fill_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); + Tensor & scatter_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + Tensor & 
scatter_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); + Tensor & scatter_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + Tensor __and__(const Tensor & self, Scalar other); + Tensor __and__(const Tensor & self, const Tensor & other); + Tensor & __iand__(Tensor & self, Scalar other); + Tensor & __iand__(Tensor & self, const Tensor & other); + Tensor __or__(const Tensor & self, Scalar other); + Tensor __or__(const Tensor & self, const Tensor & other); + Tensor & __ior__(Tensor & self, Scalar other); + Tensor & __ior__(Tensor & self, const Tensor & other); + Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, Scalar other); + Tensor __lshift__(const Tensor & self, Scalar other); + Tensor __lshift__(const Tensor & self, const Tensor & other); + Tensor & __ilshift__(Tensor & self, Scalar other); + Tensor & __ilshift__(Tensor & self, const Tensor & other); + Tensor __rshift__(const Tensor & self, Scalar other); + Tensor __rshift__(const Tensor & self, const Tensor & other); + Tensor & __irshift__(Tensor & self, Scalar other); + Tensor & __irshift__(Tensor & self, const Tensor & other); + Tensor & lgamma_(Tensor & self); + Tensor & tril_(Tensor & self, int64_t diagonal); + Tensor & triu_(Tensor & self, int64_t diagonal); + Tensor & renorm_(Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor & pow_(Tensor & self, Scalar exponent); + Tensor & pow_(Tensor & self, const Tensor & exponent); + Tensor & lerp_(Tensor & self, const Tensor & end, Scalar weight); + Tensor & lerp_(Tensor & self, const Tensor & end, const Tensor & weight); + Tensor & fmod_(Tensor & self, Scalar other); + Tensor & fmod_(Tensor & self, const Tensor & other); + Tensor & remainder_(Tensor & self, Scalar other); + Tensor & remainder_(Tensor & self, const Tensor & other); + Tensor & addbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & addbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & random_(Tensor & self, int64_t from, int64_t to, Generator * generator); + Tensor & random_(Tensor & self, int64_t to, Generator * generator); + Tensor & random_(Tensor & self, Generator * generator); + Tensor & uniform_(Tensor & self, double from, double to, Generator * generator); + Tensor & normal_(Tensor & self, double mean, double std, Generator * generator); + Tensor & cauchy_(Tensor & self, double median, double sigma, Generator * generator); + Tensor & log_normal_(Tensor & self, double mean, double std, Generator * generator); + Tensor & exponential_(Tensor & self, double lambd, Generator * generator); + Tensor & geometric_(Tensor & self, double p, Generator * generator); + Tensor & diag_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor diag(const Tensor & self, int64_t diagonal); + Tensor & triu_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor & tril_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor tril_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options); + Tensor triu_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options); + Tensor trace(const Tensor & self); + Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ne(const 
Tensor & self, Scalar other); + Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ne(const Tensor & self, const Tensor & other); + Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other); + Tensor eq(const Tensor & self, Scalar other); + Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor eq(const Tensor & self, const Tensor & other); + Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ge(const Tensor & self, Scalar other); + Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ge(const Tensor & self, const Tensor & other); + Tensor & le_out(Tensor & out, const Tensor & self, Scalar other); + Tensor le(const Tensor & self, Scalar other); + Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor le(const Tensor & self, const Tensor & other); + Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor gt(const Tensor & self, Scalar other); + Tensor & gt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor gt(const Tensor & self, const Tensor & other); + Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor lt(const Tensor & self, Scalar other); + Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor lt(const Tensor & self, const Tensor & other); + Tensor & take_out(Tensor & out, const Tensor & self, const Tensor & index); + Tensor take(const Tensor & self, const Tensor & index); + Tensor & index_select_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index); + Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index); + Tensor & masked_select_out(Tensor & out, const Tensor & self, const Tensor & mask); + Tensor masked_select(const Tensor & self, const Tensor & mask); + Tensor & nonzero_out(Tensor & out, const Tensor & self); + Tensor nonzero(const Tensor & self); + Tensor & gather_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad); + Tensor gather(const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad); + std::tuple lstsq_out(Tensor & X, Tensor & qr, const Tensor & self, const Tensor & A); + std::tuple lstsq(const Tensor & self, const Tensor & A); + std::tuple _triangular_solve_helper(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); + std::tuple _symeig_helper(const Tensor & self, bool eigenvectors, bool upper); + std::tuple eig_out(Tensor & e, Tensor & v, const Tensor & self, bool eigenvectors); + std::tuple eig(const Tensor & self, bool eigenvectors); + std::tuple _svd_helper(const Tensor & self, bool some, bool compute_uv); + Tensor _cholesky_helper(const Tensor & self, bool upper); + Tensor _cholesky_solve_helper(const Tensor & self, const Tensor & A, bool upper); + std::tuple _solve_helper(const Tensor & self, const Tensor & A); + Tensor & cholesky_inverse_out(Tensor & out, const Tensor & self, bool upper); + Tensor cholesky_inverse(const Tensor & self, bool upper); + std::tuple _qr_helper(const Tensor & self, bool some); + std::tuple geqrf_out(Tensor & a, Tensor & tau, const Tensor & self); + std::tuple geqrf(const Tensor & self); + Tensor & orgqr_out(Tensor & out, const Tensor & self, const Tensor & input2); + Tensor orgqr(const Tensor & self, const Tensor & input2); + Tensor & ormqr_out(Tensor & out, const Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose); + Tensor ormqr(const 
Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose); + std::tuple _lu_with_info(const Tensor & self, bool pivot, bool check_errors); + Tensor _lu_solve_helper(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); + Tensor & multinomial_out(Tensor & out, const Tensor & self, int64_t num_samples, bool replacement, Generator * generator); + Tensor multinomial(const Tensor & self, int64_t num_samples, bool replacement, Generator * generator); + std::tuple _multinomial_alias_setup(const Tensor & probs); + Tensor _multinomial_alias_draw(const Tensor & J, const Tensor & q, int64_t num_samples, Generator * generator); + Tensor & lgamma_out(Tensor & out, const Tensor & self); + Tensor lgamma(const Tensor & self); + Tensor erfinv(const Tensor & self); + Tensor & erfinv_(Tensor & self); + Tensor & erfinv_out(Tensor & out, const Tensor & self); + Tensor & sign_out(Tensor & out, const Tensor & self); + Tensor dist(const Tensor & self, const Tensor & other, Scalar p); + Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight); + Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight); + Tensor lerp(const Tensor & self, const Tensor & end, Scalar weight); + Tensor lerp(const Tensor & self, const Tensor & end, const Tensor & weight); + Tensor & histc_out(Tensor & out, const Tensor & self, int64_t bins, Scalar min, Scalar max); + Tensor histc(const Tensor & self, int64_t bins, Scalar min, Scalar max); + Tensor & fmod_out(Tensor & out, const Tensor & self, Scalar other); + Tensor fmod(const Tensor & self, Scalar other); + Tensor & fmod_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor fmod(const Tensor & self, const Tensor & other); + Tensor & remainder_out(Tensor & out, const Tensor & self, Scalar other); + Tensor remainder(const Tensor & self, Scalar other); + Tensor & remainder_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor remainder(const Tensor & self, const Tensor & other); + Tensor & min_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor min(const Tensor & self, const Tensor & other); + Tensor min(const Tensor & self); + Tensor & max_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor max(const Tensor & self, const Tensor & other); + Tensor max(const Tensor & self); + Tensor median(const Tensor & self); + std::tuple sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool descending); + std::tuple sort(const Tensor & self, int64_t dim, bool descending); + std::tuple topk_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); + std::tuple topk(const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); + Tensor & renorm_out(Tensor & out, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor unfold(const Tensor & self, int64_t dimension, int64_t size, int64_t step); + bool equal(const Tensor & self, const Tensor & other); + Tensor & pow_out(Tensor & out, const Tensor & self, const Tensor & exponent); + Tensor pow(const Tensor & self, const Tensor & exponent); + Tensor & pow_out(Tensor & out, Scalar self, const Tensor & exponent); + Tensor pow(Scalar self, const Tensor & exponent); + Tensor & normal_out(Tensor & out, const Tensor & mean, double std, Generator * generator); + Tensor normal(const Tensor & mean, 
double std, Generator * generator); + Tensor & normal_out(Tensor & out, double mean, const Tensor & std, Generator * generator); + Tensor normal(double mean, const Tensor & std, Generator * generator); + Tensor & normal_out(Tensor & out, const Tensor & mean, const Tensor & std, Generator * generator); + Tensor normal(const Tensor & mean, const Tensor & std, Generator * generator); + Tensor _addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + Tensor _cumsum(const Tensor & self, int64_t dim); + Tensor & _cumsum_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor _cumprod(const Tensor & self, int64_t dim); + Tensor & _cumprod_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor _var(const Tensor & self, bool unbiased); + Tensor _std(const Tensor & self, bool unbiased); + Tensor _cat(TensorList tensors, int64_t dim); + Tensor & _cat_out(Tensor & out, TensorList tensors, int64_t dim); + std::tuple _mode(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim); + std::tuple _max(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim); + std::tuple _min(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim); + Tensor & binary_cross_entropy_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor binary_cross_entropy(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor & binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor & mse_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor mse_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & multi_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor multi_margin_loss(const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor & multi_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor multi_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + std::tuple multilabel_margin_loss_forward_out(Tensor & output, 
Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction); + std::tuple multilabel_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & multilabel_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); + Tensor multilabel_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); + std::tuple nll_loss_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + std::tuple nll_loss_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor & nll_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor nll_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + std::tuple nll_loss2d_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + std::tuple nll_loss2d_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor & nll_loss2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor nll_loss2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor & smooth_l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & smooth_l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & soft_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor soft_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & elu_out(Tensor & out, const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor elu(const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor & elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); + Tensor elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); + Tensor & elu_(Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor & glu_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor glu(const Tensor & self, int64_t dim); + Tensor & glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim); + Tensor glu_backward(const 
Tensor & grad_output, const Tensor & self, int64_t dim); + Tensor & hardtanh_out(Tensor & out, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor hardtanh(const Tensor & self, Scalar min_val, Scalar max_val); + Tensor & hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor & hardtanh_(Tensor & self, Scalar min_val, Scalar max_val); + Tensor & leaky_relu_out(Tensor & out, const Tensor & self, Scalar negative_slope); + Tensor leaky_relu(const Tensor & self, Scalar negative_slope); + Tensor & leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope); + Tensor leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope); + Tensor & leaky_relu_(Tensor & self, Scalar negative_slope); + std::tuple log_sigmoid_forward_out(Tensor & output, Tensor & buffer, const Tensor & self); + std::tuple log_sigmoid_forward(const Tensor & self); + Tensor & log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer); + Tensor log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer); + Tensor & rrelu_with_noise_out(Tensor & out, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor rrelu_with_noise(const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor & rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); + Tensor rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); + Tensor & rrelu_with_noise_(Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor & softplus_out(Tensor & out, const Tensor & self, Scalar beta, Scalar threshold); + Tensor softplus(const Tensor & self, Scalar beta, Scalar threshold); + Tensor & softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); + Tensor softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); + Tensor & softshrink_out(Tensor & out, const Tensor & self, Scalar lambd); + Tensor softshrink(const Tensor & self, Scalar lambd); + Tensor & softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd); + Tensor softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd); + Tensor & adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor _adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor _adaptive_avg_pool2d_backward(const Tensor & grad_output, const Tensor & self); + Tensor & adaptive_avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor adaptive_avg_pool3d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self); + Tensor adaptive_avg_pool3d_backward(const Tensor & grad_output, const Tensor & self); + 
std::tuple adaptive_max_pool2d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); + std::tuple adaptive_max_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor adaptive_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); + std::tuple adaptive_max_pool3d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); + std::tuple adaptive_max_pool3d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor adaptive_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor & avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor & avg_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor & avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor & avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + std::tuple fractional_max_pool2d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + std::tuple fractional_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + Tensor & fractional_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + Tensor fractional_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + std::tuple fractional_max_pool3d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + std::tuple fractional_max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & 
random_samples); + Tensor & fractional_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + Tensor fractional_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + std::tuple max_pool2d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + std::tuple max_pool2d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor & max_pool2d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor max_pool2d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + std::tuple max_pool3d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + std::tuple max_pool3d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor & max_pool3d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor max_pool3d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor & max_unpool2d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor max_unpool2d(const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor & max_unpool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor max_unpool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor & max_unpool3d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor max_unpool3d(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor & max_unpool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor max_unpool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor & reflection_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad1d(const Tensor & self, IntArrayRef padding); + Tensor & reflection_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad1d_backward(const Tensor & grad_output, const Tensor & self, 
IntArrayRef padding); + Tensor & reflection_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad2d(const Tensor & self, IntArrayRef padding); + Tensor & reflection_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor replication_pad1d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor replication_pad2d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad3d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor replication_pad3d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & upsample_linear1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_linear1d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_linear1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_linear1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_bilinear2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_bilinear2d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_bilinear2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_bilinear2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_bicubic2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_bicubic2d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_bicubic2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_bicubic2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_trilinear3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_trilinear3d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_trilinear3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor 
upsample_trilinear3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_nearest1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest1d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor upsample_nearest1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & upsample_nearest2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest2d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor upsample_nearest2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & upsample_nearest3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest3d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor upsample_nearest3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); + Tensor & tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); + Tensor tanh_backward(const Tensor & grad_output, const Tensor & output); + Tensor & slow_conv_transpose2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + Tensor slow_conv_transpose2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + std::tuple slow_conv_transpose2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones); + std::tuple slow_conv_transpose2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones, std::array output_mask); + Tensor & slow_conv_transpose3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + Tensor slow_conv_transpose3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + std::tuple slow_conv_transpose3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, 
const Tensor & fgrad_input); + std::tuple slow_conv_transpose3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); + std::tuple thnn_conv2d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple thnn_conv2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple thnn_conv2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); + std::tuple thnn_conv2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); + std::tuple slow_conv3d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple slow_conv3d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple slow_conv3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); + std::tuple slow_conv3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); + Tensor slow_conv_dilated2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple slow_conv_dilated2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + Tensor slow_conv_dilated3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple slow_conv_dilated3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + Tensor & col2im_out(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor col2im(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor & col2im_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor col2im_backward(const Tensor & 
grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+ Tensor & im2col_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+ Tensor im2col(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+ Tensor & im2col_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+ Tensor im2col_backward(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+}
+#endif
+
+} // namespace at
diff --git a/thirdparty/libtorch/include/ATen/CUDAGenerator.h b/thirdparty/libtorch/include/ATen/CUDAGenerator.h new file mode 100644 index 0000000000..51e4ca916c --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CUDAGenerator.h @@ -0,0 +1,37 @@
+#pragma once
+
+#include
+
+namespace at {
+
+struct CAFFE2_API CUDAGenerator : public Generator {
+ // Constructors
+ CUDAGenerator(DeviceIndex device_index = -1);
+ ~CUDAGenerator() = default;
+
+ // CUDAGenerator methods
+ std::shared_ptr<CUDAGenerator> clone() const;
+ void set_current_seed(uint64_t seed) override;
+ uint64_t current_seed() const override;
+ uint64_t seed() override;
+ void set_philox_offset_per_thread(uint64_t offset);
+ uint64_t philox_offset_per_thread();
+ std::pair<uint64_t, uint64_t> philox_engine_inputs(uint64_t increment);
+ static DeviceType device_type();
+
+private:
+ CUDAGenerator* clone_impl() const override;
+ uint64_t seed_ = default_rng_seed_val;
+ uint64_t philox_offset_per_thread_ = 0;
+};
+
+namespace cuda {
+namespace detail {
+
+ CAFFE2_API CUDAGenerator* getDefaultCUDAGenerator(DeviceIndex device_index = -1);
+ CAFFE2_API std::shared_ptr<CUDAGenerator> createCUDAGenerator(DeviceIndex device_index = -1);
+
+} // namespace detail
+} // namespace cuda
+} // namespace at
+
diff --git a/thirdparty/libtorch/include/ATen/CUDAType.h b/thirdparty/libtorch/include/ATen/CUDAType.h new file mode 100644 index 0000000000..6232d2712b --- /dev/null +++ b/thirdparty/libtorch/include/ATen/CUDAType.h @@ -0,0 +1,620 @@
+#pragma once
+
+// @generated by aten/src/ATen/gen.py
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+
+namespace c10 {
+struct Storage;
+}
+
+namespace at {
+
+class Tensor;
+using TensorList = ArrayRef<Tensor>;
+
+class Context;
+struct Generator;
+
+struct Quantizer;
+// This is temporary typedef to enable Quantizer in aten native function API
+// we'll remove them when we are actually exposing Quantizer class
+// to frontend
+using ConstQuantizerPtr = const c10::intrusive_ptr<Quantizer>&;
+
+#ifdef USE_STATIC_DISPATCH
+namespace CUDAType {
+ bool _use_cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank);
+ std::tuple _cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool deterministic, bool zero_infinity);
+ Tensor _cudnn_rnn_flatten_weight(TensorList weight_arr, int64_t weight_stride0, int64_t input_size, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, bool bidirectional);
+ std::tuple _cudnn_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, int64_t mode,
int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); + std::tuple> _cudnn_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); + Tensor _cudnn_init_dropout_state(double dropout, bool train, int64_t dropout_seed, const TensorOptions & options); + std::tuple _fused_dropout(const Tensor & self, double p, Generator * generator); + Tensor _masked_scale(const Tensor & self, const Tensor & mask, double scale); + Tensor add(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & addmv_out(Tensor & out, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); + Tensor & arange_out(Tensor & out, Scalar start, Scalar end, Scalar step); + Tensor as_strided(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset); + Tensor & atan_(Tensor & self); + Tensor & atan_out(Tensor & out, const Tensor & self); + Tensor baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & baddbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & baddbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & bernoulli_(Tensor & self, const Tensor & p, Generator * generator); + Tensor & bernoulli_(Tensor & self, double p, Generator * generator); + Tensor bincount(const Tensor & self, const Tensor & weights, int64_t minlength); + Tensor & bitwise_not_out(Tensor & out, const Tensor & self); + Tensor & logical_not_out(Tensor & out, const Tensor & self); + Tensor & logical_xor_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor bmm(const Tensor & self, const Tensor & mat2); + Tensor & bmm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor & ceil_out(Tensor & out, const Tensor & self); + Tensor & clamp_(Tensor & self, c10::optional min, c10::optional max); + Tensor & clamp_out(Tensor & out, const Tensor & self, c10::optional min, c10::optional max); + Tensor & clamp_max_(Tensor & self, Scalar max); + Tensor & clamp_max_out(Tensor & out, const Tensor & self, Scalar max); + Tensor & clamp_min_(Tensor & self, Scalar min); + Tensor & clamp_min_out(Tensor & out, const Tensor & self, Scalar min); + Tensor & cos_(Tensor & self); + Tensor & cos_out(Tensor & out, const Tensor & self); + Tensor & cosh_(Tensor & self); + Tensor & cosh_out(Tensor & out, const Tensor & self); + Tensor cudnn_affine_grid_generator(const Tensor & theta, int64_t N, int64_t C, int64_t H, int64_t W); + Tensor cudnn_affine_grid_generator_backward(const Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W); 
+ std::tuple cudnn_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); + std::tuple cudnn_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon, const Tensor & reserveSpace); + Tensor cudnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor cudnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple cudnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); + Tensor cudnn_convolution_backward_bias(const Tensor & grad_output); + Tensor cudnn_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor cudnn_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple cudnn_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); + Tensor cudnn_convolution_transpose_backward_bias(const Tensor & grad_output); + Tensor cudnn_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor cudnn_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor cudnn_grid_sampler(const Tensor & self, const Tensor & grid); + std::tuple cudnn_grid_sampler_backward(const Tensor & self, const Tensor & grid, const Tensor & grad_output); + std::tuple _ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool zero_infinity); + Tensor _ctc_loss_backward(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity); + Tensor div(const Tensor & self, const Tensor & other); + Tensor & div_(Tensor & self, const Tensor & other); + Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor dot(const Tensor & self, const Tensor & tensor); + Tensor embedding_dense_backward(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool 
scale_grad_by_freq); + Tensor & embedding_renorm_(Tensor & self, const Tensor & indices, double max_norm, double norm_type); + std::tuple _embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); + Tensor _embedding_bag_dense_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); + Tensor _embedding_bag_per_sample_weights_backward(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode); + Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional memory_format); + Tensor & resize_(Tensor & self, IntArrayRef size, c10::optional memory_format); + Tensor empty_strided(IntArrayRef size, IntArrayRef stride, const TensorOptions & options); + Tensor & erf_(Tensor & self); + Tensor & erf_out(Tensor & out, const Tensor & self); + Tensor & erfc_(Tensor & self); + Tensor & erfc_out(Tensor & out, const Tensor & self); + Tensor & exp_(Tensor & self); + Tensor & exp_out(Tensor & out, const Tensor & self); + Tensor & expm1_out(Tensor & out, const Tensor & self); + Tensor & eye_out(Tensor & out, int64_t n); + Tensor & eye_out(Tensor & out, int64_t n, int64_t m); + Tensor & floor_out(Tensor & out, const Tensor & self); + Tensor grid_sampler_2d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + std::tuple grid_sampler_2d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + Tensor grid_sampler_3d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + std::tuple grid_sampler_3d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + Tensor ger(const Tensor & self, const Tensor & vec2); + Tensor & ger_out(Tensor & out, const Tensor & self, const Tensor & vec2); + Tensor _fft_with_size(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes); + Tensor _inverse_helper(const Tensor & self); + Tensor kl_div_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool keepdim); + std::tuple native_layer_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps); + std::tuple native_layer_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask); + Tensor & linspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps); + Tensor & log_out(Tensor & out, const Tensor & self); + Tensor & log10_out(Tensor & out, const Tensor & self); + Tensor & log1p_(Tensor & self); + Tensor & log1p_out(Tensor & out, const Tensor & self); + Tensor & log2_out(Tensor & out, const Tensor & self); + Tensor & logspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps, 
double base); + Tensor _log_softmax(const Tensor & self, int64_t dim, bool half_to_float); + Tensor _log_softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); + Tensor mean(const Tensor & self, c10::optional dtype); + Tensor mean(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + Tensor & mean_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + std::tuple miopen_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); + std::tuple miopen_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon); + Tensor miopen_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor miopen_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple miopen_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); + Tensor miopen_convolution_backward_bias(const Tensor & grad_output); + Tensor miopen_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor miopen_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple miopen_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); + Tensor miopen_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor miopen_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor miopen_depthwise_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + Tensor miopen_depthwise_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple miopen_depthwise_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, 
IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); + Tensor miopen_depthwise_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); + std::tuple miopen_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); + std::tuple> miopen_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); + Tensor mm(const Tensor & self, const Tensor & mat2); + Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor mul(const Tensor & self, const Tensor & other); + Tensor & mul_(Tensor & self, const Tensor & other); + Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor mv(const Tensor & self, const Tensor & vec); + Tensor & mv_out(Tensor & out, const Tensor & self, const Tensor & vec); + Tensor narrow_copy(const Tensor & self, int64_t dim, int64_t start, int64_t length); + std::tuple native_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); + std::tuple batch_norm_stats(const Tensor & input, double eps); + Tensor batch_norm_elemt(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); + Tensor & batch_norm_elemt_out(Tensor & out, const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); + std::tuple batch_norm_gather_stats(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, int64_t count); + std::tuple batch_norm_gather_stats_with_counts(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, IntArrayRef counts); + std::tuple native_batch_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask); + std::tuple batch_norm_backward_reduce(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, bool input_g, bool weight_g, bool bias_g); + Tensor batch_norm_backward_elemt(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, const Tensor & mean_dy, const Tensor & mean_dy_xmu); + std::tuple batch_norm_update_stats(const Tensor & input, const Tensor & running_mean, const Tensor & running_var, double momentum); + Tensor & randperm_out(Tensor & out, int64_t n, Generator * generator); + 
Tensor & range_out(Tensor & out, Scalar start, Scalar end, Scalar step); + Tensor & reciprocal_(Tensor & self); + Tensor & reciprocal_out(Tensor & out, const Tensor & self); + Tensor & neg_out(Tensor & out, const Tensor & self); + Tensor repeat_interleave(const Tensor & repeats); + Tensor & round_out(Tensor & out, const Tensor & self); + Tensor relu(const Tensor & self); + Tensor & relu_(Tensor & self); + Tensor prelu(const Tensor & self, const Tensor & weight); + std::tuple prelu_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight); + Tensor gelu(const Tensor & self); + Tensor gelu_backward(const Tensor & grad, const Tensor & self); + Tensor hardshrink(const Tensor & self, Scalar lambd); + Tensor hardshrink_backward(const Tensor & grad_out, const Tensor & self, Scalar lambd); + Tensor & rsqrt_out(Tensor & out, const Tensor & self); + Tensor sigmoid(const Tensor & self); + Tensor & sigmoid_(Tensor & self); + Tensor & sin_out(Tensor & out, const Tensor & self); + Tensor _softmax(const Tensor & self, int64_t dim, bool half_to_float); + Tensor _softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); + Tensor & sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & tan_(Tensor & self); + Tensor & tan_out(Tensor & out, const Tensor & self); + Tensor & tanh_(Tensor & self); + Tensor & tanh_out(Tensor & out, const Tensor & self); + Tensor flip(const Tensor & self, IntArrayRef dims); + Tensor roll(const Tensor & self, IntArrayRef shifts, IntArrayRef dims); + Tensor & trunc_out(Tensor & out, const Tensor & self); + std::tuple _unique(const Tensor & self, bool sorted, bool return_inverse); + std::tuple unique_dim(const Tensor & self, int64_t dim, bool sorted, bool return_inverse, bool return_counts); + std::tuple unique_consecutive(const Tensor & self, bool return_inverse, bool return_counts, c10::optional dim); + std::tuple unique_dim_consecutive(const Tensor & self, int64_t dim, bool return_inverse, bool return_counts); + std::tuple _unique2(const Tensor & self, bool sorted, bool return_inverse, bool return_counts); + Tensor _s_where(const Tensor & condition, const Tensor & self, const Tensor & other); + std::tuple _weight_norm_cuda_interface(const Tensor & v, const Tensor & g, int64_t dim); + std::tuple _weight_norm_cuda_interface_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); + Tensor _standard_gamma_grad(const Tensor & self, const Tensor & output); + Tensor _standard_gamma(const Tensor & self, Generator * generator); + Tensor _dirichlet_grad(const Tensor & x, const Tensor & alpha, const Tensor & total); + Tensor _sample_dirichlet(const Tensor & self, Generator * generator); + Tensor poisson(const Tensor & self, Generator * generator); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); + Tensor pow(const Tensor & self, Scalar exponent); + Tensor & zero_(Tensor & self); + Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & sub_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor addmm(const Tensor & self, const Tensor & mat1, const 
Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor to_sparse(const Tensor & self, int64_t sparse_dim); + Tensor to_sparse(const Tensor & self); + Tensor fake_quantize_per_tensor_affine(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_tensor_affine_backward(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_channel_affine(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); + Tensor fake_quantize_per_channel_affine_backward(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); + Scalar _local_scalar_dense(const Tensor & self); + std::tuple _thnn_fused_lstm_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & cx, const Tensor & input_bias, const Tensor & hidden_bias); + std::tuple _thnn_fused_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & cx, const Tensor & cy, const Tensor & workspace, bool has_bias); + std::tuple _thnn_fused_gru_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias, const Tensor & hidden_bias); + std::tuple _thnn_fused_gru_cell_backward(const Tensor & grad_hy, const Tensor & workspace, bool has_bias); + Tensor & set_(Tensor & self, Storage source); + Tensor & set_(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride); + Tensor & set_(Tensor & self, const Tensor & source); + Tensor & set_(Tensor & self); + bool is_set_to(const Tensor & self, const Tensor & tensor); + Tensor & masked_fill_(Tensor & self, const Tensor & mask, Scalar value); + Tensor & masked_fill_(Tensor & self, const Tensor & mask, const Tensor & value); + Tensor & masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source); + Tensor view(const Tensor & self, IntArrayRef size); + Tensor & put_(Tensor & self, const Tensor & index, const Tensor & source, bool accumulate); + Tensor & index_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + Tensor & index_fill_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); + Tensor & index_fill_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); + Tensor & scatter_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + Tensor & scatter_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); + Tensor & scatter_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + Tensor __and__(const Tensor & self, Scalar other); + Tensor __and__(const Tensor & self, const Tensor & other); + Tensor & __iand__(Tensor & self, Scalar other); + Tensor & __iand__(Tensor & self, const Tensor & other); + Tensor __or__(const Tensor & self, Scalar other); + Tensor __or__(const Tensor & self, const Tensor & other); + Tensor & __ior__(Tensor & self, Scalar other); + Tensor & __ior__(Tensor & self, const Tensor & other); + Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, Scalar other); + Tensor __lshift__(const Tensor & self, Scalar other); + Tensor __lshift__(const Tensor & self, const Tensor & other); + Tensor 
& __ilshift__(Tensor & self, Scalar other); + Tensor & __ilshift__(Tensor & self, const Tensor & other); + Tensor __rshift__(const Tensor & self, Scalar other); + Tensor __rshift__(const Tensor & self, const Tensor & other); + Tensor & __irshift__(Tensor & self, Scalar other); + Tensor & __irshift__(Tensor & self, const Tensor & other); + Tensor & lgamma_(Tensor & self); + Tensor & tril_(Tensor & self, int64_t diagonal); + Tensor & triu_(Tensor & self, int64_t diagonal); + Tensor & renorm_(Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor & pow_(Tensor & self, Scalar exponent); + Tensor & pow_(Tensor & self, const Tensor & exponent); + Tensor & lerp_(Tensor & self, const Tensor & end, Scalar weight); + Tensor & lerp_(Tensor & self, const Tensor & end, const Tensor & weight); + Tensor & fmod_(Tensor & self, Scalar other); + Tensor & fmod_(Tensor & self, const Tensor & other); + Tensor & remainder_(Tensor & self, Scalar other); + Tensor & remainder_(Tensor & self, const Tensor & other); + Tensor & addbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & addbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor & random_(Tensor & self, int64_t from, int64_t to, Generator * generator); + Tensor & random_(Tensor & self, int64_t to, Generator * generator); + Tensor & random_(Tensor & self, Generator * generator); + Tensor & uniform_(Tensor & self, double from, double to, Generator * generator); + Tensor & normal_(Tensor & self, double mean, double std, Generator * generator); + Tensor & cauchy_(Tensor & self, double median, double sigma, Generator * generator); + Tensor & log_normal_(Tensor & self, double mean, double std, Generator * generator); + Tensor & exponential_(Tensor & self, double lambd, Generator * generator); + Tensor & geometric_(Tensor & self, double p, Generator * generator); + Tensor & diag_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor diag(const Tensor & self, int64_t diagonal); + Tensor & triu_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor & tril_out(Tensor & out, const Tensor & self, int64_t diagonal); + Tensor tril_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options); + Tensor triu_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options); + Tensor trace(const Tensor & self); + Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ne(const Tensor & self, Scalar other); + Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ne(const Tensor & self, const Tensor & other); + Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other); + Tensor eq(const Tensor & self, Scalar other); + Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor eq(const Tensor & self, const Tensor & other); + Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ge(const Tensor & self, Scalar other); + Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ge(const Tensor & self, const Tensor & other); + Tensor & le_out(Tensor & out, const Tensor & self, Scalar other); + Tensor le(const Tensor & self, Scalar other); + Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor le(const Tensor & self, const Tensor & 
other); + Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor gt(const Tensor & self, Scalar other); + Tensor & gt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor gt(const Tensor & self, const Tensor & other); + Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor lt(const Tensor & self, Scalar other); + Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor lt(const Tensor & self, const Tensor & other); + Tensor & take_out(Tensor & out, const Tensor & self, const Tensor & index); + Tensor take(const Tensor & self, const Tensor & index); + Tensor & index_select_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index); + Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index); + Tensor & masked_select_out(Tensor & out, const Tensor & self, const Tensor & mask); + Tensor masked_select(const Tensor & self, const Tensor & mask); + Tensor & nonzero_out(Tensor & out, const Tensor & self); + Tensor nonzero(const Tensor & self); + Tensor & gather_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad); + Tensor gather(const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad); + std::tuple lstsq_out(Tensor & X, Tensor & qr, const Tensor & self, const Tensor & A); + std::tuple lstsq(const Tensor & self, const Tensor & A); + std::tuple _triangular_solve_helper(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); + std::tuple _symeig_helper(const Tensor & self, bool eigenvectors, bool upper); + std::tuple eig_out(Tensor & e, Tensor & v, const Tensor & self, bool eigenvectors); + std::tuple eig(const Tensor & self, bool eigenvectors); + std::tuple _svd_helper(const Tensor & self, bool some, bool compute_uv); + Tensor _cholesky_helper(const Tensor & self, bool upper); + Tensor _cholesky_solve_helper(const Tensor & self, const Tensor & A, bool upper); + std::tuple _solve_helper(const Tensor & self, const Tensor & A); + Tensor & cholesky_inverse_out(Tensor & out, const Tensor & self, bool upper); + Tensor cholesky_inverse(const Tensor & self, bool upper); + std::tuple _qr_helper(const Tensor & self, bool some); + std::tuple geqrf_out(Tensor & a, Tensor & tau, const Tensor & self); + std::tuple geqrf(const Tensor & self); + std::tuple _lu_with_info(const Tensor & self, bool pivot, bool check_errors); + Tensor _lu_solve_helper(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); + Tensor & multinomial_out(Tensor & out, const Tensor & self, int64_t num_samples, bool replacement, Generator * generator); + Tensor multinomial(const Tensor & self, int64_t num_samples, bool replacement, Generator * generator); + std::tuple _multinomial_alias_setup(const Tensor & probs); + Tensor _multinomial_alias_draw(const Tensor & J, const Tensor & q, int64_t num_samples, Generator * generator); + Tensor & lgamma_out(Tensor & out, const Tensor & self); + Tensor lgamma(const Tensor & self); + Tensor erfinv(const Tensor & self); + Tensor & erfinv_(Tensor & self); + Tensor & erfinv_out(Tensor & out, const Tensor & self); + Tensor & sign_out(Tensor & out, const Tensor & self); + Tensor dist(const Tensor & self, const Tensor & other, Scalar p); + Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight); + Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight); + Tensor lerp(const Tensor & self, const Tensor & end, Scalar 
weight); + Tensor lerp(const Tensor & self, const Tensor & end, const Tensor & weight); + Tensor & histc_out(Tensor & out, const Tensor & self, int64_t bins, Scalar min, Scalar max); + Tensor histc(const Tensor & self, int64_t bins, Scalar min, Scalar max); + Tensor & fmod_out(Tensor & out, const Tensor & self, Scalar other); + Tensor fmod(const Tensor & self, Scalar other); + Tensor & fmod_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor fmod(const Tensor & self, const Tensor & other); + Tensor & remainder_out(Tensor & out, const Tensor & self, Scalar other); + Tensor remainder(const Tensor & self, Scalar other); + Tensor & remainder_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor remainder(const Tensor & self, const Tensor & other); + Tensor & min_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor min(const Tensor & self, const Tensor & other); + Tensor min(const Tensor & self); + Tensor & max_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor max(const Tensor & self, const Tensor & other); + Tensor max(const Tensor & self); + Tensor median(const Tensor & self); + std::tuple sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool descending); + std::tuple sort(const Tensor & self, int64_t dim, bool descending); + std::tuple topk_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); + std::tuple topk(const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); + Tensor & renorm_out(Tensor & out, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); + Tensor unfold(const Tensor & self, int64_t dimension, int64_t size, int64_t step); + bool equal(const Tensor & self, const Tensor & other); + Tensor & pow_out(Tensor & out, const Tensor & self, const Tensor & exponent); + Tensor pow(const Tensor & self, const Tensor & exponent); + Tensor & pow_out(Tensor & out, Scalar self, const Tensor & exponent); + Tensor pow(Scalar self, const Tensor & exponent); + Tensor & normal_out(Tensor & out, const Tensor & mean, double std, Generator * generator); + Tensor normal(const Tensor & mean, double std, Generator * generator); + Tensor & normal_out(Tensor & out, double mean, const Tensor & std, Generator * generator); + Tensor normal(double mean, const Tensor & std, Generator * generator); + Tensor & normal_out(Tensor & out, const Tensor & mean, const Tensor & std, Generator * generator); + Tensor normal(const Tensor & mean, const Tensor & std, Generator * generator); + Tensor _addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & _index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + Tensor _cumsum(const Tensor & self, int64_t dim); + Tensor & _cumsum_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor _cumprod(const Tensor & self, int64_t dim); + Tensor & _cumprod_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor _var(const Tensor & self, bool unbiased); + Tensor _std(const Tensor & self, bool unbiased); + Tensor _cat(TensorList tensors, int64_t dim); + Tensor & _cat_out(Tensor & out, TensorList tensors, int64_t 
dim); + std::tuple _mode(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim); + std::tuple _max(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim); + std::tuple _min(const Tensor & self, int64_t dim, bool keepdim); + std::tuple _min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim); + Tensor & binary_cross_entropy_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor binary_cross_entropy(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor & binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); + Tensor & mse_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor mse_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & multi_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor multi_margin_loss(const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor & multi_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + Tensor multi_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); + std::tuple multilabel_margin_loss_forward_out(Tensor & output, Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction); + std::tuple multilabel_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & multilabel_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); + Tensor multilabel_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); + std::tuple nll_loss_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + std::tuple nll_loss_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor & nll_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor nll_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + std::tuple 
nll_loss2d_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + std::tuple nll_loss2d_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor & nll_loss2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor nll_loss2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); + Tensor & smooth_l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & smooth_l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & soft_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor soft_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & elu_out(Tensor & out, const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor elu(const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor & elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); + Tensor elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); + Tensor & elu_(Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); + Tensor & glu_out(Tensor & out, const Tensor & self, int64_t dim); + Tensor glu(const Tensor & self, int64_t dim); + Tensor & glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim); + Tensor glu_backward(const Tensor & grad_output, const Tensor & self, int64_t dim); + Tensor & hardtanh_out(Tensor & out, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor hardtanh(const Tensor & self, Scalar min_val, Scalar max_val); + Tensor & hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); + Tensor & hardtanh_(Tensor & self, Scalar min_val, Scalar max_val); + Tensor & leaky_relu_out(Tensor & out, const Tensor & self, Scalar negative_slope); + Tensor leaky_relu(const Tensor & self, Scalar negative_slope); + Tensor & leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope); + Tensor leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope); + Tensor & leaky_relu_(Tensor & self, Scalar negative_slope); + std::tuple log_sigmoid_forward_out(Tensor & output, Tensor & buffer, const Tensor & self); + std::tuple log_sigmoid_forward(const Tensor & self); + Tensor & log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer); + Tensor 
log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer); + Tensor & rrelu_with_noise_out(Tensor & out, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor rrelu_with_noise(const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor & rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); + Tensor rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); + Tensor & rrelu_with_noise_(Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor & softplus_out(Tensor & out, const Tensor & self, Scalar beta, Scalar threshold); + Tensor softplus(const Tensor & self, Scalar beta, Scalar threshold); + Tensor & softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); + Tensor softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); + Tensor & softshrink_out(Tensor & out, const Tensor & self, Scalar lambd); + Tensor softshrink(const Tensor & self, Scalar lambd); + Tensor & softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd); + Tensor softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd); + Tensor & adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor _adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor _adaptive_avg_pool2d_backward(const Tensor & grad_output, const Tensor & self); + Tensor & adaptive_avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor adaptive_avg_pool3d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self); + Tensor adaptive_avg_pool3d_backward(const Tensor & grad_output, const Tensor & self); + std::tuple adaptive_max_pool2d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); + std::tuple adaptive_max_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor adaptive_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); + std::tuple adaptive_max_pool3d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); + std::tuple adaptive_max_pool3d(const Tensor & self, IntArrayRef output_size); + Tensor & adaptive_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor adaptive_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); + Tensor & avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + 
Tensor & avg_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor & avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor & avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor avg_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + std::tuple fractional_max_pool2d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + std::tuple fractional_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + Tensor & fractional_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + Tensor fractional_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + std::tuple fractional_max_pool3d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + std::tuple fractional_max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); + Tensor & fractional_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + Tensor fractional_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); + std::tuple max_pool2d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + std::tuple max_pool2d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor & max_pool2d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor max_pool2d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + std::tuple max_pool3d_with_indices_out(Tensor & out, Tensor & indices, const Tensor 
& self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + std::tuple max_pool3d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor & max_pool3d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor max_pool3d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); + Tensor & max_unpool2d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor max_unpool2d(const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor & max_unpool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor max_unpool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); + Tensor & max_unpool3d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor max_unpool3d(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor & max_unpool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor max_unpool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); + Tensor & reflection_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad1d(const Tensor & self, IntArrayRef padding); + Tensor & reflection_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & reflection_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad2d(const Tensor & self, IntArrayRef padding); + Tensor & reflection_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor reflection_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor replication_pad1d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); + Tensor replication_pad2d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & replication_pad3d_out(Tensor & out, const Tensor & 
self, IntArrayRef padding); + Tensor replication_pad3d(const Tensor & self, IntArrayRef padding); + Tensor & replication_pad3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor replication_pad3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); + Tensor & upsample_linear1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_linear1d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_linear1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_linear1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_bilinear2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_bilinear2d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_bilinear2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_bilinear2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_bicubic2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_bicubic2d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_bicubic2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_bicubic2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_trilinear3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_trilinear3d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor & upsample_trilinear3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor upsample_trilinear3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); + Tensor & upsample_nearest1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest1d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor upsample_nearest1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & upsample_nearest2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest2d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor upsample_nearest2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & upsample_nearest3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor upsample_nearest3d(const Tensor & self, IntArrayRef output_size); + Tensor & upsample_nearest3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor 
upsample_nearest3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); + Tensor & sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); + Tensor & tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); + Tensor tanh_backward(const Tensor & grad_output, const Tensor & output); + Tensor & slow_conv_transpose2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + Tensor slow_conv_transpose2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + std::tuple slow_conv_transpose2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones); + std::tuple slow_conv_transpose2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones, std::array output_mask); + Tensor & slow_conv_transpose3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + Tensor slow_conv_transpose3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation); + std::tuple slow_conv_transpose3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input); + std::tuple slow_conv_transpose3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); + std::tuple thnn_conv2d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple thnn_conv2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + std::tuple thnn_conv2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); + std::tuple thnn_conv2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); + Tensor & 
thnn_conv_depthwise2d_forward_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + Tensor thnn_conv_depthwise2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple thnn_conv_depthwise2d_backward_out(Tensor & grad_input, Tensor & grad_weight, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple thnn_conv_depthwise2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + Tensor slow_conv_dilated2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple slow_conv_dilated2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + Tensor slow_conv_dilated3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + std::tuple slow_conv_dilated3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + Tensor & col2im_out(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor col2im(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor & col2im_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor col2im_backward(const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor & im2col_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor im2col(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor & im2col_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); + Tensor im2col_backward(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +} +#endif + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/Config.h b/thirdparty/libtorch/include/ATen/Config.h new file mode 100644 index 0000000000..2dbea9470f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Config.h @@ -0,0 +1,12 @@ +#pragma once + +// Test these using #if AT_MKL_ENABLED(), not #ifdef, so that it's +// obvious if you forgot to include Config.h +// c.f. 
https://stackoverflow.com/questions/33759787/generating-an-error-if-checked-boolean-macro-is-not-defined +// +// DO NOT put the macros for CUDA libraries in this file; they belong in cuda/CUDAConfig.h + +#define AT_MKLDNN_ENABLED() 1 +#define AT_MKL_ENABLED() 1 +#define AT_NNPACK_ENABLED() 1 +#define CAFFE2_STATIC_LINK_CUDA() 0 diff --git a/thirdparty/libtorch/include/ATen/Context.h b/thirdparty/libtorch/include/ATen/Context.h new file mode 100644 index 0000000000..6dc9c10df1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Context.h @@ -0,0 +1,237 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace at { + +class Tensor; + +class CAFFE2_API Context { + public: + Context(); + + Generator & defaultGenerator(Device device) { + DeviceType device_type = device.type(); + initCUDAIfNeeded(device_type); + initHIPIfNeeded(device_type); + if (device_type == at::kCPU) { + return *at::detail::getDefaultCPUGenerator(); + } else if (device_type == at::kCUDA) { + return *at::detail::getCUDAHooks().getDefaultCUDAGenerator(device.index()); + } else { + AT_ERROR(DeviceTypeName(device_type), " device type not enabled."); + } + } + Device getDeviceFromPtr(void* data, DeviceType device_type) { + initCUDAIfNeeded(device_type); + initHIPIfNeeded(device_type); + if (device_type == at::kCPU) { + return DeviceType::CPU; + } else if (device_type == at::kCUDA) { + return at::detail::getCUDAHooks().getDeviceFromPtr(data); + } else { + AT_ERROR(DeviceTypeName(device_type), " device type not enabled."); + } + } + bool isPinnedPtr(void* data) { + return detail::getCUDAHooks().isPinnedPtr(data); + } + bool hasOpenMP() const; + bool hasMKL() const; + bool hasLAPACK() const; + bool hasMKLDNN() const; + bool hasMAGMA() const { + return detail::getCUDAHooks().hasMAGMA(); + } + bool hasCUDA() const { + return detail::getCUDAHooks().hasCUDA(); + } + bool hasHIP() const { + return detail::getHIPHooks().hasHIP(); + } + bool hasXLA() const { + return c10::impl::hasDeviceGuardImpl(at::DeviceType::XLA); + } + // defined in header so that getNonVariableType has ability to inline + // call_once check. getNonVariableType is called fairly frequently + THCState* lazyInitCUDA() { + std::call_once(thc_init,[&] { + thc_state = detail::getCUDAHooks().initCUDA(); + }); + return thc_state.get(); + } + THHState* lazyInitHIP() { + std::call_once(thh_init,[&] { + thh_state = detail::getHIPHooks().initHIP(); + }); + return thh_state.get(); + } + const at::cuda::NVRTC& getNVRTC() { + return detail::getCUDAHooks().nvrtc(); + } + THCState* getTHCState() { + // AT_ASSERT(thc_state); + return thc_state.get(); + } + THHState* getTHHState() { + return thh_state.get(); + } + + bool setFlushDenormal(bool on); + + // NB: This method is *purely* whether or not a user requested + // that CuDNN was enabled, it doesn't actually say anything about + // whether or not CuDNN is actually usable. 
Use cudnn_is_acceptable + // to test this instead + bool userEnabledCuDNN() const; + void setUserEnabledCuDNN(bool e); + bool userEnabledMkldnn() const; + void setUserEnabledMkldnn(bool e); + bool benchmarkCuDNN() const; + void setBenchmarkCuDNN(bool); + bool deterministicCuDNN() const; + void setDeterministicCuDNN(bool); + at::QEngine qEngine() const; + void setQEngine(at::QEngine e); + const std::vector& supportedQEngines() const; + + private: + void initCUDAIfNeeded(DeviceType p) { + if (p == DeviceType::CUDA) { + lazyInitCUDA(); + } + } + void initHIPIfNeeded(DeviceType p) { + if (p == DeviceType::HIP) { + lazyInitHIP(); + } + } + std::once_flag thc_init; + std::once_flag thh_init; + bool enabled_cudnn = true; + bool deterministic_cudnn = false; + bool benchmark_cudnn = false; + bool enabled_mkldnn = true; + c10::optional quantized_engine = c10::nullopt; + std::unique_ptr thc_state; + std::unique_ptr thh_state; +}; + +CAFFE2_API Context& globalContext(); + +static inline void init() { + globalContext(); +} + +CAFFE2_API Allocator* getCPUAllocator(); + +static inline DeprecatedTypeProperties& getDeprecatedTypeProperties(Backend p, ScalarType s) { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + p, s); +} + +static inline DeprecatedTypeProperties& CPU(ScalarType s) { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + Backend::CPU, s); +} + +static inline DeprecatedTypeProperties& CUDA(ScalarType s) { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + Backend::CUDA, s); +} + +static inline DeprecatedTypeProperties& HIP(ScalarType s) { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + Backend::HIP, s); +} + +static inline bool hasCUDA() { + return globalContext().hasCUDA(); +} + +static inline bool hasHIP() { + return globalContext().hasHIP(); +} + +static inline bool hasXLA() { + return globalContext().hasXLA(); +} + +// Despite its name, this function returns the number of *CUDA* GPUs. +static inline size_t getNumGPUs() { + // WARNING: DO NOT ADD LOGIC TO HANDLE OTHER DEVICE TYPES TO THIS + // FUNCTION. If you are interested in interrogating the number of + // devices for a specific device type, add that function to the + // relevant library (e.g., similar to at::cuda::device_count()) + if (hasCUDA() && hasHIP()) { + throw std::runtime_error( + "Enabling both CUDA and HIP in ATen is not supported, as HIP masquerades " + "to be CUDA (e.g., when you say CUDA, on a HIP build of ATen, this actually " + "means HIP. Rebuild PyTorch with one or the other disabled."); + } else if (hasCUDA()) { + return detail::getCUDAHooks().getNumGPUs(); + } else if (hasHIP()) { + return detail::getHIPHooks().getNumGPUs(); + } else { + return 0; + } +} + +static inline bool hasOpenMP() { + return globalContext().hasOpenMP(); +} + +static inline bool hasMKL() { + return globalContext().hasMKL(); +} + +static inline bool hasLAPACK() { + return globalContext().hasLAPACK(); +} + +static inline bool hasMAGMA() { + return globalContext().hasMAGMA(); +} + +static inline bool hasMKLDNN() { + return globalContext().hasMKLDNN(); +} + +static inline void manual_seed(uint64_t seed) { + auto& gen = globalContext().defaultGenerator(DeviceType::CPU); + { + // See Note [Acquire lock when using random generators] + std::lock_guard lock(gen.mutex_); + gen.set_current_seed(seed); + } + // NB: Sometimes we build with CUDA, but we don't have any GPUs + // available. 
In that case, we must not seed CUDA; it will fail! + int num_gpus = detail::getCUDAHooks().getNumGPUs(); + if (hasCUDA() && num_gpus > 0) { + for (int i = 0; i < num_gpus; i++) { + auto& cuda_gen = globalContext().defaultGenerator(Device(at::kCUDA, i)); + { + // See Note [Acquire lock when using random generators] + std::lock_guard lock(cuda_gen.mutex_); + cuda_gen.set_current_seed(seed); + } + } + } +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/DLConvertor.h b/thirdparty/libtorch/include/ATen/DLConvertor.h new file mode 100644 index 0000000000..8458e6ec2d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/DLConvertor.h @@ -0,0 +1,19 @@ +#pragma once + +#include +#include +#include + +// this convertor will: +// 1) take a Tensor object and wrap it in the DLPack tensor +// 2) take a dlpack tensor and convert it to the ATen Tensor + +namespace at { + +CAFFE2_API ScalarType toScalarType(const DLDataType& dtype); +CAFFE2_API DLManagedTensor* toDLPack(const Tensor& src); +CAFFE2_API Tensor fromDLPack(const DLManagedTensor* src); +CAFFE2_API DLDataType getDLDataType(const Tensor& t); +CAFFE2_API DLContext getDLContext(const Tensor& tensor, const int64_t& device_id); + +} //namespace at diff --git a/thirdparty/libtorch/include/ATen/Device.h b/thirdparty/libtorch/include/ATen/Device.h new file mode 100644 index 0000000000..6c51558036 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Device.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/DeviceGuard.h b/thirdparty/libtorch/include/ATen/DeviceGuard.h new file mode 100644 index 0000000000..2aba6a32ea --- /dev/null +++ b/thirdparty/libtorch/include/ATen/DeviceGuard.h @@ -0,0 +1,36 @@ +#pragma once + +#include +#include +#include // TensorList whyyyyy + +namespace at { + +// Are you here because you're wondering why DeviceGuard(tensor) no +// longer works? For code organization reasons, we have temporarily(?) +// removed this constructor from DeviceGuard. The new way to +// spell it is: +// +// OptionalDeviceGuard guard(device_of(tensor)); + +/// Return the Device of a Tensor, if the Tensor is defined. +inline optional device_of(const Tensor& t) { + if (t.defined()) { + return make_optional(t.device()); + } else { + return nullopt; + } +} + +/// Return the Device of a TensorList, if the list is non-empty and +/// the first Tensor is defined. (This function implicitly assumes +/// that all tensors in the list have the same device.) +inline optional device_of(TensorList t) { + if (!t.empty()) { + return device_of(t.front()); + } else { + return nullopt; + } +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/DimVector.h b/thirdparty/libtorch/include/ATen/DimVector.h new file mode 100644 index 0000000000..cb652fffcb --- /dev/null +++ b/thirdparty/libtorch/include/ATen/DimVector.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/Dimname.h b/thirdparty/libtorch/include/ATen/Dimname.h new file mode 100644 index 0000000000..71836a9e25 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Dimname.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/Dispatch.h b/thirdparty/libtorch/include/ATen/Dispatch.h new file mode 100644 index 0000000000..52363ad0ee --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Dispatch.h @@ -0,0 +1,419 @@ +#pragma once + +#include +#include +#include +#include +#include + +#define AT_PRIVATE_CASE_TYPE(enum_type, type, ...) 
\ + case enum_type: { \ + using scalar_t = type; \ + return __VA_ARGS__(); \ + } + +#define AT_QINT_PRIVATE_CASE_TYPE(enum_type, type, underlying_enum, underlying_type, ...) \ + case enum_type: { \ + const auto& UNDERLYING_TYPE C10_UNUSED = underlying_enum; \ + using scalar_t C10_UNUSED = type; \ + using underlying_t C10_UNUSED = underlying_type; \ + return __VA_ARGS__(); \ + } + +namespace detail { + +inline at::ScalarType scalar_type(at::ScalarType s) { + return s; +} + +C10_DEPRECATED_MESSAGE("passing at::DeprecatedTypeProperties to an AT_DISPATCH macro is deprecated, " \ + "pass an at::ScalarType instead") +inline at::ScalarType scalar_type(const at::DeprecatedTypeProperties &t) { + return t.scalarType(); +} + +C10_DEPRECATED_MESSAGE("AT_DISPATCH_ALL_TYPES_AND_HALF is deprecated, " \ + "use AT_DISPATCH_ALL_TYPES_AND(at::ScalarType::Half, ...) instead") +inline void deprecated_AT_DISPATCH_ALL_TYPES_AND_HALF() {} + +C10_DEPRECATED_MESSAGE("AT_DISPATCH_ALL_TYPES_AND_HALF_AND_COMPLEX is deprecated, " \ + "use AT_DISPATCH_ALL_TYPES_AND_COMPLEX_AND(at::ScalarType::Half, ...) " \ + "instead") +inline void deprecated_AT_DISPATCH_ALL_TYPES_AND_HALF_AND_COMPLEX() {} + +} + +// The AT_DISPATCH_* family of macros provides the ability to +// conveniently generate specializations of a kernel over all of the +// dtypes we care about in PyTorch. We call it "dispatch" because +// we are "dispatching" to the correct, dtype-specific kernel. +// +// A standard usage looks like: +// +// AT_DISPATCH_ALL_TYPES(self.scalar_type(), "op_name", [&] { +// // Your code here, with 'scalar_t' now defined to +// // be the dtype in question +// }) +// +// There are many variations of this macro, so it's important to +// understand exactly /which/ dtypes you want to get instantiated, as +// well as what the "default" set is. +// +// The default set of dtypes that are instantiated (e.g., by +// AT_DISPATCH_ALL_TYPES) are floating point types (float, double), +// and integral types (int32_t, int64_t, int16_t, int8_t, uint8_t), +// but NOT booleans (bool), half-precision floats (Half) or +// complex number (std::complex, std::complex). +// This "cut" is somewhat historical (the default types are the +// ones that TH historically supported), but it also reflects the +// fact that the non-default types are "poorly" behaved (booleans +// are NOT integers mod 2, half precision operations ~essentially +// don't exist on CPU, complex numbers are an experimental application). +// +// Here are the questions you should generally ask to decide which +// dispatch you want: +// +// 1. Is this an integral or floating point specific operation? +// (If so, you'll want one of the FLOATING or INTEGRAL macros.) +// +// 2. Should half be supported? (If you're on CPU, the answer is almost +// definitely no. If you do want support, use one of the AND_HALF +// macros) +// +// Much rarer situations: +// +// 3. Should bool be supported? (You often have to write your kernel +// differently if arithmetic operations are involved.) If so, +// Use AT_DISPATCH_ALL_TYPES_AND along with ScalarType::Bool +// +// 4. Should complex be supported? The answer is almost always no, +// unless you are working on "generic" code that should work on +// all dtypes. + + +// NB: the the_type variable is not used, but we have kept it for +// backwards compatibility. It's probably not used by anyone though; +// but we're just being safe (and it doesn't hurt.) Note we must +// use it to shut up warnings about unused store. 
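Editor's note: a minimal, hypothetical usage sketch of the AT_DISPATCH_FLOATING_TYPES macro defined just below. It is not part of the vendored header or of this patch; the kernel name, its arguments, and the assumption of contiguous CPU tensors are illustrative only.

    // Illustrative only (editor's addition): dispatch a simple CPU kernel over float/double.
    #include <ATen/ATen.h>
    #include <ATen/Dispatch.h>
    #include <algorithm>
    #include <cstdint>

    // Hypothetical kernel: out = max(in * scale, 0). Assumes both tensors are
    // contiguous, on CPU, and have the same floating-point dtype and shape.
    static void scale_clamp_kernel(at::Tensor& out, const at::Tensor& in, double scale) {
      AT_DISPATCH_FLOATING_TYPES(in.scalar_type(), "scale_clamp_kernel", [&] {
        // Inside the lambda, scalar_t is bound to float or double depending on
        // in.scalar_type(); any other dtype hits the AT_ERROR default case.
        const scalar_t* src = in.data_ptr<scalar_t>();
        scalar_t* dst = out.data_ptr<scalar_t>();
        const scalar_t s = static_cast<scalar_t>(scale);
        for (int64_t i = 0; i < in.numel(); ++i) {
          dst[i] = std::max(src[i] * s, static_cast<scalar_t>(0));
        }
      });
    }

End of editor's note; the vendored macro definitions continue below.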
+ +#define AT_DISPATCH_FLOATING_TYPES(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_FLOATING_TYPES_AND_HALF(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Half, at::Half, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_FLOATING_TYPES_AND(SCALARTYPE, TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE, \ + decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + + #define AT_DISPATCH_FLOATING_AND_COMPLEX_TYPES(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + + #define AT_DISPATCH_FLOATING_AND_COMPLEX_TYPES_AND1(SCALARTYPE, TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_INTEGRAL_TYPES(TYPE, NAME, ...) 
\ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_COMPLEX_TYPES(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op */ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_QINT_TYPES(TYPE, NAME, ...) \ + [&] { \ + const auto& SCALAR_TYPE C10_UNUSED = TYPE; \ + switch (TYPE) { \ + AT_QINT_PRIVATE_CASE_TYPE( \ + at::kQInt8, at::qint8, at::kChar, int8_t, __VA_ARGS__) \ + AT_QINT_PRIVATE_CASE_TYPE( \ + at::kQUInt8, at::quint8, at::kByte, uint8_t, __VA_ARGS__) \ + AT_QINT_PRIVATE_CASE_TYPE( \ + at::kQInt32, at::qint32, at::kInt, int, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES_AND_COMPLEX(TYPE, NAME, ...) \ + [&] { \ + const auto& the_type = TYPE; \ + /* don't use TYPE again in case it is an expensive or side-effect op*/ \ + at::ScalarType _st = ::detail::scalar_type(the_type); \ + switch (_st) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES_AND(SCALARTYPE, TYPE, NAME, ...) 
\ + [&] { \ + switch (TYPE) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES_AND_COMPLEX_AND(SCALARTYPE, TYPE, NAME, ...) \ + [&] { \ + switch (TYPE) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + +#define AT_DISPATCH_COMPLEX_TYPES_AND(SCALARTYPE, TYPE, NAME, ...) \ + [&] { \ + switch (TYPE) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES_AND2(SCALARTYPE1, SCALARTYPE2, TYPE, NAME, ...) \ + [&] { \ + switch (TYPE) { \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE1, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + AT_PRIVATE_CASE_TYPE(SCALARTYPE2, decltype(c10::impl::ScalarTypeToCPPType::t), __VA_ARGS__) \ + default: \ + AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \ + } \ + }() + +#define AT_DISPATCH_ALL_TYPES_AND_COMPLEX_AND2(SCALARTYPE1, SCALARTYPE2, TYPE, NAME, ...) 
+  [&] { \
+    switch (TYPE) { \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex<float>, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex<double>, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE1, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE1>::t), __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE2, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE2>::t), __VA_ARGS__) \
+      default: \
+        AT_ERROR(#NAME, " not implemented for '", TYPE, "'"); \
+    } \
+  }()
+
+#define AT_DISPATCH_ALL_TYPES_AND3(SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, TYPE, NAME, ...) \
+  [&] { \
+    switch (TYPE) { \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE1, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE1>::t), __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE2, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE2>::t), __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE3, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE3>::t), __VA_ARGS__) \
+      default: \
+        AT_ERROR(#NAME, " not implemented for '", toString(TYPE), "'"); \
+    } \
+  }()
+
+#define AT_DISPATCH_ALL_TYPES_AND_COMPLEX_AND3(SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, TYPE, NAME, ...) \
+  [&] { \
+    switch (TYPE) { \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexFloat, std::complex<float>, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::ComplexDouble, std::complex<double>, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE1, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE1>::t), __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE2, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE2>::t), __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(SCALARTYPE3, decltype(c10::impl::ScalarTypeToCPPType<SCALARTYPE3>::t), __VA_ARGS__) \
+      default: \
+        AT_ERROR(#NAME, " not implemented for '", TYPE, "'"); \
+    } \
+  }()
+
+// ----------------------------------------------------------------------------
+// DEPRECATED MACROS, DON'T USE THESE
+// ----------------------------------------------------------------------------
+
+#define AT_DISPATCH_ALL_TYPES_AND_HALF(TYPE, NAME, ...) \
+  [&] { \
+    detail::deprecated_AT_DISPATCH_ALL_TYPES_AND_HALF(); \
+    const auto& the_type = TYPE; \
+    /* don't use TYPE again in case it is an expensive or side-effect op */ \
+    at::ScalarType _st = ::detail::scalar_type(the_type); \
+    switch (_st) { \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Half, at::Half, __VA_ARGS__) \
+      default: \
+        AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \
+    } \
+  }()
+
+#define AT_DISPATCH_ALL_TYPES_AND_HALF_AND_COMPLEX(TYPE, NAME, ...) \
+  [&] { \
+    detail::deprecated_AT_DISPATCH_ALL_TYPES_AND_HALF_AND_COMPLEX(); \
+    const auto& the_type = TYPE; \
+    /* don't use TYPE again in case it is an expensive or side-effect op */ \
+    at::ScalarType _st = ::detail::scalar_type(the_type); \
+    switch (_st) { \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Byte, uint8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Char, int8_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Double, double, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Float, float, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Int, int32_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Long, int64_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Short, int16_t, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE(at::ScalarType::Half, at::Half, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE( \
+          at::ScalarType::ComplexFloat, std::complex<float>, __VA_ARGS__) \
+      AT_PRIVATE_CASE_TYPE( \
+          at::ScalarType::ComplexDouble, std::complex<double>, __VA_ARGS__) \
+      default: \
+        AT_ERROR(#NAME, " not implemented for '", toString(_st), "'"); \
+    } \
+  }()
diff --git a/thirdparty/libtorch/include/ATen/DynamicLibrary.h b/thirdparty/libtorch/include/ATen/DynamicLibrary.h
new file mode 100644
index 0000000000..ea919a79d3
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/DynamicLibrary.h
@@ -0,0 +1,21 @@
+#pragma once
+
+#include
+#include
+
+namespace at {
+
+struct DynamicLibrary {
+  AT_DISALLOW_COPY_AND_ASSIGN(DynamicLibrary);
+
+  CAFFE2_API DynamicLibrary(const char* name);
+
+  CAFFE2_API void* sym(const char* name);
+
+  CAFFE2_API ~DynamicLibrary();
+
+ private:
+  void* handle = nullptr;
+};
+
+} // namespace at
diff --git a/thirdparty/libtorch/include/ATen/ExpandUtils.h b/thirdparty/libtorch/include/ATen/ExpandUtils.h
new file mode 100644
index 0000000000..39d2ae5634
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/ExpandUtils.h
@@ -0,0 +1,177 @@
+#pragma once
+
+#include
+#include
+
+#include
+#include
+#include
+
+namespace at {
+
+CAFFE2_API std::vector<int64_t> infer_size(IntArrayRef a, IntArrayRef b);
+CAFFE2_API std::tuple<std::vector<int64_t>, std::vector<int64_t>>
+inferExpandGeometry(
+    IntArrayRef tensor_sizes,
+    IntArrayRef tensor_strides,
+    IntArrayRef sizes);
+
+// avoid copy-construction of Tensor by using a reference_wrapper.
+inline void check_defined(std::initializer_list<std::reference_wrapper<const Tensor>> tensors, const char *api_name) {
+  for (auto& t : tensors) {
+    if (!t.get().defined()) {
+      AT_ERROR(api_name, "(...) called with an undefined Tensor");
+    }
+  }
+}
+
+inline std::tuple<Tensor> expand_inplace(const Tensor &tensor, const Tensor &to_expand) {
+  if (tensor.sizes().equals(to_expand.sizes())) {
+    return std::make_tuple(to_expand);
+  }
+
+  return std::make_tuple(to_expand.expand(tensor.sizes(), /*implicit=*/true)); // see [expand implicit]
+}
+
+inline std::tuple<Tensor> expand_inplace(const Tensor &tensor, const Tensor &to_expand, const char *api_name) {
+  check_defined({tensor, to_expand}, api_name);
+  return expand_inplace(tensor, to_expand);
+}
+
+inline std::tuple<Tensor, Tensor> expand_inplace(const Tensor &tensor, const Tensor &to_expand1, const Tensor &to_expand2) {
+  if (tensor.sizes().equals(to_expand1.sizes()) && tensor.sizes().equals((to_expand2.sizes()))) {
+    return std::make_tuple(to_expand1, to_expand2);
+  }
+
+  return std::make_tuple(
+      to_expand1.expand(tensor.sizes(), /*implicit=*/true), // see [expand implicit]
+      to_expand2.expand(tensor.sizes(), /*implicit=*/true));
+}
+
+inline std::tuple<Tensor, Tensor> expand_inplace(const Tensor &tensor, const Tensor &to_expand1, const Tensor &to_expand2,
+                                                 const char *api_name) {
+  check_defined({tensor, to_expand1, to_expand2}, api_name);
+  return expand_inplace(tensor, to_expand1, to_expand2);
+}
+
+inline std::tuple<Tensor, Tensor> expand_outplace(const Tensor &to_expand1, const Tensor &to_expand2) {
+  if (to_expand1.sizes().equals(to_expand2.sizes())) {
+    return std::make_tuple(to_expand1, to_expand2);
+  }
+
+  auto expanded_size = infer_size(to_expand1.sizes(), to_expand2.sizes());
+  return std::make_tuple(
+      to_expand1.expand(expanded_size, /*implicit=*/true), // see [expand implicit]
+      to_expand2.expand(expanded_size, /*implicit=*/true));
+}
+
+inline std::tuple<Tensor, Tensor> expand_outplace(const Tensor &to_expand1, const Tensor &to_expand2, const char *api_name) {
+  check_defined({to_expand1, to_expand2}, api_name);
+  return expand_outplace(to_expand1, to_expand2);
+}
+
+inline std::tuple<Tensor, Tensor, Tensor> expand_outplace(const Tensor &to_expand1,
+                                                          const Tensor &to_expand2,
+                                                          const Tensor &to_expand3) {
+  if (to_expand1.sizes().equals(to_expand2.sizes()) && to_expand1.sizes().equals(to_expand3.sizes())) {
+    return std::make_tuple(to_expand1, to_expand2, to_expand3);
+  }
+
+  auto expanded_size12 = infer_size(to_expand1.sizes(), to_expand2.sizes());
+  auto expanded_size = infer_size(expanded_size12, to_expand3.sizes());
+  return std::make_tuple(
+      to_expand1.expand(expanded_size, /*implicit=*/true), // see [expand implicit]
+      to_expand2.expand(expanded_size, /*implicit=*/true),
+      to_expand3.expand(expanded_size, /*implicit=*/true));
+}
+
+inline std::tuple<Tensor, Tensor, Tensor> expand_outplace(const Tensor &to_expand1,
+                                                          const Tensor &to_expand2,
+                                                          const Tensor &to_expand3,
+                                                          const char *api_name) {
+  check_defined({to_expand1, to_expand2, to_expand3}, api_name);
+  return expand_outplace(to_expand1, to_expand2, to_expand3);
+}
+
+inline std::tuple<Tensor> expand_size(const Tensor &to_expand, IntArrayRef sizes) {
+  if(to_expand.sizes().equals(sizes)) {
+    return std::make_tuple(to_expand);
+  }
+
+  return std::make_tuple(to_expand.expand(sizes, /*implicit=*/true)); // see [expand implicit]
+}
+
+inline std::tuple<Tensor> expand_size(const Tensor &to_expand, IntArrayRef sizes, const char *api_name) {
+  check_defined({to_expand}, api_name);
+  return expand_size(to_expand, sizes);
+}
+
+inline std::vector<Tensor> expand_outplace(TensorList to_expand) {
+  // expands a list of Tensors; ignores undefined (null) tensors
+  bool first = true;
+  std::vector<int64_t> sizes;
+  for (size_t i = 0; i < to_expand.size(); ++i) {
+    if (!to_expand[i].defined()) {
+      continue;
+    } else if (first) {
+      sizes = to_expand[i].sizes().vec();
+      first = false;
+    } else {
+      sizes = infer_size(sizes, to_expand[i].sizes());
+    }
+  }
+
+  std::vector<Tensor> result(to_expand.size());
+  for (size_t i = 0; i < to_expand.size(); ++i) {
+    if (!to_expand[i].defined()) {
+      continue;
+    } else if (to_expand[i].sizes().equals(sizes)) {
+      result[i] = to_expand[i];
+    } else {
+      result[i] = to_expand[i].expand(sizes, /*implicit=*/true); // see [expand implicit]
+    }
+  }
+  return result;
+}
+
+// Sums `tensor` repeatedly to produce a tensor of shape `shape`.
+// Precondition: is_expandable_to(shape, tensor.sizes()) must be true
+static inline Tensor sum_to(Tensor tensor, const IntArrayRef shape) {
+  if (shape.size() == 0) {
+    return tensor.sum();
+  }
+  c10::SmallVector<int64_t, 8> reduce_dims;
+  const at::IntArrayRef sizes = tensor.sizes();
+  const int64_t leading_dims = sizes.size() - shape.size();
+  for (int64_t i = 0; i < leading_dims; ++i) {
+    reduce_dims.push_back(i);
+  }
+  for (int64_t i = leading_dims; i < static_cast<int64_t>(sizes.size()); ++i) {
+    if (shape[i - leading_dims] == 1 && sizes[i] != 1) {
+      reduce_dims.push_back(i);
+    }
+  }
+  if (!reduce_dims.empty()) {
+    tensor = tensor.sum(reduce_dims, /*keepdim=*/true);
+  }
+  return leading_dims > 0 ? tensor.view(shape) : tensor;
+}
+
+// True if `shape` can be broadcasted to `desired`
+static inline bool is_expandable_to(IntArrayRef shape, IntArrayRef desired) {
+  size_t ndim = shape.size();
+  size_t target_dim = desired.size();
+  if (ndim > target_dim) {
+    return false;
+  }
+  for (size_t i = 0; i < ndim; i++) {
+    int64_t size = shape[ndim - i - 1];
+    int64_t target = desired[target_dim - i - 1];
+    if (size != target && size != 1) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}
diff --git a/thirdparty/libtorch/include/ATen/Formatting.h b/thirdparty/libtorch/include/ATen/Formatting.h
new file mode 100644
index 0000000000..392e2a27b0
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/Formatting.h
@@ -0,0 +1 @@
+#include
diff --git a/thirdparty/libtorch/include/ATen/Functions.h b/thirdparty/libtorch/include/ATen/Functions.h
new file mode 100644
index 0000000000..0ddc2c5e74
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/Functions.h
@@ -0,0 +1,17530 @@
+#pragma once
+
+// @generated by aten/src/ATen/gen.py
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace at {
+
+using native::tensor;
+
+static inline Tensor _cast_Byte(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Char(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Double(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Float(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Int(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Long(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Short(const Tensor & self, bool non_blocking=false);
+static inline Tensor _cast_Half(const Tensor & self, bool non_blocking=false);
+#ifdef BUILD_NAMEDTENSOR
+static inline std::vector<Tensor> align_tensors(TensorList tensors);
+#endif
+static inline bool _use_cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank);
+static inline std::tuple<Tensor, Tensor> _cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool deterministic, bool zero_infinity);
+static inline
Tensor _cudnn_rnn_flatten_weight(TensorList weight_arr, int64_t weight_stride0, int64_t input_size, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, bool bidirectional); +static inline std::tuple _cudnn_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); +static inline std::tuple> _cudnn_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); +static inline Tensor _cudnn_init_dropout_state(double dropout, bool train, int64_t dropout_seed, const TensorOptions & options); +static inline int64_t _debug_has_internal_overlap(const Tensor & self); +static inline std::tuple _fused_dropout(const Tensor & self, double p, Generator * generator=nullptr); +static inline Tensor _masked_scale(const Tensor & self, const Tensor & mask, double scale); +static inline std::tuple _sobol_engine_draw(const Tensor & quasi, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated, c10::optional dtype); +static inline Tensor & _sobol_engine_ff_(Tensor & self, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated); +static inline Tensor & _sobol_engine_scramble_(Tensor & self, const Tensor & ltm, int64_t dimension); +static inline Tensor & _sobol_engine_initialize_state_(Tensor & self, int64_t dimension); +static inline Tensor _reshape_from_tensor(const Tensor & self, const Tensor & shape); +static inline Tensor _shape_as_tensor(const Tensor & self); +static inline Tensor dropout(const Tensor & input, double p, bool train); +static inline Tensor & dropout_(Tensor & self, double p, bool train); +static inline Tensor feature_dropout(const Tensor & input, double p, bool train); +static inline Tensor & feature_dropout_(Tensor & self, double p, bool train); +static inline Tensor alpha_dropout(const Tensor & input, double p, bool train); +static inline Tensor & alpha_dropout_(Tensor & self, double p, bool train); +static inline Tensor feature_alpha_dropout(const Tensor & input, double p, bool train); +static inline Tensor & feature_alpha_dropout_(Tensor & self, double p, bool train); +static inline Tensor abs(const Tensor & self); +static inline Tensor & abs_(Tensor & self); +static inline Tensor & abs_out(Tensor & out, const Tensor & self); +static inline Tensor angle(const Tensor & self); +static inline Tensor & angle_out(Tensor & out, const Tensor & self); +static inline Tensor real(const Tensor & self); +static inline Tensor & real_out(Tensor & out, const Tensor & self); +static inline Tensor imag(const Tensor & self); +static inline Tensor & imag_out(Tensor & out, const Tensor & self); +static inline Tensor conj(const Tensor & self); +static inline Tensor & conj_out(Tensor & out, const Tensor & self); +static inline Tensor acos(const Tensor & self); +static inline Tensor & acos_(Tensor & self); +static inline Tensor & acos_out(Tensor & out, const Tensor & self); +static inline Tensor 
avg_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true); +static inline Tensor adaptive_avg_pool1d(const Tensor & self, IntArrayRef output_size); +static inline std::tuple adaptive_max_pool1d(const Tensor & self, IntArrayRef output_size); +static inline Tensor add(const Tensor & self, const Tensor & other, Scalar alpha=1); +static inline Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +static inline Tensor add(const Tensor & self, Scalar other, Scalar alpha=1); +static inline Tensor addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta=1, Scalar alpha=1); +static inline Tensor & addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta=1, Scalar alpha=1); +static inline Tensor & addmv_out(Tensor & out, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta=1, Scalar alpha=1); +static inline Tensor addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +static inline Tensor affine_grid_generator(const Tensor & theta, IntArrayRef size, bool align_corners); +static inline Tensor affine_grid_generator_backward(const Tensor & grad, IntArrayRef size, bool align_corners); +static inline Tensor all(const Tensor & self, int64_t dim, bool keepdim=false); +static inline Tensor & all_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor all(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & all_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +static inline bool allclose(const Tensor & self, const Tensor & other, double rtol=1e-05, double atol=1e-08, bool equal_nan=false); +static inline Tensor any(const Tensor & self, int64_t dim, bool keepdim=false); +static inline Tensor & any_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor any(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & any_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +static inline Tensor arange(Scalar end, const TensorOptions & options={}); +static inline Tensor arange(Scalar start, Scalar end, const TensorOptions & options={}); +static inline Tensor arange(Scalar start, Scalar end, Scalar step, const TensorOptions & options={}); +static inline Tensor & arange_out(Tensor & out, Scalar end); +static inline Tensor & arange_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +static inline Tensor _dim_arange(const Tensor & like, int64_t dim); +static inline Tensor argmax(const Tensor & self, c10::optional dim=c10::nullopt, bool keepdim=false); +static inline Tensor argmin(const Tensor & self, c10::optional dim=c10::nullopt, bool keepdim=false); +static inline Tensor as_strided(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt); +static inline Tensor & as_strided_(Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt); +static inline Tensor asin(const Tensor & self); +static inline Tensor & asin_(Tensor & self); +static inline Tensor & 
asin_out(Tensor & out, const Tensor & self); +static inline Tensor atan(const Tensor & self); +static inline Tensor & atan_(Tensor & self); +static inline Tensor & atan_out(Tensor & out, const Tensor & self); +static inline Tensor baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & _baddbmm_mkl_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & baddbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +static inline Tensor bartlett_window(int64_t window_length, const TensorOptions & options={}); +static inline Tensor bartlett_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +static inline Tensor batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); +static inline std::tuple _batch_norm_impl_index(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); +static inline std::tuple _batch_norm_impl_index_backward(int64_t impl_index, const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var_transform, bool train, double eps, std::array output_mask, const Tensor & reservedSpace); +static inline Tensor bernoulli(const Tensor & self, Generator * generator=nullptr); +static inline Tensor & bernoulli_out(Tensor & out, const Tensor & self, Generator * generator=nullptr); +static inline Tensor bernoulli(const Tensor & self, double p, Generator * generator=nullptr); +static inline Tensor bilinear(const Tensor & input1, const Tensor & input2, const Tensor & weight, const Tensor & bias); +static inline Tensor binary_cross_entropy_with_logits(const Tensor & self, const Tensor & target, const Tensor & weight={}, const Tensor & pos_weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor binary_cross_entropy_with_logits_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight={}, const Tensor & pos_weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor bincount(const Tensor & self, const Tensor & weights={}, int64_t minlength=0); +static inline Tensor bitwise_not(const Tensor & self); +static inline Tensor & bitwise_not_out(Tensor & out, const Tensor & self); +static inline Tensor logical_not(const Tensor & self); +static inline Tensor & logical_not_out(Tensor & out, const Tensor & self); +static inline Tensor logical_xor(const Tensor & self, const Tensor & other); +static inline Tensor & logical_xor_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor blackman_window(int64_t window_length, const TensorOptions & options={}); +static inline Tensor blackman_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +static inline Tensor bmm(const Tensor & self, const Tensor & mat2); +static inline Tensor & bmm_out(Tensor & out, const Tensor & self, const Tensor & mat2); +static inline std::vector broadcast_tensors(TensorList tensors); +static inline Tensor cat(TensorList tensors, int64_t dim=0); +static inline Tensor & cat_out(Tensor & out, 
TensorList tensors, int64_t dim=0); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cat(TensorList tensors, Dimname dim); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cat_out(Tensor & out, TensorList tensors, Dimname dim); +#endif +static inline Tensor ceil(const Tensor & self); +static inline Tensor & ceil_(Tensor & self); +static inline Tensor & ceil_out(Tensor & out, const Tensor & self); +static inline Tensor chain_matmul(TensorList matrices); +static inline std::vector chunk(const Tensor & self, int64_t chunks, int64_t dim=0); +static inline Tensor clamp(const Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +static inline Tensor & clamp_(Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +static inline Tensor & clamp_out(Tensor & out, const Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +static inline Tensor clamp_max(const Tensor & self, Scalar max); +static inline Tensor & clamp_max_(Tensor & self, Scalar max); +static inline Tensor & clamp_max_out(Tensor & out, const Tensor & self, Scalar max); +static inline Tensor clamp_min(const Tensor & self, Scalar min); +static inline Tensor & clamp_min_(Tensor & self, Scalar min); +static inline Tensor & clamp_min_out(Tensor & out, const Tensor & self, Scalar min); +static inline bool cudnn_is_acceptable(const Tensor & self); +static inline Tensor constant_pad_nd(const Tensor & self, IntArrayRef pad, Scalar value=0); +static inline Tensor convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); +static inline Tensor convolution_overrideable(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); +static inline std::tuple convolution_backward_overrideable(const Tensor & grad_output, const Tensor & input, const Tensor & weight, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, std::array output_mask); +static inline Tensor _convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled); +static inline Tensor _convolution_nogroup(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding); +static inline std::tuple _convolution_double_backward(const Tensor & ggI, const Tensor & ggW, const Tensor & ggb, const Tensor & gO, const Tensor & weight, const Tensor & self, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled, std::array output_mask); +static inline Tensor conv1d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +static inline Tensor conv2d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +static inline Tensor conv3d(const Tensor & input, const 
Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +static inline Tensor conv_tbc(const Tensor & self, const Tensor & weight, const Tensor & bias, int64_t pad=0); +static inline std::tuple conv_tbc_backward(const Tensor & self, const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t pad); +static inline Tensor conv_transpose1d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +static inline Tensor conv_transpose2d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +static inline Tensor conv_transpose3d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +static inline Tensor _copy_from(const Tensor & self, const Tensor & dst, bool non_blocking=false); +static inline Tensor cos(const Tensor & self); +static inline Tensor & cos_(Tensor & self); +static inline Tensor & cos_out(Tensor & out, const Tensor & self); +static inline Tensor cosh(const Tensor & self); +static inline Tensor & cosh_(Tensor & self); +static inline Tensor & cosh_out(Tensor & out, const Tensor & self); +static inline Tensor cosine_embedding_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin=0.0, int64_t reduction=at::Reduction::Mean); +static inline Tensor cudnn_affine_grid_generator(const Tensor & theta, int64_t N, int64_t C, int64_t H, int64_t W); +static inline Tensor cudnn_affine_grid_generator_backward(const Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W); +static inline std::tuple cudnn_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); +static inline std::tuple cudnn_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon, const Tensor & reserveSpace); +static inline Tensor cudnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor cudnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple cudnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +static inline Tensor cudnn_convolution_backward_bias(const Tensor & grad_output); +static inline Tensor cudnn_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor cudnn_convolution_transpose(const Tensor & self, const 
Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple cudnn_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +static inline Tensor cudnn_convolution_transpose_backward_bias(const Tensor & grad_output); +static inline Tensor cudnn_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor cudnn_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor cudnn_grid_sampler(const Tensor & self, const Tensor & grid); +static inline std::tuple cudnn_grid_sampler_backward(const Tensor & self, const Tensor & grid, const Tensor & grad_output); +static inline Tensor cumsum(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +static inline Tensor & cumsum_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cumsum(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cumsum_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor cumprod(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +static inline Tensor & cumprod_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cumprod(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cumprod_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank=0, int64_t reduction=at::Reduction::Mean, bool zero_infinity=false); +static inline Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, const Tensor & input_lengths, const Tensor & target_lengths, int64_t blank=0, int64_t reduction=at::Reduction::Mean, bool zero_infinity=false); +static inline std::tuple _ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank=0, bool zero_infinity=false); +static inline Tensor _ctc_loss_backward(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity=false); +static inline Tensor det(const Tensor & self); +static inline Tensor diag_embed(const Tensor & self, int64_t offset=0, int64_t dim1=-2, int64_t dim2=-1); +static inline Tensor diagflat(const Tensor & self, int64_t offset=0); +static inline Tensor diagonal(const Tensor & self, int64_t offset=0, int64_t dim1=0, int64_t dim2=1); +static inline Tensor div(const Tensor & self, 
const Tensor & other); +static inline Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor div(const Tensor & self, Scalar other); +static inline Tensor dot(const Tensor & self, const Tensor & tensor); +static inline Tensor & dot_out(Tensor & out, const Tensor & self, const Tensor & tensor); +static inline Tensor einsum(std::string equation, TensorList tensors); +static inline Tensor embedding(const Tensor & weight, const Tensor & indices, int64_t padding_idx=-1, bool scale_grad_by_freq=false, bool sparse=false); +static inline Tensor embedding_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq, bool sparse); +static inline Tensor embedding_dense_backward(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); +static inline Tensor & embedding_renorm_(Tensor & self, const Tensor & indices, double max_norm, double norm_type); +static inline Tensor embedding_sparse_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); +static inline std::tuple embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq=false, int64_t mode=0, bool sparse=false, const Tensor & per_sample_weights={}); +static inline std::tuple _embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq=false, int64_t mode=0, bool sparse=false, const Tensor & per_sample_weights={}); +static inline Tensor _embedding_bag_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); +static inline Tensor _embedding_bag_sparse_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); +static inline Tensor _embedding_bag_dense_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); +static inline Tensor _embedding_bag_per_sample_weights_backward(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor empty(IntArrayRef size, c10::optional names, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +#endif +static inline Tensor empty(IntArrayRef size, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +static inline Tensor _empty_affine_quantized(IntArrayRef size, const TensorOptions & options={}, double scale=1, int64_t zero_point=0, c10::optional memory_format=MemoryFormat::Contiguous); +static inline Tensor _empty_per_channel_affine_quantized(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options={}, c10::optional memory_format=MemoryFormat::Contiguous); +static inline Tensor & empty_out(Tensor & out, IntArrayRef size, c10::optional memory_format=c10::nullopt); +static inline Tensor 
empty_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor empty_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor empty_strided(IntArrayRef size, IntArrayRef stride, const TensorOptions & options={}); +static inline Tensor erf(const Tensor & self); +static inline Tensor & erf_(Tensor & self); +static inline Tensor & erf_out(Tensor & out, const Tensor & self); +static inline Tensor erfc(const Tensor & self); +static inline Tensor & erfc_(Tensor & self); +static inline Tensor & erfc_out(Tensor & out, const Tensor & self); +static inline Tensor exp(const Tensor & self); +static inline Tensor & exp_(Tensor & self); +static inline Tensor & exp_out(Tensor & out, const Tensor & self); +static inline Tensor expm1(const Tensor & self); +static inline Tensor & expm1_(Tensor & self); +static inline Tensor & expm1_out(Tensor & out, const Tensor & self); +static inline Tensor eye(int64_t n, const TensorOptions & options={}); +static inline Tensor eye(int64_t n, int64_t m, const TensorOptions & options={}); +static inline Tensor & eye_out(Tensor & out, int64_t n); +static inline Tensor & eye_out(Tensor & out, int64_t n, int64_t m); +static inline Tensor flatten(const Tensor & self, int64_t start_dim=0, int64_t end_dim=-1); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim, Dimname out_dim); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, Dimname start_dim, Dimname end_dim, Dimname out_dim); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, DimnameList dims, Dimname out_dim); +#endif +static inline Tensor & fill_(Tensor & self, Scalar value); +static inline Tensor & fill_(Tensor & self, const Tensor & value); +static inline Tensor floor(const Tensor & self); +static inline Tensor & floor_(Tensor & self); +static inline Tensor & floor_out(Tensor & out, const Tensor & self); +static inline Tensor frac(const Tensor & self); +static inline Tensor & frac_(Tensor & self); +static inline Tensor & frac_out(Tensor & out, const Tensor & self); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor full(IntArrayRef size, Scalar fill_value, c10::optional names, const TensorOptions & options={}); +#endif +static inline Tensor full(IntArrayRef size, Scalar fill_value, const TensorOptions & options={}); +static inline Tensor & full_out(Tensor & out, IntArrayRef size, Scalar fill_value); +static inline Tensor full_like(const Tensor & self, Scalar fill_value, c10::optional memory_format=c10::nullopt); +static inline Tensor full_like(const Tensor & self, Scalar fill_value, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor from_file(std::string filename, c10::optional shared=c10::nullopt, c10::optional size=0, const TensorOptions & options={}); +static inline Tensor grid_sampler(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +static inline Tensor grid_sampler_2d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +static inline std::tuple grid_sampler_2d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +static inline Tensor grid_sampler_3d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t 
padding_mode, bool align_corners); +static inline std::tuple grid_sampler_3d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +static inline Tensor hann_window(int64_t window_length, const TensorOptions & options={}); +static inline Tensor hann_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +static inline Tensor hamming_window(int64_t window_length, const TensorOptions & options={}); +static inline Tensor hamming_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +static inline Tensor hamming_window(int64_t window_length, bool periodic, double alpha, const TensorOptions & options={}); +static inline Tensor hamming_window(int64_t window_length, bool periodic, double alpha, double beta, const TensorOptions & options={}); +static inline Tensor hinge_embedding_loss(const Tensor & self, const Tensor & target, double margin=1.0, int64_t reduction=at::Reduction::Mean); +static inline Tensor ger(const Tensor & self, const Tensor & vec2); +static inline Tensor & ger_out(Tensor & out, const Tensor & self, const Tensor & vec2); +static inline Tensor group_norm(const Tensor & input, int64_t num_groups, const Tensor & weight={}, const Tensor & bias={}, double eps=1e-05, bool cudnn_enabled=true); +static inline Tensor fft(const Tensor & self, int64_t signal_ndim, bool normalized=false); +static inline Tensor ifft(const Tensor & self, int64_t signal_ndim, bool normalized=false); +static inline Tensor rfft(const Tensor & self, int64_t signal_ndim, bool normalized=false, bool onesided=true); +static inline Tensor irfft(const Tensor & self, int64_t signal_ndim, bool normalized=false, bool onesided=true, IntArrayRef signal_sizes={}); +static inline Tensor _fft_with_size(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes); +static inline int64_t _cufft_get_plan_cache_size(int64_t device_index); +static inline int64_t _cufft_get_plan_cache_max_size(int64_t device_index); +static inline void _cufft_set_plan_cache_max_size(int64_t device_index, int64_t max_size); +static inline void _cufft_clear_plan_cache(int64_t device_index); +static inline Tensor index(const Tensor & self, TensorList indices); +static inline Tensor index_copy(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_copy(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); +#endif +static inline Tensor & index_put_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false); +static inline Tensor index_put(const Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false); +static inline Tensor & _index_put_impl_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false, bool unsafe=false); +static inline Tensor instance_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool use_input_stats, double momentum, double eps, bool cudnn_enabled); +static inline Tensor inverse(const Tensor & self); +static inline Tensor & inverse_out(Tensor & out, const Tensor & self); +static inline Tensor _inverse_helper(const Tensor & self); +static inline Tensor isclose(const Tensor & self, const Tensor & other, double 
rtol=1e-05, double atol=1e-08, bool equal_nan=false); +static inline Tensor isnan(const Tensor & self); +static inline bool is_distributed(const Tensor & self); +static inline bool is_floating_point(const Tensor & self); +static inline bool is_complex(const Tensor & self); +static inline bool is_nonzero(const Tensor & self); +static inline bool is_same_size(const Tensor & self, const Tensor & other); +static inline bool is_signed(const Tensor & self); +static inline Tensor kl_div(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor kl_div_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline std::tuple kthvalue(const Tensor & self, int64_t k, int64_t dim=-1, bool keepdim=false); +static inline std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim=-1, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple kthvalue(const Tensor & self, int64_t k, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, Dimname dim, bool keepdim=false); +#endif +static inline Tensor layer_norm(const Tensor & input, IntArrayRef normalized_shape, const Tensor & weight={}, const Tensor & bias={}, double eps=1e-05, bool cudnn_enable=true); +static inline std::tuple native_layer_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps); +static inline std::tuple native_layer_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask); +static inline Tensor linear(const Tensor & input, const Tensor & weight, const Tensor & bias={}); +static inline Tensor mkldnn_linear(const Tensor & input, const Tensor & weight, const Tensor & bias={}); +static inline Tensor fbgemm_linear_int8_weight_fp32_activation(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); +static inline Tensor fbgemm_linear_int8_weight(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); +static inline std::tuple fbgemm_linear_quantize_weight(const Tensor & input); +static inline Tensor fbgemm_pack_gemm_matrix_fp16(const Tensor & input); +static inline Tensor fbgemm_linear_fp16_weight_fp32_activation(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); +static inline Tensor fbgemm_linear_fp16_weight(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); +static inline Tensor fbgemm_pack_quantized_matrix(const Tensor & input); +static inline Tensor fbgemm_pack_quantized_matrix(const Tensor & input, int64_t K, int64_t N); +static inline Tensor linspace(Scalar start, Scalar end, int64_t steps=100, const TensorOptions & options={}); +static inline Tensor & linspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100); +static inline Tensor log(const Tensor & self); +static inline Tensor & log_(Tensor & self); +static inline Tensor & log_out(Tensor & out, const Tensor & self); +static inline Tensor log10(const Tensor & self); +static inline Tensor & log10_(Tensor & self); +static inline Tensor & log10_out(Tensor & out, 
const Tensor & self); +static inline Tensor log1p(const Tensor & self); +static inline Tensor & log1p_(Tensor & self); +static inline Tensor & log1p_out(Tensor & out, const Tensor & self); +static inline Tensor log2(const Tensor & self); +static inline Tensor & log2_(Tensor & self); +static inline Tensor & log2_out(Tensor & out, const Tensor & self); +static inline Tensor logdet(const Tensor & self); +static inline Tensor logspace(Scalar start, Scalar end, int64_t steps=100, double base=10.0, const TensorOptions & options={}); +static inline Tensor & logspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100, double base=10.0); +static inline Tensor log_softmax(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor log_softmax(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor _log_softmax(const Tensor & self, int64_t dim, bool half_to_float); +static inline Tensor _log_softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); +static inline Tensor logsumexp(const Tensor & self, IntArrayRef dim, bool keepdim=false); +static inline Tensor & logsumexp_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor logsumexp(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & logsumexp_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +static inline Tensor margin_ranking_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin=0.0, int64_t reduction=at::Reduction::Mean); +static inline Tensor matmul(const Tensor & self, const Tensor & other); +static inline Tensor & matmul_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor matrix_rank(const Tensor & self, double tol, bool symmetric=false); +static inline Tensor matrix_rank(const Tensor & self, bool symmetric=false); +static inline Tensor matrix_power(const Tensor & self, int64_t n); +static inline std::tuple max(const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, int64_t dim, bool keepdim=false); +static inline Tensor max_values(const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple max(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor max_values(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +static inline std::tuple max_pool1d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor max_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor mkldnn_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool 
ceil_mode=false); +static inline Tensor quantized_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor mean(const Tensor & self, c10::optional dtype=c10::nullopt); +static inline Tensor mean(const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +static inline Tensor & mean_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor mean(const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & mean_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +static inline std::tuple median(const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple median(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +static inline std::tuple min(const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim=false); +static inline Tensor min_values(const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple min(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor min_values(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +static inline Tensor mkldnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups); +static inline Tensor mkldnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); +static inline std::tuple mkldnn_convolution_backward_weights(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); +static inline std::tuple mkldnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, std::array output_mask); +static inline std::tuple miopen_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); +static inline std::tuple miopen_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const 
Tensor & save_mean, const Tensor & save_var, double epsilon); +static inline Tensor miopen_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor miopen_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple miopen_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +static inline Tensor miopen_convolution_backward_bias(const Tensor & grad_output); +static inline Tensor miopen_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor miopen_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple miopen_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +static inline Tensor miopen_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor miopen_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor miopen_depthwise_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline Tensor miopen_depthwise_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple miopen_depthwise_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +static inline Tensor miopen_depthwise_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +static inline std::tuple miopen_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); +static inline std::tuple> 
miopen_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); +static inline Tensor mm(const Tensor & self, const Tensor & mat2); +static inline Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); +static inline Tensor _sparse_mm(const Tensor & sparse, const Tensor & dense); +static inline std::tuple mode(const Tensor & self, int64_t dim=-1, bool keepdim=false); +static inline std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim=-1, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple mode(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +static inline Tensor mul(const Tensor & self, const Tensor & other); +static inline Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor mul(const Tensor & self, Scalar other); +static inline Tensor mv(const Tensor & self, const Tensor & vec); +static inline Tensor & mv_out(Tensor & out, const Tensor & self, const Tensor & vec); +static inline Tensor mvlgamma(const Tensor & self, int64_t p); +static inline Tensor narrow(const Tensor & self, int64_t dim, int64_t start, int64_t length); +static inline std::tuple native_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); +static inline std::tuple batch_norm_stats(const Tensor & input, double eps); +static inline Tensor batch_norm_elemt(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); +static inline Tensor & batch_norm_elemt_out(Tensor & out, const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); +static inline std::tuple batch_norm_gather_stats(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, int64_t count); +static inline std::tuple batch_norm_gather_stats_with_counts(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, IntArrayRef counts); +static inline std::tuple native_batch_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask); +static inline std::tuple batch_norm_backward_reduce(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, bool input_g, bool weight_g, bool bias_g); +static inline Tensor batch_norm_backward_elemt(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, const Tensor & mean_dy, const Tensor & mean_dy_xmu); +static inline 
std::tuple batch_norm_update_stats(const Tensor & input, const Tensor & running_mean, const Tensor & running_var, double momentum); +static inline bool _nnpack_available(); +static inline Tensor _nnpack_spatial_convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride=1); +static inline std::tuple _nnpack_spatial_convolution_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, std::array output_mask); +static inline Tensor _nnpack_spatial_convolution_backward_input(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding); +static inline Tensor _nnpack_spatial_convolution_backward_weight(const Tensor & input, IntArrayRef weightsize, const Tensor & grad_output, IntArrayRef padding); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor ones(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +static inline Tensor ones(IntArrayRef size, const TensorOptions & options={}); +static inline Tensor & ones_out(Tensor & out, IntArrayRef size); +static inline Tensor ones_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor ones_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor pairwise_distance(const Tensor & x1, const Tensor & x2, double p=2, double eps=1e-06, bool keepdim=false); +static inline Tensor cdist(const Tensor & x1, const Tensor & x2, double p=2, c10::optional compute_mode=c10::nullopt); +static inline Tensor _cdist_backward(const Tensor & grad, const Tensor & x1, const Tensor & x2, double p, const Tensor & cdist); +static inline Tensor pdist(const Tensor & self, double p=2); +static inline Tensor _pdist_forward(const Tensor & self, double p=2); +static inline Tensor _pdist_backward(const Tensor & grad, const Tensor & self, double p, const Tensor & pdist); +static inline Tensor cosine_similarity(const Tensor & x1, const Tensor & x2, int64_t dim=1, double eps=1e-08); +static inline Tensor pixel_shuffle(const Tensor & self, int64_t upscale_factor); +static inline Tensor pinverse(const Tensor & self, double rcond=1e-15); +static inline Tensor poisson_nll_loss(const Tensor & input, const Tensor & target, bool log_input, bool full, double eps, int64_t reduction); +static inline Tensor scalar_tensor(Scalar s, const TensorOptions & options={}); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor rand(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor rand(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options={}); +#endif +static inline Tensor rand(IntArrayRef size, const TensorOptions & options={}); +static inline Tensor rand(IntArrayRef size, Generator * generator, const TensorOptions & options={}); +static inline Tensor & rand_out(Tensor & out, IntArrayRef size); +static inline Tensor & rand_out(Tensor & out, IntArrayRef size, Generator * generator); +static inline Tensor rand_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor rand_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor randint(int64_t high, IntArrayRef size, const TensorOptions & options={}); +static inline Tensor randint(int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options={}); +static inline Tensor 
randint(int64_t low, int64_t high, IntArrayRef size, const TensorOptions & options={}); +static inline Tensor randint(int64_t low, int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options={}); +static inline Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size); +static inline Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size, Generator * generator); +static inline Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size); +static inline Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size, Generator * generator); +static inline Tensor randint_like(const Tensor & self, int64_t high, c10::optional memory_format=c10::nullopt); +static inline Tensor randint_like(const Tensor & self, int64_t low, int64_t high, c10::optional memory_format=c10::nullopt); +static inline Tensor randint_like(const Tensor & self, int64_t high, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor randint_like(const Tensor & self, int64_t low, int64_t high, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor randn(IntArrayRef size, const TensorOptions & options={}); +static inline Tensor randn(IntArrayRef size, Generator * generator, const TensorOptions & options={}); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor randn(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor randn(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options={}); +#endif +static inline Tensor & randn_out(Tensor & out, IntArrayRef size); +static inline Tensor & randn_out(Tensor & out, IntArrayRef size, Generator * generator); +static inline Tensor randn_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor randn_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor randperm(int64_t n, const TensorOptions & options={}); +static inline Tensor randperm(int64_t n, Generator * generator, const TensorOptions & options={}); +static inline Tensor & randperm_out(Tensor & out, int64_t n); +static inline Tensor & randperm_out(Tensor & out, int64_t n, Generator * generator); +static inline Tensor range(Scalar start, Scalar end, Scalar step=1, const TensorOptions & options={}); +static inline Tensor range(Scalar start, Scalar end, const TensorOptions & options={}); +static inline Tensor & range_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +static inline Tensor reciprocal(const Tensor & self); +static inline Tensor & reciprocal_(Tensor & self); +static inline Tensor & reciprocal_out(Tensor & out, const Tensor & self); +static inline Tensor neg(const Tensor & self); +static inline Tensor & neg_(Tensor & self); +static inline Tensor & neg_out(Tensor & out, const Tensor & self); +static inline Tensor repeat_interleave(const Tensor & repeats); +static inline Tensor repeat_interleave(const Tensor & self, const Tensor & repeats, c10::optional dim=c10::nullopt); +static inline Tensor repeat_interleave(const Tensor & self, int64_t repeats, c10::optional dim=c10::nullopt); +static inline Tensor reshape(const Tensor & self, IntArrayRef shape); +static inline Tensor _mkldnn_reshape(const Tensor & self, IntArrayRef shape); +static inline Tensor round(const Tensor & self); +static inline Tensor & round_(Tensor & self); +static inline Tensor & 
round_out(Tensor & out, const Tensor & self);
+static inline Tensor rrelu(const Tensor & self, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr);
+static inline Tensor & rrelu_(Tensor & self, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr);
+static inline Tensor relu(const Tensor & self);
+static inline Tensor & relu_(Tensor & self);
+static inline Tensor prelu(const Tensor & self, const Tensor & weight);
+static inline std::tuple<Tensor,Tensor> prelu_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight);
+static inline Tensor gelu(const Tensor & self);
+static inline Tensor gelu_backward(const Tensor & grad, const Tensor & self);
+static inline Tensor hardshrink(const Tensor & self, Scalar lambd=0.5);
+static inline Tensor hardshrink_backward(const Tensor & grad_out, const Tensor & self, Scalar lambd);
+static inline Tensor rsqrt(const Tensor & self);
+static inline Tensor & rsqrt_(Tensor & self);
+static inline Tensor & rsqrt_out(Tensor & out, const Tensor & self);
+#ifdef BUILD_NAMEDTENSOR
+static inline Tensor select(const Tensor & self, Dimname dim, int64_t index);
+#endif
+static inline Tensor select(const Tensor & self, int64_t dim, int64_t index);
+static inline Tensor selu(const Tensor & self);
+static inline Tensor & selu_(Tensor & self);
+static inline Tensor celu(const Tensor & self, Scalar alpha=1.0);
+static inline Tensor & celu_(Tensor & self, Scalar alpha=1.0);
+static inline Tensor sigmoid(const Tensor & self);
+static inline Tensor & sigmoid_(Tensor & self);
+static inline Tensor & sigmoid_out(Tensor & out, const Tensor & self);
+static inline Tensor sin(const Tensor & self);
+static inline Tensor & sin_(Tensor & self);
+static inline Tensor & sin_out(Tensor & out, const Tensor & self);
+static inline Tensor sinh(const Tensor & self);
+static inline Tensor & sinh_(Tensor & self);
+static inline Tensor & sinh_out(Tensor & out, const Tensor & self);
+static inline Tensor detach(const Tensor & self);
+static inline Tensor & detach_(Tensor & self);
+static inline int64_t size(const Tensor & self, int64_t dim);
+#ifdef BUILD_NAMEDTENSOR
+static inline int64_t size(const Tensor & self, Dimname dim);
+#endif
+static inline Tensor slice(const Tensor & self, int64_t dim=0, int64_t start=0, int64_t end=9223372036854775807, int64_t step=1);
+static inline std::tuple<Tensor,Tensor> slogdet(const Tensor & self);
+static inline Tensor smm(const Tensor & self, const Tensor & mat2);
+static inline Tensor softmax(const Tensor & self, int64_t dim, c10::optional<ScalarType> dtype=c10::nullopt);
+#ifdef BUILD_NAMEDTENSOR
+static inline Tensor softmax(const Tensor & self, Dimname dim, c10::optional<ScalarType> dtype=c10::nullopt);
+#endif
+static inline Tensor _softmax(const Tensor & self, int64_t dim, bool half_to_float);
+static inline Tensor _softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self);
+static inline std::vector<Tensor> split(const Tensor & self, int64_t split_size, int64_t dim=0);
+static inline std::vector<Tensor> split_with_sizes(const Tensor & self, IntArrayRef split_sizes, int64_t dim=0);
+static inline Tensor squeeze(const Tensor & self);
+static inline Tensor squeeze(const Tensor & self, int64_t dim);
+#ifdef BUILD_NAMEDTENSOR
+static inline Tensor squeeze(const Tensor & self, Dimname dim);
+#endif
+static inline Tensor sspaddmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1);
+static inline Tensor &
sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +static inline Tensor stack(TensorList tensors, int64_t dim=0); +static inline Tensor & stack_out(Tensor & out, TensorList tensors, int64_t dim=0); +static inline Tensor stft(const Tensor & self, int64_t n_fft, c10::optional hop_length=c10::nullopt, c10::optional win_length=c10::nullopt, const Tensor & window={}, bool normalized=false, bool onesided=true); +static inline int64_t stride(const Tensor & self, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +static inline int64_t stride(const Tensor & self, Dimname dim); +#endif +static inline Tensor sum(const Tensor & self, c10::optional dtype=c10::nullopt); +static inline Tensor sum(const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor sum(const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor & sum_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & sum_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor sqrt(const Tensor & self); +static inline Tensor & sqrt_(Tensor & self); +static inline Tensor & sqrt_out(Tensor & out, const Tensor & self); +static inline Tensor std(const Tensor & self, bool unbiased=true); +static inline Tensor std(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +static inline std::tuple std_mean(const Tensor & self, bool unbiased=true); +static inline std::tuple std_mean(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple std_mean(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +static inline Tensor & std_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor std(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & std_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +static inline Tensor prod(const Tensor & self, c10::optional dtype=c10::nullopt); +static inline Tensor prod(const Tensor & self, int64_t dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +static inline Tensor & prod_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor prod(const Tensor & self, Dimname dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & prod_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +static inline Tensor t(const Tensor & self); +static inline Tensor tan(const Tensor & self); +static inline Tensor & tan_(Tensor & self); +static inline Tensor & tan_out(Tensor & out, const Tensor & self); +static inline Tensor tanh(const Tensor & self); +static inline Tensor & tanh_(Tensor & self); +static inline Tensor & tanh_out(Tensor & out, const Tensor & self); +static inline Tensor tensordot(const Tensor & self, const Tensor & other, IntArrayRef dims_self, IntArrayRef 
dims_other); +static inline Tensor threshold(const Tensor & self, Scalar threshold, Scalar value); +static inline Tensor & threshold_(Tensor & self, Scalar threshold, Scalar value); +static inline Tensor & threshold_out(Tensor & out, const Tensor & self, Scalar threshold, Scalar value); +static inline Tensor threshold_backward(const Tensor & grad_output, const Tensor & self, Scalar threshold); +static inline Tensor transpose(const Tensor & self, int64_t dim0, int64_t dim1); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor transpose(const Tensor & self, Dimname dim0, Dimname dim1); +#endif +static inline Tensor _mkldnn_transpose(const Tensor & self, int64_t dim0, int64_t dim1); +static inline Tensor & _mkldnn_transpose_(Tensor & self, int64_t dim0, int64_t dim1); +static inline Tensor one_hot(const Tensor & self, int64_t num_classes=-1); +static inline Tensor flip(const Tensor & self, IntArrayRef dims); +static inline Tensor roll(const Tensor & self, IntArrayRef shifts, IntArrayRef dims={}); +static inline Tensor rot90(const Tensor & self, int64_t k=1, IntArrayRef dims={0,1}); +static inline Tensor trapz(const Tensor & y, const Tensor & x, int64_t dim=-1); +static inline Tensor trapz(const Tensor & y, double dx=1, int64_t dim=-1); +static inline Tensor _trilinear(const Tensor & i1, const Tensor & i2, const Tensor & i3, IntArrayRef expand1, IntArrayRef expand2, IntArrayRef expand3, IntArrayRef sumdim, int64_t unroll_dim=1); +static inline Tensor triplet_margin_loss(const Tensor & anchor, const Tensor & positive, const Tensor & negative, double margin=1.0, double p=2, double eps=1e-06, bool swap=false, int64_t reduction=at::Reduction::Mean); +static inline Tensor trunc(const Tensor & self); +static inline Tensor & trunc_(Tensor & self); +static inline Tensor & trunc_out(Tensor & out, const Tensor & self); +static inline bool _has_compatible_shallow_copy_type(const Tensor & self, const Tensor & from); +static inline std::tuple _unique(const Tensor & self, bool sorted=true, bool return_inverse=false); +static inline std::tuple unique_dim(const Tensor & self, int64_t dim, bool sorted=true, bool return_inverse=false, bool return_counts=false); +static inline std::tuple unique_consecutive(const Tensor & self, bool return_inverse=false, bool return_counts=false, c10::optional dim=c10::nullopt); +static inline std::tuple unique_dim_consecutive(const Tensor & self, int64_t dim, bool return_inverse=false, bool return_counts=false); +static inline std::tuple _unique2(const Tensor & self, bool sorted=true, bool return_inverse=false, bool return_counts=false); +static inline Tensor _unsafe_view(const Tensor & self, IntArrayRef size); +static inline Tensor unsqueeze(const Tensor & self, int64_t dim); +static inline Tensor var(const Tensor & self, bool unbiased=true); +static inline Tensor var(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +static inline Tensor & var_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor var(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & var_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +static inline std::tuple var_mean(const Tensor & self, bool unbiased=true); +static inline std::tuple var_mean(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static 
inline std::tuple var_mean(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +static inline Tensor where(const Tensor & condition, const Tensor & self, const Tensor & other); +static inline std::vector where(const Tensor & condition); +static inline Tensor _s_where(const Tensor & condition, const Tensor & self, const Tensor & other); +static inline Tensor norm_except_dim(const Tensor & v, int64_t pow=2, int64_t dim=0); +static inline Tensor _weight_norm(const Tensor & v, const Tensor & g, int64_t dim=0); +static inline std::tuple _weight_norm_cuda_interface(const Tensor & v, const Tensor & g, int64_t dim=0); +static inline std::tuple _weight_norm_cuda_interface_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); +static inline std::tuple _weight_norm_differentiable_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor zeros(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +static inline Tensor zeros(IntArrayRef size, const TensorOptions & options={}); +static inline Tensor & zeros_out(Tensor & out, IntArrayRef size); +static inline Tensor zeros_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor zeros_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +static inline Tensor _standard_gamma_grad(const Tensor & self, const Tensor & output); +static inline Tensor _standard_gamma(const Tensor & self, Generator * generator=nullptr); +static inline Tensor _dirichlet_grad(const Tensor & x, const Tensor & alpha, const Tensor & total); +static inline Tensor _sample_dirichlet(const Tensor & self, Generator * generator=nullptr); +static inline Tensor poisson(const Tensor & self, Generator * generator=nullptr); +static inline Tensor native_norm(const Tensor & self, Scalar p=2); +static inline Tensor _sparse_sum(const Tensor & self); +static inline Tensor _sparse_sum(const Tensor & self, ScalarType dtype); +static inline Tensor _sparse_sum(const Tensor & self, IntArrayRef dim); +static inline Tensor _sparse_sum(const Tensor & self, IntArrayRef dim, ScalarType dtype); +static inline Tensor _sparse_sum_backward(const Tensor & grad, const Tensor & self, IntArrayRef dim); +static inline Tensor norm(const Tensor & self, c10::optional p, ScalarType dtype); +static inline Tensor norm(const Tensor & self, Scalar p=2); +static inline Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); +static inline Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim=false); +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline 
Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim=false); +#endif +static inline Tensor frobenius_norm(const Tensor & self); +static inline Tensor frobenius_norm(const Tensor & self, IntArrayRef dim, bool keepdim=false); +static inline Tensor & frobenius_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +static inline Tensor nuclear_norm(const Tensor & self, bool keepdim=false); +static inline Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, bool keepdim=false); +static inline Tensor nuclear_norm(const Tensor & self, IntArrayRef dim, bool keepdim=false); +static inline Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +static inline Tensor clone(const Tensor & self, c10::optional memory_format=c10::nullopt); +static inline Tensor & resize_as_(Tensor & self, const Tensor & the_template, c10::optional memory_format=c10::nullopt); +static inline Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); +static inline Tensor pow(const Tensor & self, Scalar exponent); +static inline Tensor & zero_(Tensor & self); +static inline Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +static inline Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha=1); +static inline Tensor sub(const Tensor & self, Scalar other, Scalar alpha=1); +static inline Tensor rsub(const Tensor & self, const Tensor & other, Scalar alpha=1); +static inline Tensor rsub(const Tensor & self, Scalar other, Scalar alpha=1); +static inline Tensor _sparse_addmm(const Tensor & self, const Tensor & sparse, const Tensor & dense, Scalar beta=1, Scalar alpha=1); +static inline Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +static inline Tensor addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +static inline Tensor sparse_coo_tensor(IntArrayRef size, const TensorOptions & options); +static inline Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, const TensorOptions & options={}); +static inline Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options={}); +static inline Tensor _sparse_coo_tensor_unsafe(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options={}); +static inline Tensor _sparse_coo_tensor_with_dims(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const TensorOptions & options); +static inline Tensor _sparse_coo_tensor_with_dims_and_tensors(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const Tensor & indices, const Tensor & values, const TensorOptions & options); +static inline Tensor to_dense_backward(const Tensor & grad, const Tensor & input); +static inline Tensor & hspmm_out(Tensor & out, const Tensor & mat1, const Tensor & mat2); +static inline Tensor hspmm(const Tensor & mat1, const Tensor & mat2); +static inline Tensor & copy_sparse_to_sparse_(Tensor & self, const Tensor & src, bool non_blocking=false); +static inline std::vector unbind(const Tensor & self, int64_t dim=0); +#ifdef BUILD_NAMEDTENSOR +static inline std::vector unbind(const Tensor & self, Dimname dim); +#endif +static inline Tensor mkldnn_reorder_conv2d_weight(const Tensor & self, IntArrayRef padding=0, IntArrayRef stride=1, IntArrayRef dilation=1, int64_t groups=1); +static 
inline Tensor to_mkldnn_backward(const Tensor & grad, const Tensor & input); +static inline Tensor quantize_per_tensor(const Tensor & self, double scale, int64_t zero_point, ScalarType dtype); +static inline Tensor quantize_per_channel(const Tensor & self, const Tensor & scales, const Tensor & zero_points, int64_t axis, ScalarType dtype); +static inline Tensor dequantize(const Tensor & self); +static inline double q_scale(const Tensor & self); +static inline int64_t q_zero_point(const Tensor & self); +static inline Tensor q_per_channel_scales(const Tensor & self); +static inline Tensor q_per_channel_zero_points(const Tensor & self); +static inline int64_t q_per_channel_axis(const Tensor & self); +static inline Tensor int_repr(const Tensor & self); +static inline Tensor _make_per_tensor_quantized_tensor(const Tensor & self, double scale, int64_t zero_point); +static inline Tensor _make_per_channel_quantized_tensor(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis); +static inline Tensor fake_quantize_per_tensor_affine(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +static inline Tensor fake_quantize_per_tensor_affine_backward(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +static inline Tensor fake_quantize_per_channel_affine(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +static inline Tensor fake_quantize_per_channel_affine_backward(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +static inline std::vector meshgrid(TensorList tensors); +static inline Tensor cartesian_prod(TensorList tensors); +static inline Tensor combinations(const Tensor & self, int64_t r=2, bool with_replacement=false); +static inline ScalarType result_type(const Tensor & tensor, const Tensor & other); +static inline ScalarType result_type(const Tensor & tensor, Scalar other); +static inline ScalarType result_type(Scalar scalar, const Tensor & tensor); +static inline ScalarType result_type(Scalar scalar1, Scalar scalar2); +static inline bool can_cast(ScalarType from, ScalarType to); +static inline ScalarType promote_types(ScalarType type1, ScalarType type2); +static inline Scalar _local_scalar_dense(const Tensor & self); +static inline std::tuple _thnn_fused_lstm_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & cx, const Tensor & input_bias={}, const Tensor & hidden_bias={}); +static inline std::tuple _thnn_fused_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & cx, const Tensor & cy, const Tensor & workspace, bool has_bias); +static inline std::tuple _thnn_differentiable_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & input_bias, const Tensor & hidden_bias, const Tensor & cx, const Tensor & cy); +static inline std::tuple _thnn_fused_gru_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias={}, const Tensor & hidden_bias={}); +static inline std::tuple _thnn_fused_gru_cell_backward(const Tensor & grad_hy, const Tensor & workspace, bool has_bias); +static inline std::tuple _thnn_differentiable_gru_cell_backward(const Tensor & grad_hy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & 
hx, const Tensor & input_bias, const Tensor & hidden_bias); +static inline std::tuple lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +static inline std::tuple lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +static inline std::tuple gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +static inline std::tuple gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +static inline std::tuple rnn_tanh(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +static inline std::tuple rnn_tanh(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +static inline std::tuple rnn_relu(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +static inline std::tuple rnn_relu(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +static inline std::tuple lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +static inline Tensor gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +static inline Tensor rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +static inline Tensor rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +static inline std::tuple quantized_lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, c10::optional dtype=c10::nullopt, bool use_dynamic=false); +static inline std::tuple quantized_lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, c10::optional dtype=c10::nullopt, bool use_dynamic=false); +static inline std::tuple quantized_gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +static inline std::tuple quantized_gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +static inline std::tuple quantized_lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, 
Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +static inline Tensor quantized_gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +static inline Tensor quantized_rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +static inline Tensor quantized_rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +static inline std::tuple _pack_padded_sequence(const Tensor & input, const Tensor & lengths, bool batch_first); +static inline Tensor _pack_padded_sequence_backward(const Tensor & grad, IntArrayRef input_size, const Tensor & batch_sizes, bool batch_first); +static inline std::tuple _pad_packed_sequence(const Tensor & data, const Tensor & batch_sizes, bool batch_first, Scalar padding_value, int64_t total_length); +static inline Tensor masked_fill(const Tensor & self, const Tensor & mask, Scalar value); +static inline Tensor masked_fill(const Tensor & self, const Tensor & mask, const Tensor & value); +static inline Tensor masked_scatter(const Tensor & self, const Tensor & mask, const Tensor & source); +static inline Tensor index_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); +#endif +static inline Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); +static inline Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & value); +#endif +static inline Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +static inline Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); +#endif +static inline Tensor scatter_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); +#endif +static inline Tensor __and__(const Tensor & self, Scalar other); +static inline Tensor __and__(const Tensor & self, const Tensor & other); +static inline Tensor __or__(const 
Tensor & self, Scalar other); +static inline Tensor __or__(const Tensor & self, const Tensor & other); +static inline Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor bitwise_xor(const Tensor & self, Scalar other); +static inline Tensor bitwise_xor(const Tensor & self, const Tensor & other); +static inline Tensor __xor__(const Tensor & self, Scalar other); +static inline Tensor __xor__(const Tensor & self, const Tensor & other); +static inline Tensor __lshift__(const Tensor & self, Scalar other); +static inline Tensor __lshift__(const Tensor & self, const Tensor & other); +static inline Tensor __rshift__(const Tensor & self, Scalar other); +static inline Tensor __rshift__(const Tensor & self, const Tensor & other); +static inline Tensor & addbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +static inline Tensor addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & diag_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +static inline Tensor diag(const Tensor & self, int64_t diagonal=0); +static inline Tensor & cross_out(Tensor & out, const Tensor & self, const Tensor & other, c10::optional dim=c10::nullopt); +static inline Tensor cross(const Tensor & self, const Tensor & other, c10::optional dim=c10::nullopt); +static inline Tensor & triu_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +static inline Tensor triu(const Tensor & self, int64_t diagonal=0); +static inline Tensor & tril_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +static inline Tensor tril(const Tensor & self, int64_t diagonal=0); +static inline Tensor tril_indices(int64_t row, int64_t col, int64_t offset=0, const TensorOptions & options=at::kLong); +static inline Tensor triu_indices(int64_t row, int64_t col, int64_t offset=0, const TensorOptions & options=at::kLong); +static inline Tensor trace(const Tensor & self); +static inline Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor ne(const Tensor & self, Scalar other); +static inline Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor ne(const Tensor & self, const Tensor & other); +static inline Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor eq(const Tensor & self, Scalar other); +static inline Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor eq(const Tensor & self, const Tensor & other); +static inline Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor ge(const Tensor & self, Scalar other); +static inline Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor ge(const Tensor & self, const Tensor & other); +static inline Tensor & le_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor le(const Tensor & self, Scalar other); +static inline Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor le(const Tensor & self, const Tensor & other); +static inline Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor gt(const Tensor & self, Scalar other); +static inline Tensor & gt_out(Tensor & out, const Tensor & self, const 
Tensor & other); +static inline Tensor gt(const Tensor & self, const Tensor & other); +static inline Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other); +static inline Tensor lt(const Tensor & self, Scalar other); +static inline Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other); +static inline Tensor lt(const Tensor & self, const Tensor & other); +static inline Tensor & take_out(Tensor & out, const Tensor & self, const Tensor & index); +static inline Tensor take(const Tensor & self, const Tensor & index); +static inline Tensor & index_select_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index); +static inline Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & index_select_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_select(const Tensor & self, Dimname dim, const Tensor & index); +#endif +static inline Tensor & masked_select_out(Tensor & out, const Tensor & self, const Tensor & mask); +static inline Tensor masked_select(const Tensor & self, const Tensor & mask); +static inline Tensor & nonzero_out(Tensor & out, const Tensor & self); +static inline Tensor nonzero(const Tensor & self); +static inline std::vector nonzero_numpy(const Tensor & self); +static inline Tensor & gather_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +static inline Tensor gather(const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & gather_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad=false); +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor gather(const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad=false); +#endif +static inline Tensor _gather_sparse_backward(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & grad); +static inline Tensor & addcmul_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +static inline Tensor addcmul(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +static inline Tensor & addcdiv_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +static inline Tensor addcdiv(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +static inline std::tuple lstsq_out(Tensor & X, Tensor & qr, const Tensor & self, const Tensor & A); +static inline std::tuple lstsq(const Tensor & self, const Tensor & A); +static inline std::tuple triangular_solve_out(Tensor & X, Tensor & M, const Tensor & self, const Tensor & A, bool upper=true, bool transpose=false, bool unitriangular=false); +static inline std::tuple triangular_solve(const Tensor & self, const Tensor & A, bool upper=true, bool transpose=false, bool unitriangular=false); +static inline std::tuple _triangular_solve_helper(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); +static inline std::tuple symeig_out(Tensor & e, Tensor & V, const Tensor & self, bool eigenvectors=false, bool upper=true); +static inline std::tuple symeig(const Tensor & self, bool eigenvectors=false, bool upper=true); +static inline std::tuple _symeig_helper(const Tensor & self, bool eigenvectors, bool upper); +static inline std::tuple 
eig_out(Tensor & e, Tensor & v, const Tensor & self, bool eigenvectors=false); +static inline std::tuple eig(const Tensor & self, bool eigenvectors=false); +static inline std::tuple svd_out(Tensor & U, Tensor & S, Tensor & V, const Tensor & self, bool some=true, bool compute_uv=true); +static inline std::tuple svd(const Tensor & self, bool some=true, bool compute_uv=true); +static inline std::tuple _svd_helper(const Tensor & self, bool some, bool compute_uv); +static inline Tensor & cholesky_out(Tensor & out, const Tensor & self, bool upper=false); +static inline Tensor cholesky(const Tensor & self, bool upper=false); +static inline Tensor _cholesky_helper(const Tensor & self, bool upper); +static inline Tensor & cholesky_solve_out(Tensor & out, const Tensor & self, const Tensor & input2, bool upper=false); +static inline Tensor cholesky_solve(const Tensor & self, const Tensor & input2, bool upper=false); +static inline Tensor _cholesky_solve_helper(const Tensor & self, const Tensor & A, bool upper); +static inline std::tuple solve(const Tensor & self, const Tensor & A); +static inline std::tuple solve_out(Tensor & solution, Tensor & lu, const Tensor & self, const Tensor & A); +static inline std::tuple _solve_helper(const Tensor & self, const Tensor & A); +static inline Tensor & cholesky_inverse_out(Tensor & out, const Tensor & self, bool upper=false); +static inline Tensor cholesky_inverse(const Tensor & self, bool upper=false); +static inline std::tuple qr_out(Tensor & Q, Tensor & R, const Tensor & self, bool some=true); +static inline std::tuple qr(const Tensor & self, bool some=true); +static inline std::tuple _qr_helper(const Tensor & self, bool some); +static inline std::tuple geqrf_out(Tensor & a, Tensor & tau, const Tensor & self); +static inline std::tuple geqrf(const Tensor & self); +static inline Tensor & orgqr_out(Tensor & out, const Tensor & self, const Tensor & input2); +static inline Tensor orgqr(const Tensor & self, const Tensor & input2); +static inline Tensor & ormqr_out(Tensor & out, const Tensor & self, const Tensor & input2, const Tensor & input3, bool left=true, bool transpose=false); +static inline Tensor ormqr(const Tensor & self, const Tensor & input2, const Tensor & input3, bool left=true, bool transpose=false); +static inline std::tuple _lu_with_info(const Tensor & self, bool pivot=true, bool check_errors=true); +static inline Tensor & lu_solve_out(Tensor & out, const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +static inline Tensor lu_solve(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +static inline Tensor _lu_solve_helper(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +static inline Tensor & multinomial_out(Tensor & out, const Tensor & self, int64_t num_samples, bool replacement=false, Generator * generator=nullptr); +static inline Tensor multinomial(const Tensor & self, int64_t num_samples, bool replacement=false, Generator * generator=nullptr); +static inline std::tuple _multinomial_alias_setup(const Tensor & probs); +static inline Tensor _multinomial_alias_draw(const Tensor & J, const Tensor & q, int64_t num_samples, Generator * generator=nullptr); +static inline Tensor & lgamma_out(Tensor & out, const Tensor & self); +static inline Tensor lgamma(const Tensor & self); +static inline Tensor & digamma_out(Tensor & out, const Tensor & self); +static inline Tensor digamma(const Tensor & self); +static inline Tensor & polygamma_out(Tensor & out, int64_t n, const Tensor & self); +static 
inline Tensor polygamma(int64_t n, const Tensor & self);
+static inline Tensor erfinv(const Tensor & self);
+static inline Tensor & erfinv_out(Tensor & out, const Tensor & self);
+static inline Tensor sign(const Tensor & self);
+static inline Tensor & sign_out(Tensor & out, const Tensor & self);
+static inline Tensor dist(const Tensor & self, const Tensor & other, Scalar p=2);
+static inline Tensor & atan2_out(Tensor & out, const Tensor & self, const Tensor & other);
+static inline Tensor atan2(const Tensor & self, const Tensor & other);
+static inline Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight);
+static inline Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight);
+static inline Tensor lerp(const Tensor & self, const Tensor & end, Scalar weight);
+static inline Tensor lerp(const Tensor & self, const Tensor & end, const Tensor & weight);
+static inline Tensor & histc_out(Tensor & out, const Tensor & self, int64_t bins=100, Scalar min=0, Scalar max=0);
+static inline Tensor histc(const Tensor & self, int64_t bins=100, Scalar min=0, Scalar max=0);
+static inline Tensor & fmod_out(Tensor & out, const Tensor & self, Scalar other);
+static inline Tensor fmod(const Tensor & self, Scalar other);
+static inline Tensor & fmod_out(Tensor & out, const Tensor & self, const Tensor & other);
+static inline Tensor fmod(const Tensor & self, const Tensor & other);
+static inline Tensor & remainder_out(Tensor & out, const Tensor & self, Scalar other);
+static inline Tensor remainder(const Tensor & self, Scalar other);
+static inline Tensor & remainder_out(Tensor & out, const Tensor & self, const Tensor & other);
+static inline Tensor remainder(const Tensor & self, const Tensor & other);
+static inline Tensor & min_out(Tensor & out, const Tensor & self, const Tensor & other);
+static inline Tensor min(const Tensor & self, const Tensor & other);
+static inline Tensor min(const Tensor & self);
+static inline Tensor & max_out(Tensor & out, const Tensor & self, const Tensor & other);
+static inline Tensor max(const Tensor & self, const Tensor & other);
+static inline Tensor max(const Tensor & self);
+static inline Tensor median(const Tensor & self);
+static inline std::tuple<Tensor &,Tensor &> sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim=-1, bool descending=false);
+static inline std::tuple<Tensor,Tensor> sort(const Tensor & self, int64_t dim=-1, bool descending=false);
+#ifdef BUILD_NAMEDTENSOR
+static inline std::tuple<Tensor &,Tensor &> sort_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool descending=false);
+#endif
+#ifdef BUILD_NAMEDTENSOR
+static inline std::tuple<Tensor,Tensor> sort(const Tensor & self, Dimname dim, bool descending=false);
+#endif
+static inline Tensor argsort(const Tensor & self, int64_t dim=-1, bool descending=false);
+#ifdef BUILD_NAMEDTENSOR
+static inline Tensor argsort(const Tensor & self, Dimname dim, bool descending=false);
+#endif
+static inline std::tuple<Tensor &,Tensor &> topk_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true);
+static inline std::tuple<Tensor,Tensor> topk(const Tensor & self, int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true);
+static inline Tensor all(const Tensor & self);
+static inline Tensor any(const Tensor & self);
+static inline Tensor & renorm_out(Tensor & out, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm);
+static inline Tensor renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm);
+static
inline bool equal(const Tensor & self, const Tensor & other); +static inline Tensor & pow_out(Tensor & out, const Tensor & self, const Tensor & exponent); +static inline Tensor pow(const Tensor & self, const Tensor & exponent); +static inline Tensor & pow_out(Tensor & out, Scalar self, const Tensor & exponent); +static inline Tensor pow(Scalar self, const Tensor & exponent); +static inline Tensor & normal_out(Tensor & out, const Tensor & mean, double std=1, Generator * generator=nullptr); +static inline Tensor normal(const Tensor & mean, double std=1, Generator * generator=nullptr); +static inline Tensor & normal_out(Tensor & out, double mean, const Tensor & std, Generator * generator=nullptr); +static inline Tensor normal(double mean, const Tensor & std, Generator * generator=nullptr); +static inline Tensor & normal_out(Tensor & out, const Tensor & mean, const Tensor & std, Generator * generator=nullptr); +static inline Tensor normal(const Tensor & mean, const Tensor & std, Generator * generator=nullptr); +static inline Tensor normal(double mean, double std, IntArrayRef size, Generator * generator=nullptr, const TensorOptions & options={}); +static inline Tensor & normal_out(Tensor & out, double mean, double std, IntArrayRef size, Generator * generator=nullptr); +static inline Tensor alias(const Tensor & self); +static inline Tensor _addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & _addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & _addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +static inline Tensor & _index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +static inline Tensor _cumsum(const Tensor & self, int64_t dim); +static inline Tensor & _cumsum_out(Tensor & out, const Tensor & self, int64_t dim); +static inline Tensor _cumprod(const Tensor & self, int64_t dim); +static inline Tensor & _cumprod_out(Tensor & out, const Tensor & self, int64_t dim); +static inline Tensor _var(const Tensor & self, bool unbiased=true); +static inline Tensor _std(const Tensor & self, bool unbiased=true); +static inline Tensor _cat(TensorList tensors, int64_t dim=0); +static inline Tensor & _cat_out(Tensor & out, TensorList tensors, int64_t dim=0); +static inline std::tuple _mode(const Tensor & self, int64_t dim=-1, bool keepdim=false); +static inline std::tuple _mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim=-1, bool keepdim=false); +static inline std::tuple _max(const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple _max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple _min(const Tensor & self, int64_t dim, bool keepdim=false); +static inline std::tuple _min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim=false); +static inline Tensor & binary_cross_entropy_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor binary_cross_entropy(const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor & binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & 
weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor & mse_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor mse_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor & mse_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor mse_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor & l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor l1_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor & l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor & multi_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, Scalar p=1, Scalar margin=1, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor multi_margin_loss(const Tensor & self, const Tensor & target, Scalar p=1, Scalar margin=1, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor & multi_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor multi_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +static inline Tensor & multilabel_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor multilabel_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline std::tuple multilabel_margin_loss_forward_out(Tensor & output, Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction); +static inline std::tuple multilabel_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor & multilabel_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +static inline Tensor multilabel_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +static inline Tensor & nll_loss_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +static inline Tensor nll_loss(const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +static inline std::tuple nll_loss_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const 
Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +static inline std::tuple nll_loss_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +static inline Tensor & nll_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +static inline Tensor nll_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +static inline Tensor & nll_loss2d_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +static inline Tensor nll_loss2d(const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +static inline std::tuple nll_loss2d_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +static inline std::tuple nll_loss2d_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +static inline Tensor & nll_loss2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +static inline Tensor nll_loss2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +static inline Tensor & smooth_l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor smooth_l1_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor & smooth_l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor smooth_l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor & soft_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor soft_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +static inline Tensor & soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +static inline Tensor & elu_out(Tensor & out, const Tensor & self, Scalar alpha=1, Scalar scale=1, Scalar input_scale=1); +static inline Tensor elu(const Tensor & self, Scalar alpha=1, Scalar scale=1, Scalar input_scale=1); +static inline Tensor & elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +static inline Tensor elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +static inline Tensor & elu_(Tensor & self, Scalar alpha=1, Scalar 
scale=1, Scalar input_scale=1); +static inline Tensor & glu_out(Tensor & out, const Tensor & self, int64_t dim=-1); +static inline Tensor glu(const Tensor & self, int64_t dim=-1); +static inline Tensor & glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim); +static inline Tensor glu_backward(const Tensor & grad_output, const Tensor & self, int64_t dim); +static inline Tensor & hardtanh_out(Tensor & out, const Tensor & self, Scalar min_val=-1, Scalar max_val=1); +static inline Tensor hardtanh(const Tensor & self, Scalar min_val=-1, Scalar max_val=1); +static inline Tensor & hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +static inline Tensor hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +static inline Tensor & hardtanh_(Tensor & self, Scalar min_val=-1, Scalar max_val=1); +static inline Tensor & leaky_relu_out(Tensor & out, const Tensor & self, Scalar negative_slope=0.01); +static inline Tensor leaky_relu(const Tensor & self, Scalar negative_slope=0.01); +static inline Tensor & leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +static inline Tensor leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +static inline Tensor & leaky_relu_(Tensor & self, Scalar negative_slope=0.01); +static inline Tensor & log_sigmoid_out(Tensor & out, const Tensor & self); +static inline Tensor log_sigmoid(const Tensor & self); +static inline std::tuple log_sigmoid_forward_out(Tensor & output, Tensor & buffer, const Tensor & self); +static inline std::tuple log_sigmoid_forward(const Tensor & self); +static inline Tensor & log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +static inline Tensor log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +static inline Tensor & rrelu_with_noise_out(Tensor & out, const Tensor & self, const Tensor & noise, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr); +static inline Tensor rrelu_with_noise(const Tensor & self, const Tensor & noise, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr); +static inline Tensor & rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +static inline Tensor rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +static inline Tensor & rrelu_with_noise_(Tensor & self, const Tensor & noise, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr); +static inline Tensor & softplus_out(Tensor & out, const Tensor & self, Scalar beta=1, Scalar threshold=20); +static inline Tensor softplus(const Tensor & self, Scalar beta=1, Scalar threshold=20); +static inline Tensor & softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +static inline Tensor softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +static inline Tensor & softshrink_out(Tensor & out, const Tensor & 
self, Scalar lambd=0.5); +static inline Tensor softshrink(const Tensor & self, Scalar lambd=0.5); +static inline Tensor & softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd); +static inline Tensor softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd); +static inline Tensor & adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +static inline Tensor adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); +static inline Tensor mkldnn_adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); +static inline Tensor _adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); +static inline Tensor _adaptive_avg_pool2d_backward(const Tensor & grad_output, const Tensor & self); +static inline Tensor & adaptive_avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +static inline Tensor adaptive_avg_pool3d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & adaptive_avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self); +static inline Tensor adaptive_avg_pool3d_backward(const Tensor & grad_output, const Tensor & self); +static inline std::tuple adaptive_max_pool2d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +static inline std::tuple adaptive_max_pool2d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & adaptive_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +static inline Tensor adaptive_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +static inline std::tuple adaptive_max_pool3d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +static inline std::tuple adaptive_max_pool3d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & adaptive_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +static inline Tensor adaptive_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +static inline Tensor & avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +static inline Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +static inline Tensor & avg_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +static inline Tensor avg_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +static inline Tensor & avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +static inline Tensor avg_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef 
padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +static inline Tensor & avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +static inline Tensor avg_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +static inline std::tuple fractional_max_pool2d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +static inline std::tuple fractional_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +static inline Tensor & fractional_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +static inline Tensor fractional_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +static inline std::tuple fractional_max_pool3d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +static inline std::tuple fractional_max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +static inline Tensor & fractional_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +static inline Tensor fractional_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +static inline std::tuple max_pool2d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline std::tuple max_pool2d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor & max_pool2d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +static inline Tensor max_pool2d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +static inline std::tuple max_pool3d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline std::tuple max_pool3d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +static inline Tensor & max_pool3d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef 
padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +static inline Tensor max_pool3d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +static inline Tensor & max_unpool2d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +static inline Tensor max_unpool2d(const Tensor & self, const Tensor & indices, IntArrayRef output_size); +static inline Tensor & max_unpool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +static inline Tensor max_unpool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +static inline Tensor & max_unpool3d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +static inline Tensor max_unpool3d(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +static inline Tensor & max_unpool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +static inline Tensor max_unpool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +static inline Tensor & reflection_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); +static inline Tensor reflection_pad1d(const Tensor & self, IntArrayRef padding); +static inline Tensor & reflection_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor reflection_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor & reflection_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); +static inline Tensor reflection_pad2d(const Tensor & self, IntArrayRef padding); +static inline Tensor & reflection_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor reflection_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor & replication_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad1d(const Tensor & self, IntArrayRef padding); +static inline Tensor & replication_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor & replication_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad2d(const Tensor & self, IntArrayRef padding); +static inline Tensor & replication_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor & replication_pad3d_out(Tensor & out, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad3d(const Tensor & self, IntArrayRef padding); 
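The pooling, unpooling and padding entry points declared here are plain free functions (in the at:: namespace of this generated header), so they can be called directly on tensors. A minimal usage sketch, assuming a 4-D NCHW float input; tensor names and sizes are illustrative only:

// Sketch: pad a feature map, max-pool it, then undo the pooling.
at::Tensor input = at::rand({1, 3, 32, 32});                    // NCHW feature map
at::Tensor padded = at::reflection_pad2d(input, {2, 2, 2, 2});  // -> 1x3x36x36
at::Tensor pooled, indices;
std::tie(pooled, indices) = at::max_pool2d_with_indices(padded, /*kernel_size=*/{2, 2});
at::Tensor restored = at::max_unpool2d(pooled, indices, /*output_size=*/{36, 36});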
+static inline Tensor & replication_pad3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor replication_pad3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +static inline Tensor _test_optional_float(const Tensor & self, c10::optional scale=c10::nullopt); +static inline Tensor & upsample_linear1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor upsample_linear1d(const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor & upsample_linear1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor upsample_linear1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor & upsample_bilinear2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor upsample_bilinear2d(const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor & upsample_bilinear2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor upsample_bilinear2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor & upsample_bicubic2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor upsample_bicubic2d(const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor & upsample_bicubic2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor upsample_bicubic2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor & upsample_trilinear3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor upsample_trilinear3d(const Tensor & self, IntArrayRef output_size, bool align_corners); +static inline Tensor & upsample_trilinear3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor upsample_trilinear3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +static inline Tensor & upsample_nearest1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +static inline Tensor upsample_nearest1d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & upsample_nearest1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +static inline Tensor upsample_nearest1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +static inline Tensor & upsample_nearest2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +static inline Tensor upsample_nearest2d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & upsample_nearest2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +static inline Tensor upsample_nearest2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef 
input_size); +static inline Tensor & upsample_nearest3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +static inline Tensor upsample_nearest3d(const Tensor & self, IntArrayRef output_size); +static inline Tensor & upsample_nearest3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +static inline Tensor upsample_nearest3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +static inline Tensor & sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); +static inline Tensor sigmoid_backward(const Tensor & grad_output, const Tensor & output); +static inline Tensor & tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); +static inline Tensor tanh_backward(const Tensor & grad_output, const Tensor & output); +static inline Tensor & slow_conv_transpose2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +static inline Tensor slow_conv_transpose2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +static inline std::tuple slow_conv_transpose2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones); +static inline std::tuple slow_conv_transpose2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones, std::array output_mask); +static inline Tensor & slow_conv_transpose3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +static inline Tensor slow_conv_transpose3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +static inline std::tuple slow_conv_transpose3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input); +static inline std::tuple slow_conv_transpose3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +static inline Tensor & thnn_conv2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +static inline Tensor thnn_conv2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const 
Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +static inline std::tuple thnn_conv2d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +static inline std::tuple thnn_conv2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +static inline std::tuple thnn_conv2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); +static inline std::tuple thnn_conv2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +static inline Tensor & thnn_conv_depthwise2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +static inline Tensor thnn_conv_depthwise2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +static inline Tensor & thnn_conv_depthwise2d_forward_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +static inline Tensor thnn_conv_depthwise2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +static inline std::tuple thnn_conv_depthwise2d_backward_out(Tensor & grad_input, Tensor & grad_weight, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +static inline std::tuple thnn_conv_depthwise2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); +static inline Tensor & slow_conv3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +static inline Tensor slow_conv3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +static inline std::tuple slow_conv3d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +static inline std::tuple slow_conv3d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +static inline std::tuple slow_conv3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); +static inline std::tuple 
slow_conv3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array<bool, 3> output_mask);
+static inline Tensor slow_conv_dilated2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1);
+static inline std::tuple<Tensor, Tensor, Tensor> slow_conv_dilated2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array<bool, 3> output_mask);
+static inline Tensor slow_conv_dilated3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1);
+static inline std::tuple<Tensor, Tensor, Tensor> slow_conv_dilated3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array<bool, 3> output_mask);
+static inline Tensor & col2im_out(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor col2im(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor & col2im_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor col2im_backward(const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor & im2col_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor im2col(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor & im2col_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor im2col_backward(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride);
+static inline Tensor isfinite(const Tensor & self);
+
+inline Tensor from_blob(
+    void* data,
+    IntArrayRef sizes,
+    IntArrayRef strides,
+    const std::function<void(void*)>& deleter,
+    const TensorOptions& options = {}) {
+  AutoNonVariableTypeMode guard;
+  auto device = globalContext().getDeviceFromPtr(data, options.device().type());
+  if (options.device().has_index()) {
+    TORCH_CHECK(
+        options.device() == device,
+        "Specified device ", options.device(),
+        " does not match device of data ", device);
+  }
+  auto storage = Storage(
+      options.dtype(),
+      detail::computeStorageSize(sizes, strides),
+      InefficientStdFunctionContext::makeDataPtr(
+          data, deleter, device),
+      /*allocator=*/nullptr,
+      /*resizable=*/false);
+  return empty({0}, options).set_(storage, 0, sizes, strides);
+}
+
+inline Tensor from_blob(
+    void* data,
+    IntArrayRef sizes,
+    const std::function<void(void*)>& deleter,
+    const TensorOptions& options = {}) {
+  return from_blob(data, sizes, detail::defaultStrides(sizes), deleter, options);
+}
+
+inline Tensor from_blob(
+    void* data,
+    IntArrayRef sizes,
+    IntArrayRef strides,
+    const TensorOptions& options = {}) {
+  return from_blob(data, sizes, strides, [](void*) {}, options);
+}
+
+inline Tensor from_blob(
+    void* data,
+    IntArrayRef sizes,
+    const TensorOptions& options = {}) {
+  return from_blob(data, sizes, detail::defaultStrides(sizes), [](void*) {}, options);
+}
+
+inline int64_t numel(const Tensor& tensor) {
+  return tensor.numel();
+}
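The from_blob overloads above are the header's mechanism for viewing externally owned memory as a Tensor without copying: the caller supplies the buffer, sizes, and optionally strides and a deleter; the default deleter is a no-op, so ATen never frees the memory. A minimal sketch, with buffer contents and sizes chosen purely for illustration:

float data[6] = {1.f, 2.f, 3.f, 4.f, 5.f, 6.f};
// Views the caller-owned buffer as a 2x3 float tensor; no copy is made and
// the resulting tensor must not outlive `data`.
at::Tensor t = at::from_blob(data, {2, 3}, at::TensorOptions().dtype(at::kFloat));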
+
+// function definitions are all static inline because
+// they are one-line statically dispatched functions that
+// invoke the actual dynamic dispatch on the correct argument
+static inline Tensor _cast_Byte(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Byte(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Byte", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Char(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Char(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Char", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Double(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Double(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Double", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Float(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Float(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Float", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Int(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Int(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Int", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Long(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Long(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Long", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Short(const Tensor & self, bool non_blocking) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cast_Short(self, non_blocking);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cast_Short", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>(
+        op, self, non_blocking);
+#endif
+}
+static inline Tensor _cast_Half(const Tensor & self, bool non_blocking) {
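    // Every wrapper from here on follows the same two-path pattern: with
    // USE_STATIC_DISPATCH the call is resolved at compile time, either to
    // TypeDefault or to a concrete backend (CPUType, SparseCPUType, ...)
    // selected from the tensors' dispatch type set; otherwise the operator
    // schema (here {"aten::_cast_Half", ""}) is looked up once in the c10
    // dispatcher, cached in a function-local static OperatorHandle, and
    // invoked through callUnboxed/callUnboxedOnly with the return and
    // argument types given as template parameters.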
+#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_cast_Half(self, non_blocking); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cast_Half", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, non_blocking); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::vector align_tensors(TensorList tensors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::align_tensors(tensors); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::align_tensors", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, TensorList>( + op, tensors); +#endif +} +#endif +static inline bool _use_cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets)))) { + + default: + AT_ERROR("_use_cudnn_ctc_loss not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_use_cudnn_ctc_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, log_probs, targets, input_lengths, target_lengths, blank); +#endif +} +static inline std::tuple _cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool deterministic, bool zero_infinity) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets)))) { + + default: + AT_ERROR("_cudnn_ctc_loss not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cudnn_ctc_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, int64_t, bool, bool>( + op, log_probs, targets, input_lengths, target_lengths, blank, deterministic, zero_infinity); +#endif +} +static inline Tensor _cudnn_rnn_flatten_weight(TensorList weight_arr, int64_t weight_stride0, int64_t input_size, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(weight_arr)))) { + + default: + AT_ERROR("_cudnn_rnn_flatten_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(weight_arr))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cudnn_rnn_flatten_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_arr, weight_stride0, input_size, mode, hidden_size, num_layers, batch_first, bidirectional); +#endif +} +static inline std::tuple _cudnn_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool 
batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, dropout_state)))) { + + default: + AT_ERROR("_cudnn_rnn not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, dropout_state))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cudnn_rnn", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, int64_t, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, int64_t, bool, double, bool, bool, IntArrayRef, const Tensor &>( + op, input, weight, weight_stride0, weight_buf, hx, cx, mode, hidden_size, num_layers, batch_first, dropout, train, bidirectional, batch_sizes, dropout_state); +#endif +} +static inline std::tuple> _cudnn_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, dropout_state, reserve)))) { + + default: + AT_ERROR("_cudnn_rnn_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, dropout_state, reserve))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cudnn_rnn_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>, const Tensor &, TensorList, int64_t, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, int64_t, bool, double, bool, bool, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, input, weight, weight_stride0, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, mode, hidden_size, num_layers, batch_first, dropout, train, bidirectional, batch_sizes, dropout_state, reserve, output_mask); +#endif +} +static inline Tensor _cudnn_init_dropout_state(double dropout, bool train, int64_t dropout_seed, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + + default: + AT_ERROR("_cudnn_init_dropout_state not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cudnn_init_dropout_state", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, dropout, train, dropout_seed, options); +#endif +} +static 
inline int64_t _debug_has_internal_overlap(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_debug_has_internal_overlap(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_debug_has_internal_overlap", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline std::tuple _fused_dropout(const Tensor & self, double p, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("_fused_dropout not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_fused_dropout", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, double, Generator *>( + op, self, p, generator); +#endif +} +static inline Tensor _masked_scale(const Tensor & self, const Tensor & mask, double scale) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mask)))) { + + default: + AT_ERROR("_masked_scale not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mask))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_masked_scale", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mask, scale); +#endif +} +static inline std::tuple _sobol_engine_draw(const Tensor & quasi, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sobol_engine_draw(quasi, n, sobolstate, dimension, num_generated, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sobol_engine_draw", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, const Tensor &, int64_t, int64_t, c10::optional>( + op, quasi, n, sobolstate, dimension, num_generated, dtype); +#endif +} +static inline Tensor & _sobol_engine_ff_(Tensor & self, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sobol_engine_ff_(self, n, sobolstate, dimension, num_generated); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sobol_engine_ff_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, n, sobolstate, dimension, num_generated); +#endif +} +static inline Tensor & _sobol_engine_scramble_(Tensor & self, const Tensor & ltm, int64_t dimension) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sobol_engine_scramble_(self, ltm, dimension); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sobol_engine_scramble_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, ltm, dimension); +#endif +} +static inline Tensor & _sobol_engine_initialize_state_(Tensor & self, int64_t dimension) { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sobol_engine_initialize_state_(self, dimension); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sobol_engine_initialize_state_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dimension); +#endif +} +static inline Tensor _reshape_from_tensor(const Tensor & self, const Tensor & shape) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_reshape_from_tensor(self, shape); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_reshape_from_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, shape); +#endif +} +static inline Tensor _shape_as_tensor(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_shape_as_tensor(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_shape_as_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor dropout(const Tensor & input, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::dropout(input, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dropout", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, p, train); +#endif +} +static inline Tensor & dropout_(Tensor & self, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::dropout_(self, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dropout_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, p, train); +#endif +} +static inline Tensor feature_dropout(const Tensor & input, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::feature_dropout(input, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::feature_dropout", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, p, train); +#endif +} +static inline Tensor & feature_dropout_(Tensor & self, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::feature_dropout_(self, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::feature_dropout_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, p, train); +#endif +} +static inline Tensor alpha_dropout(const Tensor & input, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::alpha_dropout(input, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::alpha_dropout", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, p, train); +#endif +} +static inline Tensor & alpha_dropout_(Tensor & self, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::alpha_dropout_(self, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::alpha_dropout_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, p, train); +#endif +} +static inline Tensor feature_alpha_dropout(const Tensor & input, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::feature_alpha_dropout(input, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::feature_alpha_dropout", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, p, train); +#endif +} +static inline Tensor & feature_alpha_dropout_(Tensor & self, double p, bool train) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::feature_alpha_dropout_(self, p, train); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::feature_alpha_dropout_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, p, train); +#endif +} +static inline Tensor abs(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::abs(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::abs", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & abs_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::abs_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::abs_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & abs_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::abs_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::abs", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor angle(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::angle(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::angle", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & angle_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::angle_out(out, self); + break; + default: + AT_ERROR("angle_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::angle", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor real(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::real(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::real", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & real_out(Tensor & out, const Tensor & self) { 
+#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::real_out(out, self); + break; + default: + AT_ERROR("real_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::real", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor imag(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::imag(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::imag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & imag_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::imag_out(out, self); + break; + default: + AT_ERROR("imag_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::imag", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor conj(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conj(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conj", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & conj_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::conj_out(out, self); + break; + default: + AT_ERROR("conj_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conj", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor acos(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::acos(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::acos", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & acos_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::acos_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::acos_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & acos_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::acos_out(out, self); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::acos", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor avg_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::avg_pool1d(self, kernel_size, stride, padding, ceil_mode, count_include_pad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::avg_pool1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, ceil_mode, count_include_pad); +#endif +} +static inline Tensor adaptive_avg_pool1d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::adaptive_avg_pool1d(self, output_size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline std::tuple adaptive_max_pool1d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::adaptive_max_pool1d(self, output_size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_max_pool1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef>( + op, self, output_size); +#endif +} +static inline Tensor add(const Tensor & self, const Tensor & other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::add(self, other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::add(self, other, alpha); + break; + default: + AT_ERROR("add not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::add", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::add_out(out, self, other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::add_out(out, self, other, alpha); + break; + default: + AT_ERROR("add_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::add", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other, alpha); +#endif +} +static inline Tensor add(const Tensor & self, Scalar other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::add(self, other, alpha); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::add", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mat, vec)))) { + case Backend::CPU: + return CPUType::addmv(self, mat, vec, beta, alpha); + break; + default: + AT_ERROR("addmv not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mat, vec))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addmv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat, vec, beta, alpha); +#endif +} +static inline Tensor & addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mat, vec)))) { + case Backend::CPU: + return CPUType::addmv_(self, mat, vec, beta, alpha); + break; + default: + AT_ERROR("addmv_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mat, vec))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addmv_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, mat, vec, beta, alpha); +#endif +} +static inline Tensor & addmv_out(Tensor & out, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mat, vec)))) { + case Backend::CPU: + return CPUType::addmv_out(out, self, mat, vec, beta, alpha); + break; + default: + AT_ERROR("addmv_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mat, vec))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addmv", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mat, vec, beta, alpha); +#endif +} +static inline Tensor addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addr(self, vec1, vec2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, vec1, vec2, beta, alpha); +#endif +} +static inline Tensor & addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addr_out(out, self, vec1, vec2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addr", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, vec1, vec2, beta, alpha); +#endif +} +static inline Tensor affine_grid_generator(const Tensor & theta, IntArrayRef size, bool align_corners) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::affine_grid_generator(theta, size, align_corners); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::affine_grid_generator", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, theta, size, align_corners); +#endif +} +static inline Tensor affine_grid_generator_backward(const Tensor & grad, IntArrayRef size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::affine_grid_generator_backward(grad, size, align_corners); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::affine_grid_generator_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, size, align_corners); +#endif +} +static inline Tensor all(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::all", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, keepdim); +#endif +} +static inline Tensor & all_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::all", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor all(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::all", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & all_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::all", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#endif +static inline bool allclose(const Tensor & self, const Tensor & other, double rtol, double atol, bool equal_nan) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::allclose(self, other, rtol, atol, equal_nan); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::allclose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, rtol, atol, equal_nan); +#endif +} +static inline Tensor any(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::any", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, keepdim); +#endif +} +static inline 
Tensor & any_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::any", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor any(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::any", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & any_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::any", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#endif +static inline Tensor arange(Scalar end, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::arange(end, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::arange", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, end, options); +#endif +} +static inline Tensor arange(Scalar start, Scalar end, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::arange(start, end, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::arange", "start"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, options); +#endif +} +static inline Tensor arange(Scalar start, Scalar end, Scalar step, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::arange(start, end, step, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::arange", "start_step"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, step, options); +#endif +} +static inline Tensor & arange_out(Tensor & out, Scalar end) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::arange_out(out, end); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::arange", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, end); +#endif +} +static inline Tensor & arange_out(Tensor & out, Scalar start, Scalar end, Scalar step) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::arange_out(out, start, end, step); + break; + default: + AT_ERROR("arange_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::arange", "start_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, start, end, step); +#endif +} +static inline Tensor _dim_arange(const Tensor & like, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_dim_arange(like, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_dim_arange", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, like, dim); +#endif +} +static inline Tensor argmax(const Tensor & self, c10::optional dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argmax(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::argmax", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, bool>( + op, self, dim, keepdim); +#endif +} +static inline Tensor argmin(const Tensor & self, c10::optional dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argmin(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::argmin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, bool>( + op, self, dim, keepdim); +#endif +} +static inline Tensor as_strided(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::as_strided(self, size, stride, storage_offset); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::as_strided(self, size, stride, storage_offset); + break; + default: + AT_ERROR("as_strided not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::as_strided", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, size, stride, storage_offset); +#endif +} +static inline Tensor & as_strided_(Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::as_strided_(self, size, stride, storage_offset); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::as_strided_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, size, stride, storage_offset); +#endif +} +static inline Tensor asin(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::asin(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::asin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & asin_(Tensor & self) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::asin_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::asin_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & asin_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::asin_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::asin", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor atan(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::atan", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & atan_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::atan_(self); + break; + default: + AT_ERROR("atan_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::atan_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & atan_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::atan_out(out, self); + break; + default: + AT_ERROR("atan_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::atan", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, batch1, batch2)))) { + case Backend::CPU: + return CPUType::baddbmm(self, batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("baddbmm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, batch1, batch2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::baddbmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, batch1, batch2, beta, alpha); +#endif +} +static inline Tensor & _baddbmm_mkl_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_baddbmm_mkl_(self, batch1, batch2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_baddbmm_mkl_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, 
batch1, batch2, beta, alpha); +#endif +} +static inline Tensor & baddbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, batch1, batch2)))) { + case Backend::CPU: + return CPUType::baddbmm_out(out, self, batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("baddbmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, batch1, batch2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::baddbmm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, batch1, batch2, beta, alpha); +#endif +} +static inline Tensor bartlett_window(int64_t window_length, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bartlett_window(window_length, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bartlett_window", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, options); +#endif +} +static inline Tensor bartlett_window(int64_t window_length, bool periodic, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bartlett_window(window_length, periodic, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bartlett_window", "periodic"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, options); +#endif +} +static inline Tensor batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::batch_norm(input, weight, bias, running_mean, running_var, training, momentum, eps, cudnn_enabled); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, running_mean, running_var, training, momentum, eps, cudnn_enabled); +#endif +} +static inline std::tuple _batch_norm_impl_index(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_batch_norm_impl_index(input, weight, bias, running_mean, running_var, training, momentum, eps, cudnn_enabled); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_batch_norm_impl_index", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, double, bool>( + op, input, weight, bias, running_mean, running_var, training, 
momentum, eps, cudnn_enabled); +#endif +} +static inline std::tuple _batch_norm_impl_index_backward(int64_t impl_index, const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var_transform, bool train, double eps, std::array output_mask, const Tensor & reservedSpace) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_batch_norm_impl_index_backward(impl_index, input, grad_output, weight, running_mean, running_var, save_mean, save_var_transform, train, eps, output_mask, reservedSpace); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_batch_norm_impl_index_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, int64_t, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, std::array, const Tensor &>( + op, impl_index, input, grad_output, weight, running_mean, running_var, save_mean, save_var_transform, train, eps, output_mask, reservedSpace); +#endif +} +static inline Tensor bernoulli(const Tensor & self, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bernoulli(self, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bernoulli", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, generator); +#endif +} +static inline Tensor & bernoulli_out(Tensor & out, const Tensor & self, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bernoulli_out(out, self, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bernoulli", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, generator); +#endif +} +static inline Tensor bernoulli(const Tensor & self, double p, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bernoulli(self, p, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bernoulli", "p"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, p, generator); +#endif +} +static inline Tensor bilinear(const Tensor & input1, const Tensor & input2, const Tensor & weight, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bilinear(input1, input2, weight, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bilinear", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input1, input2, weight, bias); +#endif +} +static inline Tensor binary_cross_entropy_with_logits(const Tensor & self, const Tensor & target, const Tensor & weight, const Tensor & pos_weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::binary_cross_entropy_with_logits(self, target, weight, pos_weight, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::binary_cross_entropy_with_logits", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, target, weight, pos_weight, reduction); +#endif +} 
+static inline Tensor binary_cross_entropy_with_logits_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, const Tensor & pos_weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::binary_cross_entropy_with_logits_backward(grad_output, self, target, weight, pos_weight, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::binary_cross_entropy_with_logits_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, target, weight, pos_weight, reduction); +#endif +} +static inline Tensor bincount(const Tensor & self, const Tensor & weights, int64_t minlength) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weights)))) { + case Backend::CPU: + return CPUType::bincount(self, weights, minlength); + break; + default: + AT_ERROR("bincount not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weights))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bincount", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weights, minlength); +#endif +} +static inline Tensor bitwise_not(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_not(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bitwise_not", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & bitwise_not_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::bitwise_not_out(out, self); + break; + default: + AT_ERROR("bitwise_not_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bitwise_not", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor logical_not(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_not(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logical_not", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & logical_not_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::logical_not_out(out, self); + break; + default: + AT_ERROR("logical_not_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logical_not", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline 
Tensor logical_xor(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_xor(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logical_xor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, other); +#endif +} +static inline Tensor & logical_xor_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::logical_xor_out(out, self, other); + break; + default: + AT_ERROR("logical_xor_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logical_xor", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor blackman_window(int64_t window_length, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::blackman_window(window_length, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::blackman_window", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, options); +#endif +} +static inline Tensor blackman_window(int64_t window_length, bool periodic, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::blackman_window(window_length, periodic, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::blackman_window", "periodic"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, options); +#endif +} +static inline Tensor bmm(const Tensor & self, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mat2)))) { + case Backend::CPU: + return CPUType::bmm(self, mat2); + break; + default: + AT_ERROR("bmm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat2); +#endif +} +static inline Tensor & bmm_out(Tensor & out, const Tensor & self, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mat2)))) { + case Backend::CPU: + return CPUType::bmm_out(out, self, mat2); + break; + default: + AT_ERROR("bmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bmm", 
"out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mat2); +#endif +} +static inline std::vector broadcast_tensors(TensorList tensors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::broadcast_tensors(tensors); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::broadcast_tensors", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, TensorList>( + op, tensors); +#endif +} +static inline Tensor cat(TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cat(tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cat", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, tensors, dim); +#endif +} +static inline Tensor & cat_out(Tensor & out, TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cat_out(out, tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cat", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, tensors, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cat(TensorList tensors, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cat(tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cat", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, tensors, dim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cat_out(Tensor & out, TensorList tensors, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cat_out(out, tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cat", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, tensors, dim); +#endif +} +#endif +static inline Tensor ceil(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ceil(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ceil", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & ceil_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ceil_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ceil_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & ceil_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::ceil_out(out, self); + break; + default: + AT_ERROR("ceil_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ceil", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static 
inline Tensor chain_matmul(TensorList matrices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::chain_matmul(matrices); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::chain_matmul", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, matrices); +#endif +} +static inline std::vector chunk(const Tensor & self, int64_t chunks, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::chunk(self, chunks, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::chunk", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t>( + op, self, chunks, dim); +#endif +} +static inline Tensor clamp(const Tensor & self, c10::optional min, c10::optional max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp(self, min, max); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, c10::optional>( + op, self, min, max); +#endif +} +static inline Tensor & clamp_(Tensor & self, c10::optional min, c10::optional max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::clamp_(self, min, max); + break; + default: + AT_ERROR("clamp_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional>( + op, self, min, max); +#endif +} +static inline Tensor & clamp_out(Tensor & out, const Tensor & self, c10::optional min, c10::optional max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::clamp_out(out, self, min, max); + break; + default: + AT_ERROR("clamp_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional>( + op, out, self, min, max); +#endif +} +static inline Tensor clamp_max(const Tensor & self, Scalar max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp_max(self, max); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_max", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, max); +#endif +} +static inline Tensor & clamp_max_(Tensor & self, Scalar max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::clamp_max_(self, max); + break; + default: + AT_ERROR("clamp_max_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op 
= c10::Dispatcher::singleton() + .findSchema({"aten::clamp_max_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, max); +#endif +} +static inline Tensor & clamp_max_out(Tensor & out, const Tensor & self, Scalar max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::clamp_max_out(out, self, max); + break; + default: + AT_ERROR("clamp_max_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_max", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, max); +#endif +} +static inline Tensor clamp_min(const Tensor & self, Scalar min) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp_min(self, min); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_min", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, min); +#endif +} +static inline Tensor & clamp_min_(Tensor & self, Scalar min) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::clamp_min_(self, min); + break; + default: + AT_ERROR("clamp_min_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_min_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, min); +#endif +} +static inline Tensor & clamp_min_out(Tensor & out, const Tensor & self, Scalar min) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::clamp_min_out(out, self, min); + break; + default: + AT_ERROR("clamp_min_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clamp_min", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, min); +#endif +} +static inline bool cudnn_is_acceptable(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cudnn_is_acceptable(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_is_acceptable", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor constant_pad_nd(const Tensor & self, IntArrayRef pad, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::constant_pad_nd(self, pad, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::constant_pad_nd", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, pad, value); +#endif +} +static inline Tensor convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef 
stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::convolution(input, weight, bias, stride, padding, dilation, transposed, output_padding, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, transposed, output_padding, groups); +#endif +} +static inline Tensor convolution_overrideable(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::convolution_overrideable(input, weight, bias, stride, padding, dilation, transposed, output_padding, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::convolution_overrideable", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, transposed, output_padding, groups); +#endif +} +static inline std::tuple convolution_backward_overrideable(const Tensor & grad_output, const Tensor & input, const Tensor & weight, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::convolution_backward_overrideable(grad_output, input, weight, stride, padding, dilation, transposed, output_padding, groups, output_mask); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::convolution_backward_overrideable", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, IntArrayRef, int64_t, std::array>( + op, grad_output, input, weight, stride, padding, dilation, transposed, output_padding, groups, output_mask); +#endif +} +static inline Tensor _convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_convolution(input, weight, bias, stride, padding, dilation, transposed, output_padding, groups, benchmark, deterministic, cudnn_enabled); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, transposed, output_padding, groups, benchmark, deterministic, cudnn_enabled); +#endif +} +static inline Tensor _convolution_nogroup(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_convolution_nogroup(input, weight, bias, stride, padding, dilation, transposed, output_padding); +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_convolution_nogroup", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, transposed, output_padding); +#endif +} +static inline std::tuple _convolution_double_backward(const Tensor & ggI, const Tensor & ggW, const Tensor & ggb, const Tensor & gO, const Tensor & weight, const Tensor & self, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_convolution_double_backward(ggI, ggW, ggb, gO, weight, self, stride, padding, dilation, transposed, output_padding, groups, benchmark, deterministic, cudnn_enabled, output_mask); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_convolution_double_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, IntArrayRef, int64_t, bool, bool, bool, std::array>( + op, ggI, ggW, ggb, gO, weight, self, stride, padding, dilation, transposed, output_padding, groups, benchmark, deterministic, cudnn_enabled, output_mask); +#endif +} +static inline Tensor conv1d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv1d(input, weight, bias, stride, padding, dilation, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, groups); +#endif +} +static inline Tensor conv2d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv2d(input, weight, bias, stride, padding, dilation, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, groups); +#endif +} +static inline Tensor conv3d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv3d(input, weight, bias, stride, padding, dilation, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, dilation, groups); +#endif +} +static inline Tensor conv_tbc(const Tensor & self, const Tensor & weight, const Tensor & bias, int64_t pad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv_tbc(self, weight, bias, pad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::conv_tbc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, weight, bias, pad); +#endif +} +static inline std::tuple conv_tbc_backward(const Tensor & self, const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t pad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv_tbc_backward(self, input, weight, bias, pad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv_tbc_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t>( + op, self, input, weight, bias, pad); +#endif +} +static inline Tensor conv_transpose1d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv_transpose1d(input, weight, bias, stride, padding, output_padding, groups, dilation); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv_transpose1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, output_padding, groups, dilation); +#endif +} +static inline Tensor conv_transpose2d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv_transpose2d(input, weight, bias, stride, padding, output_padding, groups, dilation); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv_transpose2d", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, output_padding, groups, dilation); +#endif +} +static inline Tensor conv_transpose3d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conv_transpose3d(input, weight, bias, stride, padding, output_padding, groups, dilation); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::conv_transpose3d", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, stride, padding, output_padding, groups, dilation); +#endif +} +static inline Tensor _copy_from(const Tensor & self, const Tensor & dst, bool non_blocking) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, dst)))) { + + default: + AT_ERROR("_copy_from not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, dst))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_copy_from", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dst, non_blocking); +#endif +} +static inline Tensor cos(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::cos(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cos", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & cos_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::cos_(self); + break; + default: + AT_ERROR("cos_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cos_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & cos_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::cos_out(out, self); + break; + default: + AT_ERROR("cos_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cos", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor cosh(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cosh(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cosh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & cosh_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::cosh_(self); + break; + default: + AT_ERROR("cosh_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cosh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & cosh_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::cosh_out(out, self); + break; + default: + AT_ERROR("cosh_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cosh", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor cosine_embedding_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cosine_embedding_loss(input1, input2, target, margin, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cosine_embedding_loss", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxed( + op, input1, input2, target, margin, reduction); +#endif +} +static inline Tensor cudnn_affine_grid_generator(const Tensor & theta, int64_t N, int64_t C, int64_t H, int64_t W) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(theta)))) { + + default: + AT_ERROR("cudnn_affine_grid_generator not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(theta))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_affine_grid_generator", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, theta, N, C, H, W); +#endif +} +static inline Tensor cudnn_affine_grid_generator_backward(const Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad)))) { + + default: + AT_ERROR("cudnn_affine_grid_generator_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_affine_grid_generator_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, N, C, H, W); +#endif +} +static inline std::tuple cudnn_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var)))) { + + default: + AT_ERROR("cudnn_batch_norm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_batch_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, double>( + op, input, weight, bias, running_mean, running_var, training, exponential_average_factor, epsilon); +#endif +} +static inline std::tuple cudnn_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon, const Tensor & reserveSpace) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, grad_output, weight, running_mean, running_var, save_mean, save_var, reserveSpace)))) { + + default: + AT_ERROR("cudnn_batch_norm_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, grad_output, weight, running_mean, running_var, save_mean, save_var, reserveSpace))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_batch_norm_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, 
const Tensor &, const Tensor &, const Tensor &, double, const Tensor &>( + op, input, grad_output, weight, running_mean, running_var, save_mean, save_var, epsilon, reserveSpace); +#endif +} +static inline Tensor cudnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("cudnn_convolution not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor cudnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight)))) { + + default: + AT_ERROR("cudnn_convolution_backward_input not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self_size, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple cudnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight)))) { + + default: + AT_ERROR("cudnn_convolution_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool, bool, std::array>( + op, self, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic, output_mask); +#endif +} +static inline Tensor cudnn_convolution_backward_bias(const Tensor & grad_output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + + default: + AT_ERROR("cudnn_convolution_backward_bias not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::cudnn_convolution_backward_bias", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output); +#endif +} +static inline Tensor cudnn_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + + default: + AT_ERROR("cudnn_convolution_backward_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_size, grad_output, self, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor cudnn_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("cudnn_convolution_transpose not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_transpose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, output_padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple cudnn_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight)))) { + + default: + AT_ERROR("cudnn_convolution_transpose_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_transpose_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool, bool, std::array>( + op, self, grad_output, weight, padding, output_padding, stride, dilation, groups, benchmark, deterministic, output_mask); +#endif +} +static inline Tensor cudnn_convolution_transpose_backward_bias(const Tensor & grad_output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + + default: + AT_ERROR("cudnn_convolution_transpose_backward_bias not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_transpose_backward_bias", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output); +#endif +} +static inline Tensor cudnn_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight)))) { + + default: + AT_ERROR("cudnn_convolution_transpose_backward_input not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_transpose_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor cudnn_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + + default: + AT_ERROR("cudnn_convolution_transpose_backward_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_convolution_transpose_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_size, grad_output, self, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor cudnn_grid_sampler(const Tensor & self, const Tensor & grid) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grid)))) { + + default: + AT_ERROR("cudnn_grid_sampler not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grid))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_grid_sampler", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, grid); +#endif +} +static inline std::tuple cudnn_grid_sampler_backward(const Tensor & self, const Tensor & grid, const Tensor & grad_output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grid, grad_output)))) { + + default: + AT_ERROR("cudnn_grid_sampler_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grid, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cudnn_grid_sampler_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &>( + op, 
self, grid, grad_output); +#endif +} +static inline Tensor cumsum(const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumsum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +static inline Tensor & cumsum_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum_out(out, self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumsum", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cumsum(const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumsum", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cumsum_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum_out(out, self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumsum", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, dtype); +#endif +} +#endif +static inline Tensor cumprod(const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumprod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +static inline Tensor & cumprod_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod_out(out, self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumprod", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor cumprod(const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cumprod", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & cumprod_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod_out(out, self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::cumprod", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, dtype); +#endif +} +#endif +static inline Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, int64_t reduction, bool zero_infinity) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ctc_loss", "IntList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity); +#endif +} +static inline Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, const Tensor & input_lengths, const Tensor & target_lengths, int64_t blank, int64_t reduction, bool zero_infinity) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ctc_loss", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, log_probs, targets, input_lengths, target_lengths, blank, reduction, zero_infinity); +#endif +} +static inline std::tuple _ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool zero_infinity) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets)))) { + case Backend::CPU: + return CPUType::_ctc_loss(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity); + break; + default: + AT_ERROR("_ctc_loss not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(log_probs, targets))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_ctc_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, int64_t, bool>( + op, log_probs, targets, input_lengths, target_lengths, blank, zero_infinity); +#endif +} +static inline Tensor _ctc_loss_backward(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, log_probs, targets, neg_log_likelihood, log_alpha)))) { + case Backend::CPU: + return CPUType::_ctc_loss_backward(grad, log_probs, targets, input_lengths, target_lengths, neg_log_likelihood, log_alpha, blank, zero_infinity); + break; + default: + AT_ERROR("_ctc_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, log_probs, targets, neg_log_likelihood, log_alpha))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_ctc_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, log_probs, 
targets, input_lengths, target_lengths, neg_log_likelihood, log_alpha, blank, zero_infinity); +#endif +} +static inline Tensor det(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::det(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::det", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor diag_embed(const Tensor & self, int64_t offset, int64_t dim1, int64_t dim2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diag_embed(self, offset, dim1, dim2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::diag_embed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, offset, dim1, dim2); +#endif +} +static inline Tensor diagflat(const Tensor & self, int64_t offset) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diagflat(self, offset); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::diagflat", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, offset); +#endif +} +static inline Tensor diagonal(const Tensor & self, int64_t offset, int64_t dim1, int64_t dim2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diagonal(self, offset, dim1, dim2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::diagonal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, offset, dim1, dim2); +#endif +} +static inline Tensor div(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::div(self, other); + break; + case Backend::SparseCPU: + return SparseCPUType::div(self, other); + break; + default: + AT_ERROR("div not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::div", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::div_out(out, self, other); + break; + case Backend::SparseCPU: + return SparseCPUType::div_out(out, self, other); + break; + default: + AT_ERROR("div_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::div", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor div(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::div(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::div", 
"Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor dot(const Tensor & self, const Tensor & tensor) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, tensor)))) { + case Backend::CPU: + return CPUType::dot(self, tensor); + break; + default: + AT_ERROR("dot not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, tensor))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dot", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, tensor); +#endif +} +static inline Tensor & dot_out(Tensor & out, const Tensor & self, const Tensor & tensor) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::dot_out(out, self, tensor); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dot", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, tensor); +#endif +} +static inline Tensor einsum(std::string equation, TensorList tensors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::einsum(equation, tensors); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::einsum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, equation, tensors); +#endif +} +static inline Tensor embedding(const Tensor & weight, const Tensor & indices, int64_t padding_idx, bool scale_grad_by_freq, bool sparse) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::embedding(weight, indices, padding_idx, scale_grad_by_freq, sparse); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, weight, indices, padding_idx, scale_grad_by_freq, sparse); +#endif +} +static inline Tensor embedding_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq, bool sparse) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::embedding_backward(grad, indices, num_weights, padding_idx, scale_grad_by_freq, sparse); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, indices, num_weights, padding_idx, scale_grad_by_freq, sparse); +#endif +} +static inline Tensor embedding_dense_backward(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, indices)))) { + case Backend::CPU: + return CPUType::embedding_dense_backward(grad_output, indices, num_weights, padding_idx, scale_grad_by_freq); + break; + default: + AT_ERROR("embedding_dense_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding_dense_backward", 
""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, indices, num_weights, padding_idx, scale_grad_by_freq); +#endif +} +static inline Tensor & embedding_renorm_(Tensor & self, const Tensor & indices, double max_norm, double norm_type) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, indices)))) { + case Backend::CPU: + return CPUType::embedding_renorm_(self, indices, max_norm, norm_type); + break; + default: + AT_ERROR("embedding_renorm_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding_renorm_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, max_norm, norm_type); +#endif +} +static inline Tensor embedding_sparse_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::embedding_sparse_backward(grad, indices, num_weights, padding_idx, scale_grad_by_freq); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding_sparse_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, indices, num_weights, padding_idx, scale_grad_by_freq); +#endif +} +static inline std::tuple embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::embedding_bag(weight, indices, offsets, scale_grad_by_freq, mode, sparse, per_sample_weights); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::embedding_bag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, bool, int64_t, bool, const Tensor &>( + op, weight, indices, offsets, scale_grad_by_freq, mode, sparse, per_sample_weights); +#endif +} +static inline std::tuple _embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(weight, indices, offsets, per_sample_weights)))) { + case Backend::CPU: + return CPUType::_embedding_bag(weight, indices, offsets, scale_grad_by_freq, mode, sparse, per_sample_weights); + break; + default: + AT_ERROR("_embedding_bag not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(weight, indices, offsets, per_sample_weights))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_embedding_bag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, bool, int64_t, bool, const Tensor &>( + op, weight, indices, offsets, scale_grad_by_freq, mode, sparse, per_sample_weights); +#endif +} +static inline Tensor _embedding_bag_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & 
offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_embedding_bag_backward(grad, indices, offsets, offset2bag, bag_size, maximum_indices, num_weights, scale_grad_by_freq, mode, sparse, per_sample_weights); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_embedding_bag_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, indices, offsets, offset2bag, bag_size, maximum_indices, num_weights, scale_grad_by_freq, mode, sparse, per_sample_weights); +#endif +} +static inline Tensor _embedding_bag_sparse_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_embedding_bag_sparse_backward(grad, indices, offsets, offset2bag, bag_size, num_weights, scale_grad_by_freq, mode, per_sample_weights); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_embedding_bag_sparse_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, indices, offsets, offset2bag, bag_size, num_weights, scale_grad_by_freq, mode, per_sample_weights); +#endif +} +static inline Tensor _embedding_bag_dense_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, indices, offsets, offset2bag, bag_size, maximum_indices, per_sample_weights)))) { + case Backend::CPU: + return CPUType::_embedding_bag_dense_backward(grad, indices, offsets, offset2bag, bag_size, maximum_indices, num_weights, scale_grad_by_freq, mode, per_sample_weights); + break; + default: + AT_ERROR("_embedding_bag_dense_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, indices, offsets, offset2bag, bag_size, maximum_indices, per_sample_weights))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_embedding_bag_dense_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, indices, offsets, offset2bag, bag_size, maximum_indices, num_weights, scale_grad_by_freq, mode, per_sample_weights); +#endif +} +static inline Tensor _embedding_bag_per_sample_weights_backward(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, weight, indices, offsets, offset2bag)))) { + case Backend::CPU: + return CPUType::_embedding_bag_per_sample_weights_backward(grad, weight, indices, offsets, offset2bag, mode); + break; + default: + AT_ERROR("_embedding_bag_per_sample_weights_backward not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, weight, indices, offsets, offset2bag))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_embedding_bag_per_sample_weights_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, weight, indices, offsets, offset2bag, mode); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor empty(IntArrayRef size, c10::optional names, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::empty(size, names, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &, c10::optional>( + op, size, names, options, memory_format); +#endif +} +#endif +static inline Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::empty(size, options, memory_format); + break; + case Backend::SparseCPU: + return SparseCPUType::empty(size, options, memory_format); + break; + default: + AT_ERROR("empty not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty", "memory_format"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, size, options, memory_format); +#endif +} +static inline Tensor _empty_affine_quantized(IntArrayRef size, const TensorOptions & options, double scale, int64_t zero_point, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::_empty_affine_quantized(size, options, scale, zero_point, memory_format); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::_empty_affine_quantized(size, options, scale, zero_point, memory_format); + break; + default: + AT_ERROR("_empty_affine_quantized not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_empty_affine_quantized", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, size, options, scale, zero_point, memory_format); +#endif +} +static inline Tensor _empty_per_channel_affine_quantized(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(scales, zero_points, options)))) { + case 
Backend::CPU: + return CPUType::_empty_per_channel_affine_quantized(size, scales, zero_points, axis, options, memory_format); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::_empty_per_channel_affine_quantized(size, scales, zero_points, axis, options, memory_format); + break; + default: + AT_ERROR("_empty_per_channel_affine_quantized not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(scales, zero_points, options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(scales, zero_points, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_empty_per_channel_affine_quantized", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, size, scales, zero_points, axis, options, memory_format); +#endif +} +static inline Tensor & empty_out(Tensor & out, IntArrayRef size, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::empty_out(out, size, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, size, memory_format); +#endif +} +static inline Tensor empty_like(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::empty_like(self, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor empty_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::empty_like(self, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, options, memory_format); +#endif +} +static inline Tensor empty_strided(IntArrayRef size, IntArrayRef stride, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::empty_strided(size, stride, options); + break; + default: + AT_ERROR("empty_strided not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::empty_strided", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, stride, options); +#endif +} +static inline Tensor erf(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::erf(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erf", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & 
erf_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::erf_(self); + break; + default: + AT_ERROR("erf_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erf_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & erf_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::erf_out(out, self); + break; + default: + AT_ERROR("erf_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erf", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor erfc(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::erfc(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erfc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & erfc_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::erfc_(self); + break; + default: + AT_ERROR("erfc_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erfc_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & erfc_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::erfc_out(out, self); + break; + default: + AT_ERROR("erfc_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::erfc", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor exp(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::exp(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::exp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & exp_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::exp_(self); + break; + default: + AT_ERROR("exp_ not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::exp_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & exp_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::exp_out(out, self); + break; + default: + AT_ERROR("exp_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::exp", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor expm1(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expm1(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::expm1", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & expm1_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expm1_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::expm1_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & expm1_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::expm1_out(out, self); + break; + default: + AT_ERROR("expm1_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::expm1", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor eye(int64_t n, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::eye(n, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eye", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, n, options); +#endif +} +static inline Tensor eye(int64_t n, int64_t m, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::eye(n, m, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eye", "m"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, n, m, options); +#endif +} +static inline Tensor & eye_out(Tensor & out, int64_t n) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + 
return CPUType::eye_out(out, n); + break; + default: + AT_ERROR("eye_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eye", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, n); +#endif +} +static inline Tensor & eye_out(Tensor & out, int64_t n, int64_t m) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::eye_out(out, n, m); + break; + default: + AT_ERROR("eye_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eye", "m_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, n, m); +#endif +} +static inline Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(self, start_dim, end_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::flatten", "using_ints"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, start_dim, end_dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim, Dimname out_dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(self, start_dim, end_dim, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::flatten", "named_out_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, start_dim, end_dim, out_dim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, Dimname start_dim, Dimname end_dim, Dimname out_dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(self, start_dim, end_dim, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::flatten", "using_names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, start_dim, end_dim, out_dim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor flatten(const Tensor & self, DimnameList dims, Dimname out_dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(self, dims, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::flatten", "DimnameList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dims, out_dim); +#endif +} +#endif +static inline Tensor & fill_(Tensor & self, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fill_(self, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fill_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, value); +#endif +} +static inline Tensor & fill_(Tensor & self, const Tensor & value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fill_(self, value); +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fill_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, value); +#endif +} +static inline Tensor floor(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::floor(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::floor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & floor_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::floor_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::floor_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & floor_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::floor_out(out, self); + break; + default: + AT_ERROR("floor_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::floor", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor frac(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frac(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frac", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & frac_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frac_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frac_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & frac_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frac_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frac", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor full(IntArrayRef size, Scalar fill_value, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::full(size, fill_value, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::full", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, fill_value, names, options); +#endif +} +#endif +static inline Tensor full(IntArrayRef size, Scalar fill_value, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::full(size, fill_value, options); +#else + 
globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::full", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, fill_value, options); +#endif +} +static inline Tensor & full_out(Tensor & out, IntArrayRef size, Scalar fill_value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::full_out(out, size, fill_value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::full", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size, fill_value); +#endif +} +static inline Tensor full_like(const Tensor & self, Scalar fill_value, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::full_like(self, fill_value, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::full_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, fill_value, memory_format); +#endif +} +static inline Tensor full_like(const Tensor & self, Scalar fill_value, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::full_like(self, fill_value, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::full_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, fill_value, options, memory_format); +#endif +} +static inline Tensor from_file(std::string filename, c10::optional shared, c10::optional size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::from_file(filename, shared, size, options); + break; + default: + AT_ERROR("from_file not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::from_file", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional, const TensorOptions &>( + op, filename, shared, size, options); +#endif +} +static inline Tensor grid_sampler(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::grid_sampler(input, grid, interpolation_mode, padding_mode, align_corners); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::grid_sampler", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, grid, interpolation_mode, padding_mode, align_corners); +#endif +} +static inline Tensor grid_sampler_2d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, grid)))) { + case Backend::CPU: + return CPUType::grid_sampler_2d(input, grid, interpolation_mode, padding_mode, align_corners); + break; + default: + AT_ERROR("grid_sampler_2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, grid))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::grid_sampler_2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, grid, interpolation_mode, padding_mode, align_corners); +#endif +} +static inline std::tuple grid_sampler_2d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, input, grid)))) { + case Backend::CPU: + return CPUType::grid_sampler_2d_backward(grad_output, input, grid, interpolation_mode, padding_mode, align_corners); + break; + default: + AT_ERROR("grid_sampler_2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, input, grid))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::grid_sampler_2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, bool>( + op, grad_output, input, grid, interpolation_mode, padding_mode, align_corners); +#endif +} +static inline Tensor grid_sampler_3d(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, grid)))) { + case Backend::CPU: + return CPUType::grid_sampler_3d(input, grid, interpolation_mode, padding_mode, align_corners); + break; + default: + AT_ERROR("grid_sampler_3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, grid))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::grid_sampler_3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, grid, interpolation_mode, padding_mode, align_corners); +#endif +} +static inline std::tuple grid_sampler_3d_backward(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, input, grid)))) { + case Backend::CPU: + return CPUType::grid_sampler_3d_backward(grad_output, input, grid, interpolation_mode, padding_mode, align_corners); + break; + default: + AT_ERROR("grid_sampler_3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, input, grid))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::grid_sampler_3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, int64_t, 
int64_t, bool>( + op, grad_output, input, grid, interpolation_mode, padding_mode, align_corners); +#endif +} +static inline Tensor hann_window(int64_t window_length, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hann_window(window_length, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hann_window", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, options); +#endif +} +static inline Tensor hann_window(int64_t window_length, bool periodic, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hann_window(window_length, periodic, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hann_window", "periodic"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, options); +#endif +} +static inline Tensor hamming_window(int64_t window_length, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hamming_window(window_length, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hamming_window", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, options); +#endif +} +static inline Tensor hamming_window(int64_t window_length, bool periodic, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hamming_window(window_length, periodic, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hamming_window", "periodic"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, options); +#endif +} +static inline Tensor hamming_window(int64_t window_length, bool periodic, double alpha, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hamming_window(window_length, periodic, alpha, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hamming_window", "periodic_alpha"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, alpha, options); +#endif +} +static inline Tensor hamming_window(int64_t window_length, bool periodic, double alpha, double beta, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hamming_window(window_length, periodic, alpha, beta, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hamming_window", 
"periodic_alpha_beta"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, window_length, periodic, alpha, beta, options); +#endif +} +static inline Tensor hinge_embedding_loss(const Tensor & self, const Tensor & target, double margin, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::hinge_embedding_loss(self, target, margin, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hinge_embedding_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, margin, reduction); +#endif +} +static inline Tensor ger(const Tensor & self, const Tensor & vec2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, vec2)))) { + case Backend::CPU: + return CPUType::ger(self, vec2); + break; + default: + AT_ERROR("ger not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, vec2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ger", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, vec2); +#endif +} +static inline Tensor & ger_out(Tensor & out, const Tensor & self, const Tensor & vec2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, vec2)))) { + case Backend::CPU: + return CPUType::ger_out(out, self, vec2); + break; + default: + AT_ERROR("ger_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, vec2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ger", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, vec2); +#endif +} +static inline Tensor group_norm(const Tensor & input, int64_t num_groups, const Tensor & weight, const Tensor & bias, double eps, bool cudnn_enabled) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::group_norm(input, num_groups, weight, bias, eps, cudnn_enabled); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::group_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, num_groups, weight, bias, eps, cudnn_enabled); +#endif +} +static inline Tensor fft(const Tensor & self, int64_t signal_ndim, bool normalized) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fft(self, signal_ndim, normalized); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, signal_ndim, normalized); +#endif +} +static inline Tensor ifft(const Tensor & self, int64_t signal_ndim, bool normalized) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ifft(self, signal_ndim, normalized); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ifft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, signal_ndim, normalized); +#endif +} +static inline Tensor rfft(const Tensor & self, int64_t signal_ndim, bool normalized, bool onesided) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rfft(self, signal_ndim, normalized, onesided); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rfft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, signal_ndim, normalized, onesided); +#endif +} +static inline Tensor irfft(const Tensor & self, int64_t signal_ndim, bool normalized, bool onesided, IntArrayRef signal_sizes) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::irfft(self, signal_ndim, normalized, onesided, signal_sizes); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::irfft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, signal_ndim, normalized, onesided, signal_sizes); +#endif +} +static inline Tensor _fft_with_size(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_fft_with_size(self, signal_ndim, complex_input, complex_output, inverse, checked_signal_sizes, normalized, onesided, output_sizes); + break; + default: + AT_ERROR("_fft_with_size not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_fft_with_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, signal_ndim, complex_input, complex_output, inverse, checked_signal_sizes, normalized, onesided, output_sizes); +#endif +} +static inline int64_t _cufft_get_plan_cache_size(int64_t device_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_cufft_get_plan_cache_size(device_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cufft_get_plan_cache_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, device_index); +#endif +} +static inline int64_t _cufft_get_plan_cache_max_size(int64_t device_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_cufft_get_plan_cache_max_size(device_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cufft_get_plan_cache_max_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, device_index); +#endif +} +static inline void _cufft_set_plan_cache_max_size(int64_t device_index, int64_t max_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + TypeDefault::_cufft_set_plan_cache_max_size(device_index, max_size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cufft_set_plan_cache_max_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, device_index, max_size); +#endif +} +static inline void _cufft_clear_plan_cache(int64_t device_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + TypeDefault::_cufft_clear_plan_cache(device_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::_cufft_clear_plan_cache", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, device_index); +#endif +} +static inline Tensor index(const Tensor & self, TensorList indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index(self, indices); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices); +#endif +} +static inline Tensor index_copy(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy(self, dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_copy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, source); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_copy(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy(self, dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_copy", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, source); +#endif +} +#endif +static inline Tensor & index_put_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_put_(self, indices, values, accumulate); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_put_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, values, accumulate); +#endif +} +static inline Tensor index_put(const Tensor & self, TensorList indices, const Tensor & values, bool accumulate) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_put(self, indices, values, accumulate); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_put", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, values, accumulate); +#endif +} +static inline Tensor & _index_put_impl_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate, bool unsafe) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_index_put_impl_(self, indices, values, accumulate, unsafe); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_index_put_impl_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, values, accumulate, unsafe); +#endif +} +static inline Tensor instance_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool use_input_stats, double momentum, double eps, bool cudnn_enabled) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::instance_norm(input, weight, bias, running_mean, running_var, use_input_stats, momentum, eps, cudnn_enabled); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::instance_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, running_mean, running_var, use_input_stats, momentum, eps, cudnn_enabled); +#endif +} +static inline Tensor inverse(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::inverse(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::inverse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & inverse_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::inverse_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::inverse", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor _inverse_helper(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_inverse_helper(self); + break; + default: + AT_ERROR("_inverse_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_inverse_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor isclose(const Tensor & self, const Tensor & other, double rtol, double atol, bool equal_nan) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::isclose(self, other, rtol, atol, equal_nan); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::isclose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, rtol, atol, equal_nan); +#endif +} +static inline Tensor isnan(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::isnan(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::isnan", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline bool is_distributed(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_distributed(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_distributed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline bool is_floating_point(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_floating_point(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_floating_point", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline bool is_complex(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_complex(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_complex", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline bool is_nonzero(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_nonzero(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_nonzero", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline bool is_same_size(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_same_size(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_same_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline bool is_signed(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_signed(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::is_signed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor kl_div(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kl_div(self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kl_div", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline Tensor kl_div_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target)))) { + case Backend::CPU: + return CPUType::kl_div_backward(grad_output, self, target, reduction); + break; + default: + AT_ERROR("kl_div_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kl_div_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction); +#endif +} +static inline std::tuple kthvalue(const Tensor & self, int64_t k, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kthvalue(self, k, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kthvalue", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t, bool>( + op, self, k, dim, keepdim); +#endif +} +static inline std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(values, indices, self)))) { + case Backend::CPU: + return CPUType::kthvalue_out(values, indices, self, k, dim, keepdim); + break; + default: + AT_ERROR("kthvalue_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(values, indices, self))); + } +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kthvalue", "values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, int64_t, bool>( + op, values, indices, self, k, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple kthvalue(const Tensor & self, int64_t k, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kthvalue(self, k, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kthvalue", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, Dimname, bool>( + op, self, k, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kthvalue_out(values, indices, self, k, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::kthvalue", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, Dimname, bool>( + op, values, indices, self, k, dim, keepdim); +#endif +} +#endif +static inline Tensor layer_norm(const Tensor & input, IntArrayRef normalized_shape, const Tensor & weight, const Tensor & bias, double eps, bool cudnn_enable) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::layer_norm(input, normalized_shape, weight, bias, eps, cudnn_enable); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::layer_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, normalized_shape, weight, bias, eps, cudnn_enable); +#endif +} +static inline std::tuple native_layer_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias)))) { + case Backend::CPU: + return CPUType::native_layer_norm(input, weight, bias, M, N, eps); + break; + default: + AT_ERROR("native_layer_norm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::native_layer_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, double>( + op, input, weight, bias, M, N, eps); +#endif +} +static inline std::tuple native_layer_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, rstd, weight)))) { + case Backend::CPU: + return CPUType::native_layer_norm_backward(grad_out, input, mean, rstd, weight, M, N, output_mask); + break; + default: + AT_ERROR("native_layer_norm_backward not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, rstd, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::native_layer_norm_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, std::array>( + op, grad_out, input, mean, rstd, weight, M, N, output_mask); +#endif +} +static inline Tensor linear(const Tensor & input, const Tensor & weight, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::linear(input, weight, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::linear", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias); +#endif +} +static inline Tensor mkldnn_linear(const Tensor & input, const Tensor & weight, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias)))) { + + default: + AT_ERROR("mkldnn_linear not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_linear", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias); +#endif +} +static inline Tensor fbgemm_linear_int8_weight_fp32_activation(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_linear_int8_weight_fp32_activation(input, weight, packed, col_offsets, weight_scale, weight_zero_point, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_linear_int8_weight_fp32_activation", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, packed, col_offsets, weight_scale, weight_zero_point, bias); +#endif +} +static inline Tensor fbgemm_linear_int8_weight(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_linear_int8_weight(input, weight, packed, col_offsets, weight_scale, weight_zero_point, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_linear_int8_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, weight, packed, col_offsets, weight_scale, weight_zero_point, bias); +#endif +} +static inline std::tuple fbgemm_linear_quantize_weight(const Tensor & input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_linear_quantize_weight(input); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_linear_quantize_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, input); +#endif +} +static inline Tensor fbgemm_pack_gemm_matrix_fp16(const Tensor & input) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_pack_gemm_matrix_fp16(input); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_pack_gemm_matrix_fp16", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input); +#endif +} +static inline Tensor fbgemm_linear_fp16_weight_fp32_activation(const Tensor & input, const Tensor & packed_weight, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_linear_fp16_weight_fp32_activation(input, packed_weight, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_linear_fp16_weight_fp32_activation", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, packed_weight, bias); +#endif +} +static inline Tensor fbgemm_linear_fp16_weight(const Tensor & input, const Tensor & packed_weight, const Tensor & bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_linear_fp16_weight(input, packed_weight, bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_linear_fp16_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, packed_weight, bias); +#endif +} +static inline Tensor fbgemm_pack_quantized_matrix(const Tensor & input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_pack_quantized_matrix(input); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_pack_quantized_matrix", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input); +#endif +} +static inline Tensor fbgemm_pack_quantized_matrix(const Tensor & input, int64_t K, int64_t N) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fbgemm_pack_quantized_matrix(input, K, N); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fbgemm_pack_quantized_matrix", "KN"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, K, N); +#endif +} +static inline Tensor linspace(Scalar start, Scalar end, int64_t steps, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::linspace(start, end, steps, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::linspace", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, steps, options); +#endif +} +static inline Tensor & linspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::linspace_out(out, start, end, steps); + break; + default: + AT_ERROR("linspace_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::linspace", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, start, end, steps); 
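+    // Explanatory note (not part of the upstream generated ATen sources): each wrapper above has
+    // two build paths. With USE_STATIC_DISPATCH it switches directly on the tensors' Backend and
+    // calls the concrete kernel (e.g. CPUType::linspace_out); otherwise it resolves the registered
+    // schema ({"aten::linspace", "out"}) once, caches the c10::OperatorHandle in a function-local
+    // static, and routes the call through c10::Dispatcher::callUnboxed / callUnboxedOnly.
+    // A call such as
+    //     at::linspace_out(out, /*start=*/0, /*end=*/1, /*steps=*/50)   // illustrative values
+    // therefore pays the schema lookup only on its first invocation.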
+#endif +} +static inline Tensor log(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & log_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & log_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::log_out(out, self); + break; + default: + AT_ERROR("log_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor log10(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log10(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log10", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & log10_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log10_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log10_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & log10_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::log10_out(out, self); + break; + default: + AT_ERROR("log10_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log10", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor log1p(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log1p(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log1p", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & log1p_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::log1p_(self); + break; + case Backend::SparseCPU: + return SparseCPUType::log1p_(self); + break; + default: + AT_ERROR("log1p_ not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log1p_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & log1p_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::log1p_out(out, self); + break; + case Backend::SparseCPU: + return SparseCPUType::log1p_out(out, self); + break; + default: + AT_ERROR("log1p_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log1p", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor log2(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log2(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log2", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & log2_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log2_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log2_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & log2_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::log2_out(out, self); + break; + default: + AT_ERROR("log2_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log2", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor logdet(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logdet(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logdet", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor logspace(Scalar start, Scalar end, int64_t steps, double base, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logspace(start, end, steps, base, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logspace", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, steps, base, options); +#endif +} +static inline Tensor & logspace_out(Tensor & out, Scalar start, Scalar end, int64_t steps, double base) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::logspace_out(out, start, end, steps, base); + break; + default: + AT_ERROR("logspace_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logspace", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, start, end, steps, base); +#endif +} +static inline Tensor log_softmax(const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_softmax(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_softmax", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor log_softmax(const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_softmax(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_softmax", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#endif +static inline Tensor _log_softmax(const Tensor & self, int64_t dim, bool half_to_float) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_log_softmax(self, dim, half_to_float); + break; + default: + AT_ERROR("_log_softmax not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_log_softmax", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, half_to_float); +#endif +} +static inline Tensor _log_softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, output, self)))) { + case Backend::CPU: + return CPUType::_log_softmax_backward_data(grad_output, output, dim, self); + break; + default: + AT_ERROR("_log_softmax_backward_data not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_log_softmax_backward_data", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, output, dim, self); +#endif +} +static inline Tensor logsumexp(const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logsumexp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +static inline Tensor & logsumexp_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim) 
{ +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logsumexp", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor logsumexp(const Tensor & self, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logsumexp", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & logsumexp_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::logsumexp", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +#endif +static inline Tensor margin_ranking_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::margin_ranking_loss(input1, input2, target, margin, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::margin_ranking_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input1, input2, target, margin, reduction); +#endif +} +static inline Tensor matmul(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matmul(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::matmul", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & matmul_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matmul_out(out, self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::matmul", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor matrix_rank(const Tensor & self, double tol, bool symmetric) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matrix_rank(self, tol, symmetric); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::matrix_rank", "tol"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, tol, symmetric); +#endif +} +static inline Tensor matrix_rank(const Tensor & self, bool symmetric) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matrix_rank(self, symmetric); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::matrix_rank", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, symmetric); +#endif +} +static inline 
Tensor matrix_power(const Tensor & self, int64_t n) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matrix_power(self, n); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::matrix_power", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, n); +#endif +} +static inline std::tuple max(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_out(max, max_values, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "dim_max"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, max, max_values, self, dim, keepdim); +#endif +} +static inline Tensor max_values(const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_values(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple max(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_out(max, max_values, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "names_dim_max"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, Dimname, bool>( + op, max, max_values, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor max_values(const Tensor & self, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_values(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_values", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#endif +static inline std::tuple max_pool1d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + 
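+    // Explanatory note (not part of the upstream generated ATen sources): in the static-dispatch
+    // branch the RAII guard below appears to exclude the autograd ("variable") layer from dispatch
+    // for the duration of the call, so the wrapper forwards straight to the TypeDefault/backend
+    // kernel without going through the dynamic dispatcher.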
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_pool1d_with_indices(self, kernel_size, stride, padding, dilation, ceil_mode); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool1d_with_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, bool>( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor max_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_pool1d(self, kernel_size, stride, padding, dilation, ceil_mode); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_pool2d(self, kernel_size, stride, padding, dilation, ceil_mode); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor mkldnn_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("mkldnn_max_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_max_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor quantized_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::quantized_max_pool2d(self, kernel_size, stride, padding, dilation, ceil_mode); + break; + default: + AT_ERROR("quantized_max_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_max_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::max_pool3d(self, kernel_size, stride, padding, dilation, ceil_mode); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor mean(const Tensor & self, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::mean(self, dtype); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::mean(self, dtype); + break; + default: + AT_ERROR("mean not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mean", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dtype); +#endif +} +static inline Tensor mean(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::mean(self, dim, keepdim, dtype); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::mean(self, dim, keepdim, dtype); + break; + default: + AT_ERROR("mean not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mean", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +static inline Tensor & mean_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::mean_out(out, self, dim, keepdim, dtype); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::mean_out(out, self, dim, keepdim, dtype); + break; + default: + AT_ERROR("mean_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mean", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor mean(const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mean(self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mean", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & mean_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mean_out(out, self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::mean", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#endif +static inline std::tuple median(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::median", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median_out(values, indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::median", "dim_values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, values, indices, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple median(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::median", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median_out(values, indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::median", "names_dim_values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, Dimname, bool>( + op, values, indices, self, dim, keepdim); +#endif +} +#endif +static inline std::tuple min(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_out(min, min_indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "dim_min"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, min, min_indices, self, dim, keepdim); +#endif +} +static inline Tensor min_values(const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_values(self, dim, keepdim); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::min_values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple min(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_out(min, min_indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "names_dim_min"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, Dimname, bool>( + op, min, min_indices, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor min_values(const Tensor & self, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_values(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min_values", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +#endif +static inline Tensor mkldnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mkldnn_convolution(self, weight, bias, padding, stride, dilation, groups); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, stride, dilation, groups); +#endif +} +static inline Tensor mkldnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mkldnn_convolution_backward_input(self_size, grad_output, weight, padding, stride, dilation, groups, bias_defined); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_convolution_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self_size, grad_output, weight, padding, stride, dilation, groups, bias_defined); +#endif +} +static inline std::tuple mkldnn_convolution_backward_weights(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mkldnn_convolution_backward_weights(weight_size, grad_output, self, padding, stride, dilation, groups, bias_defined); +#else + static c10::OperatorHandle op 
= c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_convolution_backward_weights", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool>( + op, weight_size, grad_output, self, padding, stride, dilation, groups, bias_defined); +#endif +} +static inline std::tuple mkldnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mkldnn_convolution_backward(self, grad_output, weight, padding, stride, dilation, groups, output_mask); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_convolution_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, std::array>( + op, self, grad_output, weight, padding, stride, dilation, groups, output_mask); +#endif +} +static inline std::tuple miopen_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var)))) { + + default: + AT_ERROR("miopen_batch_norm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_batch_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, double>( + op, input, weight, bias, running_mean, running_var, training, exponential_average_factor, epsilon); +#endif +} +static inline std::tuple miopen_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, grad_output, weight, running_mean, running_var, save_mean, save_var)))) { + + default: + AT_ERROR("miopen_batch_norm_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, grad_output, weight, running_mean, running_var, save_mean, save_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_batch_norm_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, double>( + op, input, grad_output, weight, running_mean, running_var, save_mean, save_var, epsilon); +#endif +} +static inline Tensor miopen_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool 
benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("miopen_convolution not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor miopen_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight)))) { + + default: + AT_ERROR("miopen_convolution_backward_input not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self_size, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple miopen_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight)))) { + + default: + AT_ERROR("miopen_convolution_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool, bool, std::array>( + op, self, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic, output_mask); +#endif +} +static inline Tensor miopen_convolution_backward_bias(const Tensor & grad_output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + + default: + AT_ERROR("miopen_convolution_backward_bias not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_backward_bias", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output); +#endif +} +static inline Tensor miopen_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, 
bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + + default: + AT_ERROR("miopen_convolution_backward_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_size, grad_output, self, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor miopen_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("miopen_convolution_transpose not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_transpose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, output_padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple miopen_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight)))) { + + default: + AT_ERROR("miopen_convolution_transpose_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_transpose_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool, bool, std::array>( + op, self, grad_output, weight, padding, output_padding, stride, dilation, groups, benchmark, deterministic, output_mask); +#endif +} +static inline Tensor miopen_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight)))) { + + default: + AT_ERROR("miopen_convolution_transpose_backward_input not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_transpose_backward_input", 
""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor miopen_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + + default: + AT_ERROR("miopen_convolution_transpose_backward_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_convolution_transpose_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_size, grad_output, self, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor miopen_depthwise_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("miopen_depthwise_convolution not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_depthwise_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, bias, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline Tensor miopen_depthwise_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight)))) { + + default: + AT_ERROR("miopen_depthwise_convolution_backward_input not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_depthwise_convolution_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self_size, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple miopen_depthwise_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight)))) { + + default: + AT_ERROR("miopen_depthwise_convolution_backward not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self, grad_output, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_depthwise_convolution_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, int64_t, bool, bool, std::array>( + op, self, grad_output, weight, padding, stride, dilation, groups, benchmark, deterministic, output_mask); +#endif +} +static inline Tensor miopen_depthwise_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + + default: + AT_ERROR("miopen_depthwise_convolution_backward_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_depthwise_convolution_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, weight_size, grad_output, self, padding, stride, dilation, groups, benchmark, deterministic); +#endif +} +static inline std::tuple miopen_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, hx, cx, dropout_state)))) { + + default: + AT_ERROR("miopen_rnn not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, hx, cx, dropout_state))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_rnn", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, int64_t, const Tensor &, const Tensor &, int64_t, int64_t, int64_t, bool, double, bool, bool, IntArrayRef, const Tensor &>( + op, input, weight, weight_stride0, hx, cx, mode, hidden_size, num_layers, batch_first, dropout, train, bidirectional, batch_sizes, dropout_state); +#endif +} +static inline std::tuple> miopen_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, dropout_state, reserve)))) { + + default: + AT_ERROR("miopen_rnn_backward not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, dropout_state, reserve))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::miopen_rnn_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>, const Tensor &, TensorList, int64_t, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, int64_t, bool, double, bool, bool, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, input, weight, weight_stride0, weight_buf, hx, cx, output, grad_output, grad_hy, grad_cy, mode, hidden_size, num_layers, batch_first, dropout, train, bidirectional, batch_sizes, dropout_state, reserve, output_mask); +#endif +} +static inline Tensor mm(const Tensor & self, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mat2)))) { + case Backend::CPU: + return CPUType::mm(self, mat2); + break; + case Backend::SparseCPU: + return SparseCPUType::mm(self, mat2); + break; + default: + AT_ERROR("mm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat2); +#endif +} +static inline Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mat2)))) { + case Backend::CPU: + return CPUType::mm_out(out, self, mat2); + break; + case Backend::SparseCPU: + return SparseCPUType::mm_out(out, self, mat2); + break; + default: + AT_ERROR("mm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mat2); +#endif +} +static inline Tensor _sparse_mm(const Tensor & sparse, const Tensor & dense) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sparse_mm(sparse, dense); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_mm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, sparse, dense); +#endif +} +static inline std::tuple mode(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mode", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode_out(values, indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::mode", "values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, values, indices, self, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple mode(const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mode", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, self, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode_out(values, indices, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mode", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, Dimname, bool>( + op, values, indices, self, dim, keepdim); +#endif +} +#endif +static inline Tensor mul(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::mul(self, other); + break; + case Backend::SparseCPU: + return SparseCPUType::mul(self, other); + break; + default: + AT_ERROR("mul not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mul", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::mul_out(out, self, other); + break; + case Backend::SparseCPU: + return SparseCPUType::mul_out(out, self, other); + break; + default: + AT_ERROR("mul_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mul", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor mul(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mul(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mul", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor mv(const Tensor & self, const Tensor & vec) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, vec)))) { + case Backend::CPU: + return CPUType::mv(self, vec); + break; + default: + AT_ERROR("mv not implemented 
for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, vec))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, vec); +#endif +} +static inline Tensor & mv_out(Tensor & out, const Tensor & self, const Tensor & vec) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, vec)))) { + case Backend::CPU: + return CPUType::mv_out(out, self, vec); + break; + default: + AT_ERROR("mv_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, vec))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mv", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, vec); +#endif +} +static inline Tensor mvlgamma(const Tensor & self, int64_t p) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mvlgamma(self, p); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mvlgamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, p); +#endif +} +static inline Tensor narrow(const Tensor & self, int64_t dim, int64_t start, int64_t length) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::narrow(self, dim, start, length); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::narrow", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, start, length); +#endif +} +static inline std::tuple native_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var)))) { + case Backend::CPU: + return CPUType::native_batch_norm(input, weight, bias, running_mean, running_var, training, momentum, eps); + break; + default: + AT_ERROR("native_batch_norm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::native_batch_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, double>( + op, input, weight, bias, running_mean, running_var, training, momentum, eps); +#endif +} +static inline std::tuple batch_norm_stats(const Tensor & input, double eps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input)))) { + + default: + AT_ERROR("batch_norm_stats not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_stats", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, double>( + op, input, eps); 
+#endif +} +static inline Tensor batch_norm_elemt(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, mean, invstd)))) { + + default: + AT_ERROR("batch_norm_elemt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, weight, bias, mean, invstd))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_elemt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, mean, invstd, eps); +#endif +} +static inline Tensor & batch_norm_elemt_out(Tensor & out, const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, input, weight, bias, mean, invstd)))) { + + default: + AT_ERROR("batch_norm_elemt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, input, weight, bias, mean, invstd))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_elemt", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, input, weight, bias, mean, invstd, eps); +#endif +} +static inline std::tuple batch_norm_gather_stats(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, int64_t count) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, mean, invstd, running_mean, running_var)))) { + + default: + AT_ERROR("batch_norm_gather_stats not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, mean, invstd, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_gather_stats", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, double, double, int64_t>( + op, input, mean, invstd, running_mean, running_var, momentum, eps, count); +#endif +} +static inline std::tuple batch_norm_gather_stats_with_counts(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, IntArrayRef counts) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, mean, invstd, running_mean, running_var)))) { + + default: + AT_ERROR("batch_norm_gather_stats_with_counts not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, mean, invstd, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_gather_stats_with_counts", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor 
&, const Tensor &, double, double, IntArrayRef>( + op, input, mean, invstd, running_mean, running_var, momentum, eps, counts); +#endif +} +static inline std::tuple native_batch_norm_backward(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, weight, running_mean, running_var, save_mean, save_invstd)))) { + case Backend::CPU: + return CPUType::native_batch_norm_backward(grad_out, input, weight, running_mean, running_var, save_mean, save_invstd, train, eps, output_mask); + break; + default: + AT_ERROR("native_batch_norm_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, weight, running_mean, running_var, save_mean, save_invstd))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::native_batch_norm_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, double, std::array>( + op, grad_out, input, weight, running_mean, running_var, save_mean, save_invstd, train, eps, output_mask); +#endif +} +static inline std::tuple batch_norm_backward_reduce(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, bool input_g, bool weight_g, bool bias_g) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, invstd, weight)))) { + + default: + AT_ERROR("batch_norm_backward_reduce not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, invstd, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_backward_reduce", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool, bool, bool>( + op, grad_out, input, mean, invstd, weight, input_g, weight_g, bias_g); +#endif +} +static inline Tensor batch_norm_backward_elemt(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, const Tensor & mean_dy, const Tensor & mean_dy_xmu) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, invstd, weight, mean_dy, mean_dy_xmu)))) { + + default: + AT_ERROR("batch_norm_backward_elemt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_out, input, mean, invstd, weight, mean_dy, mean_dy_xmu))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_backward_elemt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_out, input, mean, invstd, weight, mean_dy, mean_dy_xmu); +#endif +} +static inline std::tuple batch_norm_update_stats(const Tensor & input, const Tensor & running_mean, const Tensor & 
running_var, double momentum) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input, running_mean, running_var)))) { + case Backend::CPU: + return CPUType::batch_norm_update_stats(input, running_mean, running_var, momentum); + break; + default: + AT_ERROR("batch_norm_update_stats not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input, running_mean, running_var))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::batch_norm_update_stats", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, double>( + op, input, running_mean, running_var, momentum); +#endif +} +static inline bool _nnpack_available() { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_nnpack_available(); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_nnpack_available", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op); +#endif +} +static inline Tensor _nnpack_spatial_convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_nnpack_spatial_convolution(input, weight, bias, padding, stride); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_nnpack_spatial_convolution", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weight, bias, padding, stride); +#endif +} +static inline std::tuple _nnpack_spatial_convolution_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_nnpack_spatial_convolution_backward(input, grad_output, weight, padding, output_mask); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_nnpack_spatial_convolution_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, std::array>( + op, input, grad_output, weight, padding, output_mask); +#endif +} +static inline Tensor _nnpack_spatial_convolution_backward_input(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_nnpack_spatial_convolution_backward_input(input, grad_output, weight, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_nnpack_spatial_convolution_backward_input", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, grad_output, weight, padding); +#endif +} +static inline Tensor _nnpack_spatial_convolution_backward_weight(const Tensor & input, IntArrayRef weightsize, const Tensor & grad_output, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_nnpack_spatial_convolution_backward_weight(input, weightsize, grad_output, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::_nnpack_spatial_convolution_backward_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, weightsize, grad_output, padding); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor ones(IntArrayRef size, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ones(size, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ones", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, names, options); +#endif +} +#endif +static inline Tensor ones(IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ones(size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ones", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, options); +#endif +} +static inline Tensor & ones_out(Tensor & out, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ones_out(out, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ones", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size); +#endif +} +static inline Tensor ones_like(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ones_like(self, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ones_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor ones_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ones_like(self, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ones_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, options, memory_format); +#endif +} +static inline Tensor pairwise_distance(const Tensor & x1, const Tensor & x2, double p, double eps, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::pairwise_distance(x1, x2, p, eps, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pairwise_distance", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, x1, x2, p, eps, keepdim); +#endif +} +static inline Tensor cdist(const Tensor & x1, const Tensor & x2, double p, c10::optional compute_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cdist(x1, x2, p, compute_mode); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cdist", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, double, c10::optional<int64_t>>(
+            op, x1, x2, p, compute_mode);
+#endif
+}
+static inline Tensor _cdist_backward(const Tensor & grad, const Tensor & x1, const Tensor & x2, double p, const Tensor & cdist) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_cdist_backward(grad, x1, x2, p, cdist);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_cdist_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, const Tensor &, double, const Tensor &>(
+            op, grad, x1, x2, p, cdist);
+#endif
+}
+static inline Tensor pdist(const Tensor & self, double p) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::pdist(self, p);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::pdist", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, double>(
+            op, self, p);
+#endif
+}
+static inline Tensor _pdist_forward(const Tensor & self, double p) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_pdist_forward(self, p);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_pdist_forward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, double>(
+            op, self, p);
+#endif
+}
+static inline Tensor _pdist_backward(const Tensor & grad, const Tensor & self, double p, const Tensor & pdist) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_pdist_backward(grad, self, p, pdist);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_pdist_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, double, const Tensor &>(
+            op, grad, self, p, pdist);
+#endif
+}
+static inline Tensor cosine_similarity(const Tensor & x1, const Tensor & x2, int64_t dim, double eps) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::cosine_similarity(x1, x2, dim, eps);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::cosine_similarity", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, int64_t, double>(
+            op, x1, x2, dim, eps);
+#endif
+}
+static inline Tensor pixel_shuffle(const Tensor & self, int64_t upscale_factor) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::pixel_shuffle(self, upscale_factor);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::pixel_shuffle", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, int64_t>(
+            op, self, upscale_factor);
+#endif
+}
+static inline Tensor pinverse(const Tensor & self, double rcond) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::pinverse(self, rcond);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::pinverse", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, double>(
+            op, self, rcond);
+#endif
+}
+static inline Tensor poisson_nll_loss(const Tensor & input, const Tensor & target, bool log_input, bool full, double eps, int64_t reduction) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::poisson_nll_loss(input, target, log_input, full, eps, reduction);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::poisson_nll_loss", ""}).value();
+    return 
c10::Dispatcher::singleton().callUnboxed( + op, input, target, log_input, full, eps, reduction); +#endif +} +static inline Tensor scalar_tensor(Scalar s, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scalar_tensor(s, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scalar_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, s, options); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor rand(IntArrayRef size, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand(size, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, names, options); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor rand(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand(size, generator, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", "generator_with_names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, generator, names, options); +#endif +} +#endif +static inline Tensor rand(IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand(size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, options); +#endif +} +static inline Tensor rand(IntArrayRef size, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand(size, generator, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", "generator"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, generator, options); +#endif +} +static inline Tensor & rand_out(Tensor & out, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand_out(out, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size); +#endif +} +static inline Tensor & rand_out(Tensor & out, IntArrayRef size, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand_out(out, size, generator); 
+#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand", "generator_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size, generator); +#endif +} +static inline Tensor rand_like(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand_like(self, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor rand_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rand_like(self, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rand_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, options, memory_format); +#endif +} +static inline Tensor randint(int64_t high, IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint(high, size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, high, size, options); +#endif +} +static inline Tensor randint(int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint(high, size, generator, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "generator"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, high, size, generator, options); +#endif +} +static inline Tensor randint(int64_t low, int64_t high, IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint(low, high, size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "low"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, low, high, size, options); +#endif +} +static inline Tensor randint(int64_t low, int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint(low, high, size, generator, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "low_generator"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, low, high, size, generator, options); +#endif +} 
+static inline Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_out(out, high, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, high, size); +#endif +} +static inline Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_out(out, high, size, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "generator_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, high, size, generator); +#endif +} +static inline Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_out(out, low, high, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "low_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, low, high, size); +#endif +} +static inline Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_out(out, low, high, size, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint", "low_generator_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, low, high, size, generator); +#endif +} +static inline Tensor randint_like(const Tensor & self, int64_t high, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_like(self, high, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, high, memory_format); +#endif +} +static inline Tensor randint_like(const Tensor & self, int64_t low, int64_t high, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_like(self, low, high, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint_like", "low"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, low, high, memory_format); +#endif +} +static inline Tensor randint_like(const Tensor & self, int64_t high, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_like(self, high, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, high, options, memory_format); +#endif +} +static inline Tensor randint_like(const Tensor & self, int64_t low, int64_t high, const TensorOptions & options, c10::optional 
memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randint_like(self, low, high, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randint_like", "low_dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, low, high, options, memory_format); +#endif +} +static inline Tensor randn(IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn(size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, options); +#endif +} +static inline Tensor randn(IntArrayRef size, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn(size, generator, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn", "generator"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, generator, options); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor randn(IntArrayRef size, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn(size, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, names, options); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor randn(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn(size, generator, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn", "generator_with_names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, generator, names, options); +#endif +} +#endif +static inline Tensor & randn_out(Tensor & out, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn_out(out, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size); +#endif +} +static inline Tensor & randn_out(Tensor & out, IntArrayRef size, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn_out(out, size, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::randn", "generator_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size, generator); +#endif +} +static inline Tensor randn_like(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn_like(self, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor randn_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randn_like(self, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randn_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, options, memory_format); +#endif +} +static inline Tensor randperm(int64_t n, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randperm(n, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randperm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, n, options); +#endif +} +static inline Tensor randperm(int64_t n, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randperm(n, generator, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randperm", "generator"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, n, generator, options); +#endif +} +static inline Tensor & randperm_out(Tensor & out, int64_t n) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::randperm_out(out, n); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randperm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, n); +#endif +} +static inline Tensor & randperm_out(Tensor & out, int64_t n, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::randperm_out(out, n, generator); + break; + default: + AT_ERROR("randperm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::randperm", "generator_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, n, generator); +#endif +} +static inline Tensor range(Scalar start, Scalar end, Scalar step, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::range(start, end, step, options); 
+#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::range", "step"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, step, options); +#endif +} +static inline Tensor range(Scalar start, Scalar end, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::range(start, end, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::range", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, start, end, options); +#endif +} +static inline Tensor & range_out(Tensor & out, Scalar start, Scalar end, Scalar step) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out)))) { + case Backend::CPU: + return CPUType::range_out(out, start, end, step); + break; + default: + AT_ERROR("range_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::range", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, start, end, step); +#endif +} +static inline Tensor reciprocal(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::reciprocal(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reciprocal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & reciprocal_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::reciprocal_(self); + break; + default: + AT_ERROR("reciprocal_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reciprocal_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & reciprocal_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::reciprocal_out(out, self); + break; + default: + AT_ERROR("reciprocal_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reciprocal", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor neg(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::neg(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::neg", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + 
op, self); +#endif +} +static inline Tensor & neg_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::neg_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::neg_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & neg_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::neg_out(out, self); + break; + default: + AT_ERROR("neg_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::neg", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor repeat_interleave(const Tensor & repeats) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(repeats)))) { + case Backend::CPU: + return CPUType::repeat_interleave(repeats); + break; + default: + AT_ERROR("repeat_interleave not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(repeats))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::repeat_interleave", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, repeats); +#endif +} +static inline Tensor repeat_interleave(const Tensor & self, const Tensor & repeats, c10::optional dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::repeat_interleave(self, repeats, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::repeat_interleave", "self_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, self, repeats, dim); +#endif +} +static inline Tensor repeat_interleave(const Tensor & self, int64_t repeats, c10::optional dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::repeat_interleave(self, repeats, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::repeat_interleave", "self_int"}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, self, repeats, dim); +#endif +} +static inline Tensor reshape(const Tensor & self, IntArrayRef shape) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::reshape(self, shape); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reshape", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, shape); +#endif +} +static inline Tensor _mkldnn_reshape(const Tensor & self, IntArrayRef shape) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("_mkldnn_reshape not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_mkldnn_reshape", 
""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, shape); +#endif +} +static inline Tensor round(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::round(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::round", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & round_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::round_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::round_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & round_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::round_out(out, self); + break; + default: + AT_ERROR("round_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::round", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor rrelu(const Tensor & self, Scalar lower, Scalar upper, bool training, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rrelu(self, lower, upper, training, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, lower, upper, training, generator); +#endif +} +static inline Tensor & rrelu_(Tensor & self, Scalar lower, Scalar upper, bool training, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rrelu_(self, lower, upper, training, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, lower, upper, training, generator); +#endif +} +static inline Tensor relu(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::relu(self); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::relu(self); + break; + default: + AT_ERROR("relu not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::relu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & relu_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::relu_(self); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::relu_(self); + break; + default: + AT_ERROR("relu_ not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::relu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor prelu(const Tensor & self, const Tensor & weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight)))) { + case Backend::CPU: + return CPUType::prelu(self, weight); + break; + default: + AT_ERROR("prelu not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prelu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, weight); +#endif +} +static inline std::tuple prelu_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight)))) { + case Backend::CPU: + return CPUType::prelu_backward(grad_output, self, weight); + break; + default: + AT_ERROR("prelu_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prelu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &>( + op, grad_output, self, weight); +#endif +} +static inline Tensor gelu(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::gelu(self); + break; + default: + AT_ERROR("gelu not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gelu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor gelu_backward(const Tensor & grad, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, self)))) { + case Backend::CPU: + return CPUType::gelu_backward(grad, self); + break; + default: + AT_ERROR("gelu_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gelu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, self); +#endif +} +static inline Tensor hardshrink(const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::hardshrink(self, lambd); + break; + default: + AT_ERROR("hardshrink not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardshrink", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, lambd); +#endif +} +static inline Tensor hardshrink_backward(const Tensor & grad_out, const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_out, self)))) { + case Backend::CPU: + return CPUType::hardshrink_backward(grad_out, self, lambd); + break; + default: + AT_ERROR("hardshrink_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardshrink_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_out, self, lambd); +#endif +} +static inline Tensor rsqrt(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsqrt(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rsqrt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & rsqrt_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsqrt_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rsqrt_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & rsqrt_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::rsqrt_out(out, self); + break; + default: + AT_ERROR("rsqrt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rsqrt", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor select(const Tensor & self, Dimname dim, int64_t index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::select(self, dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::select", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index); +#endif +} +#endif +static inline Tensor select(const Tensor & self, int64_t dim, int64_t index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::select(self, dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::select", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index); +#endif +} +static inline Tensor selu(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::selu(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::selu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline 
Tensor & selu_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::selu_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::selu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor celu(const Tensor & self, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::celu(self, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::celu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, alpha); +#endif +} +static inline Tensor & celu_(Tensor & self, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::celu_(self, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::celu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, alpha); +#endif +} +static inline Tensor sigmoid(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::sigmoid(self); + break; + default: + AT_ERROR("sigmoid not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sigmoid", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & sigmoid_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::sigmoid_(self); + break; + default: + AT_ERROR("sigmoid_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sigmoid_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & sigmoid_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sigmoid_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sigmoid", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor sin(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sin(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & sin_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sin_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sin_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & sin_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::sin_out(out, self); + break; + default: + AT_ERROR("sin_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sin", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor sinh(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sinh(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sinh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & sinh_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sinh_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sinh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & sinh_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sinh_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sinh", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor detach(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::detach(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::detach", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & detach_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::detach_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::detach_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline int64_t size(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::size(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::size", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline int64_t size(const Tensor & self, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::size(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::size", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim); +#endif +} +#endif +static inline Tensor slice(const Tensor & self, int64_t dim, int64_t start, int64_t end, int64_t step) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slice(self, dim, start, end, step); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slice", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, 
start, end, step); +#endif +} +static inline std::tuple slogdet(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slogdet(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slogdet", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, self); +#endif +} +static inline Tensor smm(const Tensor & self, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::smm(self, mat2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::smm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat2); +#endif +} +static inline Tensor softmax(const Tensor & self, int64_t dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::softmax(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softmax", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor softmax(const Tensor & self, Dimname dim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::softmax(self, dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softmax", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, dtype); +#endif +} +#endif +static inline Tensor _softmax(const Tensor & self, int64_t dim, bool half_to_float) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_softmax(self, dim, half_to_float); + break; + default: + AT_ERROR("_softmax not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_softmax", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, half_to_float); +#endif +} +static inline Tensor _softmax_backward_data(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, output, self)))) { + case Backend::CPU: + return CPUType::_softmax_backward_data(grad_output, output, dim, self); + break; + default: + AT_ERROR("_softmax_backward_data not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_softmax_backward_data", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, output, dim, self); +#endif +} +static inline std::vector split(const Tensor & self, int64_t split_size, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::split(self, split_size, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::split", "Tensor"}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t>( + op, self, split_size, dim); +#endif +} +static inline std::vector split_with_sizes(const Tensor & self, IntArrayRef split_sizes, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::split_with_sizes(self, split_sizes, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::split_with_sizes", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, int64_t>( + op, self, split_sizes, dim); +#endif +} +static inline Tensor squeeze(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::squeeze", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor squeeze(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::squeeze", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor squeeze(const Tensor & self, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::squeeze", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim); +#endif +} +#endif +static inline Tensor sspaddmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sspaddmm(self, mat1, mat2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sspaddmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat1, mat2, beta, alpha); +#endif +} +static inline Tensor & sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mat1, mat2)))) { + case Backend::CPU: + return CPUType::sspaddmm_out(out, self, mat1, mat2, beta, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::sspaddmm_out(out, self, mat1, mat2, beta, alpha); + break; + default: + AT_ERROR("sspaddmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mat1, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sspaddmm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mat1, mat2, beta, alpha); +#endif +} +static inline Tensor stack(TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stack(tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::stack", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly( + op, tensors, dim); +#endif +} +static inline Tensor & stack_out(Tensor & out, TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stack_out(out, tensors, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::stack", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, tensors, dim); +#endif +} +static inline Tensor stft(const Tensor & self, int64_t n_fft, c10::optional hop_length, c10::optional win_length, const Tensor & window, bool normalized, bool onesided) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stft(self, n_fft, hop_length, win_length, window, normalized, onesided); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::stft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional, const Tensor &, bool, bool>( + op, self, n_fft, hop_length, win_length, window, normalized, onesided); +#endif +} +static inline int64_t stride(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stride(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::stride", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline int64_t stride(const Tensor & self, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stride(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::stride", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim); +#endif +} +#endif +static inline Tensor sum(const Tensor & self, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(self, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dtype); +#endif +} +static inline Tensor sum(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sum", "dim_IntList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor sum(const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sum", "dim_DimnameList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +#endif +static inline Tensor & sum_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum_out(out, self, dim, keepdim, dtype); +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sum", "IntList_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & sum_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum_out(out, self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sum", "DimnameList_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#endif +static inline Tensor sqrt(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sqrt(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sqrt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & sqrt_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sqrt_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sqrt_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & sqrt_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sqrt_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sqrt", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor std(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(self, unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, unbiased); +#endif +} +static inline Tensor std(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, unbiased, keepdim); +#endif +} +static inline std::tuple std_mean(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std_mean(self, unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std_mean", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, self, unbiased); +#endif +} +static inline std::tuple std_mean(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std_mean(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std_mean", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, bool, bool>( + op, self, 
dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple std_mean(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std_mean(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std_mean", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, DimnameList, bool, bool>( + op, self, dim, unbiased, keepdim); +#endif +} +#endif +static inline Tensor & std_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std_out(out, self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor std(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, unbiased, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & std_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std_out(out, self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::std", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, unbiased, keepdim); +#endif +} +#endif +static inline Tensor prod(const Tensor & self, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(self, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dtype); +#endif +} +static inline Tensor prod(const Tensor & self, int64_t dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prod", "dim_int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +static inline Tensor & prod_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod_out(out, self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prod", "int_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor prod(const Tensor & self, Dimname dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prod", "dim_Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, dim, keepdim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & prod_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim, c10::optional dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod_out(out, self, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::prod", "Dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, dim, keepdim, dtype); +#endif +} +#endif +static inline Tensor t(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::t(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::t", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor tan(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tan(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tan", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & tan_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::tan_(self); + break; + default: + AT_ERROR("tan_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tan_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & tan_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::tan_out(out, self); + break; + default: + AT_ERROR("tan_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tan", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor tanh(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tanh(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tanh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & tanh_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::tanh_(self); + break; + default: + AT_ERROR("tanh_ not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tanh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & tanh_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::tanh_out(out, self); + break; + default: + AT_ERROR("tanh_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tanh", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor tensordot(const Tensor & self, const Tensor & other, IntArrayRef dims_self, IntArrayRef dims_other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tensordot(self, other, dims_self, dims_other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tensordot", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, other, dims_self, dims_other); +#endif +} +static inline Tensor threshold(const Tensor & self, Scalar threshold, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::threshold(self, threshold, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::threshold", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, threshold, value); +#endif +} +static inline Tensor & threshold_(Tensor & self, Scalar threshold, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::threshold_(self, threshold, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::threshold_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, threshold, value); +#endif +} +static inline Tensor & threshold_out(Tensor & out, const Tensor & self, Scalar threshold, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::threshold_out(out, self, threshold, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::threshold", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, threshold, value); +#endif +} +static inline Tensor threshold_backward(const Tensor & grad_output, const Tensor & self, Scalar threshold) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::threshold_backward(grad_output, self, threshold); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::threshold_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, threshold); +#endif +} +static inline Tensor transpose(const Tensor & self, int64_t dim0, int64_t dim1) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::transpose(self, dim0, dim1); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::transpose", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim0, dim1); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor transpose(const Tensor & self, Dimname dim0, Dimname dim1) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::transpose(self, dim0, dim1); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::transpose", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim0, dim1); +#endif +} +#endif +static inline Tensor _mkldnn_transpose(const Tensor & self, int64_t dim0, int64_t dim1) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("_mkldnn_transpose not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_mkldnn_transpose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim0, dim1); +#endif +} +static inline Tensor & _mkldnn_transpose_(Tensor & self, int64_t dim0, int64_t dim1) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("_mkldnn_transpose_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_mkldnn_transpose_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim0, dim1); +#endif +} +static inline Tensor one_hot(const Tensor & self, int64_t num_classes) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::one_hot(self, num_classes); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::one_hot", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, num_classes); +#endif +} +static inline Tensor flip(const Tensor & self, IntArrayRef dims) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::flip(self, dims); + break; + default: + AT_ERROR("flip not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::flip", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dims); +#endif +} +static inline Tensor roll(const Tensor & self, IntArrayRef shifts, IntArrayRef dims) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::roll(self, shifts, dims); + break; + default: + AT_ERROR("roll not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::roll", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, 
shifts, dims); +#endif +} +static inline Tensor rot90(const Tensor & self, int64_t k, IntArrayRef dims) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rot90(self, k, dims); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rot90", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, k, dims); +#endif +} +static inline Tensor trapz(const Tensor & y, const Tensor & x, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trapz(y, x, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trapz", "x"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, y, x, dim); +#endif +} +static inline Tensor trapz(const Tensor & y, double dx, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trapz(y, dx, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trapz", "dx"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, y, dx, dim); +#endif +} +static inline Tensor _trilinear(const Tensor & i1, const Tensor & i2, const Tensor & i3, IntArrayRef expand1, IntArrayRef expand2, IntArrayRef expand3, IntArrayRef sumdim, int64_t unroll_dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_trilinear(i1, i2, i3, expand1, expand2, expand3, sumdim, unroll_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_trilinear", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, i1, i2, i3, expand1, expand2, expand3, sumdim, unroll_dim); +#endif +} +static inline Tensor triplet_margin_loss(const Tensor & anchor, const Tensor & positive, const Tensor & negative, double margin, double p, double eps, bool swap, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triplet_margin_loss(anchor, positive, negative, margin, p, eps, swap, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triplet_margin_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, anchor, positive, negative, margin, p, eps, swap, reduction); +#endif +} +static inline Tensor trunc(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trunc(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trunc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & trunc_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trunc_(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trunc_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & trunc_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::trunc_out(out, self); + break; + default: + AT_ERROR("trunc_out not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trunc", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline bool _has_compatible_shallow_copy_type(const Tensor & self, const Tensor & from) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_has_compatible_shallow_copy_type(self, from); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_has_compatible_shallow_copy_type", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, from); +#endif +} +static inline std::tuple _unique(const Tensor & self, bool sorted, bool return_inverse) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_unique(self, sorted, return_inverse); + break; + default: + AT_ERROR("_unique not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_unique", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, sorted, return_inverse); +#endif +} +static inline std::tuple unique_dim(const Tensor & self, int64_t dim, bool sorted, bool return_inverse, bool return_counts) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::unique_dim(self, dim, sorted, return_inverse, return_counts); + break; + default: + AT_ERROR("unique_dim not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unique_dim", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool, bool, bool>( + op, self, dim, sorted, return_inverse, return_counts); +#endif +} +static inline std::tuple unique_consecutive(const Tensor & self, bool return_inverse, bool return_counts, c10::optional dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::unique_consecutive(self, return_inverse, return_counts, dim); + break; + default: + AT_ERROR("unique_consecutive not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unique_consecutive", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool, c10::optional>( + op, self, return_inverse, return_counts, dim); +#endif +} +static inline std::tuple unique_dim_consecutive(const Tensor & self, int64_t dim, bool return_inverse, bool return_counts) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::unique_dim_consecutive(self, dim, 
return_inverse, return_counts); + break; + default: + AT_ERROR("unique_dim_consecutive not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unique_dim_consecutive", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool, bool>( + op, self, dim, return_inverse, return_counts); +#endif +} +static inline std::tuple _unique2(const Tensor & self, bool sorted, bool return_inverse, bool return_counts) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_unique2(self, sorted, return_inverse, return_counts); + break; + default: + AT_ERROR("_unique2 not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_unique2", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool, bool>( + op, self, sorted, return_inverse, return_counts); +#endif +} +static inline Tensor _unsafe_view(const Tensor & self, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_unsafe_view(self, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_unsafe_view", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, size); +#endif +} +static inline Tensor unsqueeze(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unsqueeze(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unsqueeze", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim); +#endif +} +static inline Tensor var(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var(self, unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, unbiased); +#endif +} +static inline Tensor var(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, unbiased, keepdim); +#endif +} +static inline Tensor & var_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var_out(out, self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor var(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + return TypeDefault::var(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, unbiased, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & var_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var_out(out, self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, unbiased, keepdim); +#endif +} +#endif +static inline std::tuple var_mean(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var_mean(self, unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var_mean", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, self, unbiased); +#endif +} +static inline std::tuple var_mean(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var_mean(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var_mean", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, bool, bool>( + op, self, dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple var_mean(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var_mean(self, dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::var_mean", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, DimnameList, bool, bool>( + op, self, dim, unbiased, keepdim); +#endif +} +#endif +static inline Tensor where(const Tensor & condition, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::where(condition, self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::where", "self"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, condition, self, other); +#endif +} +static inline std::vector where(const Tensor & condition) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::where(condition); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::where", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, condition); +#endif +} +static inline Tensor _s_where(const Tensor & condition, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(condition, self, other)))) { + case Backend::CPU: + return CPUType::_s_where(condition, self, other); + break; + default: + 
AT_ERROR("_s_where not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(condition, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_s_where", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, condition, self, other); +#endif +} +static inline Tensor norm_except_dim(const Tensor & v, int64_t pow, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm_except_dim(v, pow, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm_except_dim", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, v, pow, dim); +#endif +} +static inline Tensor _weight_norm(const Tensor & v, const Tensor & g, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_weight_norm(v, g, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_weight_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, v, g, dim); +#endif +} +static inline std::tuple _weight_norm_cuda_interface(const Tensor & v, const Tensor & g, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(v, g)))) { + + default: + AT_ERROR("_weight_norm_cuda_interface not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(v, g))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_weight_norm_cuda_interface", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, int64_t>( + op, v, g, dim); +#endif +} +static inline std::tuple _weight_norm_cuda_interface_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_w, saved_v, saved_g, saved_norms)))) { + + default: + AT_ERROR("_weight_norm_cuda_interface_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_w, saved_v, saved_g, saved_norms))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_weight_norm_cuda_interface_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t>( + op, grad_w, saved_v, saved_g, saved_norms, dim); +#endif +} +static inline std::tuple _weight_norm_differentiable_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_weight_norm_differentiable_backward(grad_w, saved_v, saved_g, saved_norms, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_weight_norm_differentiable_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t>( + op, grad_w, saved_v, saved_g, saved_norms, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor zeros(IntArrayRef 
size, c10::optional names, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::zeros(size, names, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zeros", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const TensorOptions &>( + op, size, names, options); +#endif +} +#endif +static inline Tensor zeros(IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::zeros(size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zeros", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, options); +#endif +} +static inline Tensor & zeros_out(Tensor & out, IntArrayRef size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::zeros_out(out, size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zeros", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, size); +#endif +} +static inline Tensor zeros_like(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::zeros_like(self, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zeros_like", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor zeros_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::zeros_like(self, options, memory_format); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(self, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zeros_like", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, options, memory_format); +#endif +} +static inline Tensor _standard_gamma_grad(const Tensor & self, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, output)))) { + case Backend::CPU: + return CPUType::_standard_gamma_grad(self, output); + break; + default: + AT_ERROR("_standard_gamma_grad not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_standard_gamma_grad", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, output); +#endif +} +static inline Tensor _standard_gamma(const Tensor & self, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_standard_gamma(self, generator); + break; + default: + 
AT_ERROR("_standard_gamma not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_standard_gamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, generator); +#endif +} +static inline Tensor _dirichlet_grad(const Tensor & x, const Tensor & alpha, const Tensor & total) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(x, alpha, total)))) { + case Backend::CPU: + return CPUType::_dirichlet_grad(x, alpha, total); + break; + default: + AT_ERROR("_dirichlet_grad not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(x, alpha, total))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_dirichlet_grad", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, x, alpha, total); +#endif +} +static inline Tensor _sample_dirichlet(const Tensor & self, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_sample_dirichlet(self, generator); + break; + default: + AT_ERROR("_sample_dirichlet not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sample_dirichlet", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, generator); +#endif +} +static inline Tensor poisson(const Tensor & self, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::poisson(self, generator); + break; + default: + AT_ERROR("poisson not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::poisson", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, generator); +#endif +} +static inline Tensor native_norm(const Tensor & self, Scalar p) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::SparseCPU: + return SparseCPUType::native_norm(self, p); + break; + default: + AT_ERROR("native_norm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::native_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, p); +#endif +} +static inline Tensor _sparse_sum(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sparse_sum(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_sum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor _sparse_sum(const Tensor & self, ScalarType dtype) { 
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_sparse_sum(self, dtype);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_sparse_sum", "dtype"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, ScalarType>(
+            op, self, dtype);
+#endif
+}
+static inline Tensor _sparse_sum(const Tensor & self, IntArrayRef dim) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_sparse_sum(self, dim);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_sparse_sum", "dim"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, IntArrayRef>(
+            op, self, dim);
+#endif
+}
+static inline Tensor _sparse_sum(const Tensor & self, IntArrayRef dim, ScalarType dtype) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::_sparse_sum(self, dim, dtype);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_sparse_sum", "dim_dtype"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, IntArrayRef, ScalarType>(
+            op, self, dim, dtype);
+#endif
+}
+static inline Tensor _sparse_sum_backward(const Tensor & grad, const Tensor & self, IntArrayRef dim) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, self)))) {
+        case Backend::SparseCPU:
+            return SparseCPUType::_sparse_sum_backward(grad, self, dim);
+            break;
+        default:
+            AT_ERROR("_sparse_sum_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::_sparse_sum_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, const Tensor &, IntArrayRef>(
+            op, grad, self, dim);
+#endif
+}
+static inline Tensor norm(const Tensor & self, c10::optional<Scalar> p, ScalarType dtype) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::norm(self, p, dtype);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::norm", "ScalarOpt_dtype"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, c10::optional<Scalar>, ScalarType>(
+            op, self, p, dtype);
+#endif
+}
+static inline Tensor norm(const Tensor & self, Scalar p) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::norm(self, p);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::norm", "Scalar"}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, Scalar>(
+            op, self, p);
+#endif
+}
+static inline Tensor norm(const Tensor & self, c10::optional<Scalar> p, IntArrayRef dim, bool keepdim, ScalarType dtype) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::norm(self, p, dim, keepdim, dtype);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::norm", "ScalarOpt_dim_dtype"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, c10::optional<Scalar>, IntArrayRef, bool, ScalarType>(
+            op, self, p, dim, keepdim, dtype);
+#endif
+}
+static inline Tensor norm(const Tensor & self, c10::optional<Scalar> p, IntArrayRef dim, bool keepdim) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::norm(self, p, dim, keepdim);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+ .findSchema({"aten::norm", "ScalarOpt_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, bool>( + op, self, p, dim, keepdim); +#endif +} +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm_out(out, self, p, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "dtype_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, bool, ScalarType>( + op, out, self, p, dim, keepdim, dtype); +#endif +} +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm_out(out, self, p, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, bool>( + op, out, self, p, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(self, p, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "names_ScalarOpt_dim_dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool, ScalarType>( + op, self, p, dim, keepdim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(self, p, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "names_ScalarOpt_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool>( + op, self, p, dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm_out(out, self, p, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "names_dtype_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool, ScalarType>( + op, out, self, p, dim, keepdim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm_out(out, self, p, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::norm", "names_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool>( + op, out, self, p, dim, keepdim); +#endif +} +#endif +static inline Tensor frobenius_norm(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::frobenius_norm(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frobenius_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor frobenius_norm(const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frobenius_norm(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frobenius_norm", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +static inline Tensor & frobenius_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frobenius_norm_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::frobenius_norm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +static inline Tensor nuclear_norm(const Tensor & self, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nuclear_norm(self, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nuclear_norm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, keepdim); +#endif +} +static inline Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nuclear_norm_out(out, self, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nuclear_norm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, keepdim); +#endif +} +static inline Tensor nuclear_norm(const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nuclear_norm(self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nuclear_norm", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, keepdim); +#endif +} +static inline Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nuclear_norm_out(out, self, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nuclear_norm", "dim_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, keepdim); +#endif +} +static inline Tensor clone(const Tensor & self, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::clone(self, memory_format); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::clone(self, memory_format); + break; + case Backend::SparseCPU: + return SparseCPUType::clone(self, memory_format); + break; + default: + AT_ERROR("clone not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::clone", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, memory_format); +#endif +} +static inline Tensor & resize_as_(Tensor & self, const Tensor & the_template, c10::optional memory_format) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::resize_as_(self, the_template, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::resize_as_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, the_template, memory_format); +#endif +} +static inline Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::pow_out(out, self, exponent); + break; + case Backend::SparseCPU: + return SparseCPUType::pow_out(out, self, exponent); + break; + default: + AT_ERROR("pow_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Tensor_Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, exponent); +#endif +} +static inline Tensor pow(const Tensor & self, Scalar exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::pow(self, exponent); + break; + case Backend::SparseCPU: + return SparseCPUType::pow(self, exponent); + break; + default: + AT_ERROR("pow not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Tensor_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, exponent); +#endif +} +static inline Tensor & zero_(Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::zero_(self); + break; + case Backend::SparseCPU: + return SparseCPUType::zero_(self); + break; + default: + AT_ERROR("zero_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::zero_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::sub_out(out, self, other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::sub_out(out, self, other, alpha); + break; + default: + AT_ERROR("sub_out not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sub", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other, alpha); +#endif +} +static inline Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::sub(self, other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::sub(self, other, alpha); + break; + default: + AT_ERROR("sub not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sub", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor sub(const Tensor & self, Scalar other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sub(self, other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sub", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor rsub(const Tensor & self, const Tensor & other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsub(self, other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rsub", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor rsub(const Tensor & self, Scalar other, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsub(self, other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rsub", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, alpha); +#endif +} +static inline Tensor _sparse_addmm(const Tensor & self, const Tensor & sparse, const Tensor & dense, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sparse_addmm(self, sparse, dense, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_addmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, sparse, dense, beta, alpha); +#endif +} +static inline Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mat1, mat2)))) { + case Backend::CPU: + return CPUType::addmm_out(out, self, mat1, mat2, beta, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::addmm_out(out, self, mat1, mat2, beta, alpha); + break; + default: + AT_ERROR("addmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mat1, mat2))); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::addmm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mat1, mat2, beta, alpha); +#endif +} +static inline Tensor addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mat1, mat2)))) { + case Backend::CPU: + return CPUType::addmm(self, mat1, mat2, beta, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::addmm(self, mat1, mat2, beta, alpha); + break; + default: + AT_ERROR("addmm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mat1, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mat1, mat2, beta, alpha); +#endif +} +static inline Tensor sparse_coo_tensor(IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sparse_coo_tensor(size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sparse_coo_tensor", "size"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, size, options); +#endif +} +static inline Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sparse_coo_tensor(indices, values, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(indices, values, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sparse_coo_tensor", "indices"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, indices, values, options); +#endif +} +static inline Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sparse_coo_tensor(indices, values, size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(indices, values, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sparse_coo_tensor", "indices_size"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, indices, values, size, options); +#endif +} +static inline Tensor _sparse_coo_tensor_unsafe(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_sparse_coo_tensor_unsafe(indices, values, size, options); +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(indices, values, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_coo_tensor_unsafe", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, indices, values, size, options); +#endif +} +static inline Tensor 
_sparse_coo_tensor_with_dims(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::SparseCPU: + return SparseCPUType::_sparse_coo_tensor_with_dims(sparse_dim, dense_dim, size, options); + break; + default: + AT_ERROR("_sparse_coo_tensor_with_dims not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_coo_tensor_with_dims", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, sparse_dim, dense_dim, size, options); +#endif +} +static inline Tensor _sparse_coo_tensor_with_dims_and_tensors(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const Tensor & indices, const Tensor & values, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(indices, values, options)))) { + case Backend::SparseCPU: + return SparseCPUType::_sparse_coo_tensor_with_dims_and_tensors(sparse_dim, dense_dim, size, indices, values, options); + break; + default: + AT_ERROR("_sparse_coo_tensor_with_dims_and_tensors not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(indices, values, options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(indices, values, options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_sparse_coo_tensor_with_dims_and_tensors", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, sparse_dim, dense_dim, size, indices, values, options); +#endif +} +static inline Tensor to_dense_backward(const Tensor & grad, const Tensor & input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to_dense_backward(grad, input); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::to_dense_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, input); +#endif +} +static inline Tensor & hspmm_out(Tensor & out, const Tensor & mat1, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, mat1, mat2)))) { + case Backend::SparseCPU: + return SparseCPUType::hspmm_out(out, mat1, mat2); + break; + default: + AT_ERROR("hspmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, mat1, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hspmm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, mat1, mat2); +#endif +} +static inline Tensor hspmm(const Tensor & mat1, const Tensor & mat2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(mat1, mat2)))) { + case Backend::SparseCPU: + return SparseCPUType::hspmm(mat1, 
mat2); + break; + default: + AT_ERROR("hspmm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(mat1, mat2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hspmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, mat1, mat2); +#endif +} +static inline Tensor & copy_sparse_to_sparse_(Tensor & self, const Tensor & src, bool non_blocking) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, src)))) { + case Backend::SparseCPU: + return SparseCPUType::copy_sparse_to_sparse_(self, src, non_blocking); + break; + default: + AT_ERROR("copy_sparse_to_sparse_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, src))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::copy_sparse_to_sparse_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, src, non_blocking); +#endif +} +static inline std::vector unbind(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unbind(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unbind", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t>( + op, self, dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::vector unbind(const Tensor & self, Dimname dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unbind(self, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::unbind", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname>( + op, self, dim); +#endif +} +#endif +static inline Tensor mkldnn_reorder_conv2d_weight(const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("mkldnn_reorder_conv2d_weight not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_reorder_conv2d_weight", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding, stride, dilation, groups); +#endif +} +static inline Tensor to_mkldnn_backward(const Tensor & grad, const Tensor & input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to_mkldnn_backward(grad, input); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::to_mkldnn_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, input); +#endif +} +static inline Tensor quantize_per_tensor(const Tensor & self, double scale, int64_t zero_point, ScalarType dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::quantize_per_tensor(self, scale, zero_point, dtype); + 
break; + default: + AT_ERROR("quantize_per_tensor not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantize_per_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, scale, zero_point, dtype); +#endif +} +static inline Tensor quantize_per_channel(const Tensor & self, const Tensor & scales, const Tensor & zero_points, int64_t axis, ScalarType dtype) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, scales, zero_points)))) { + case Backend::CPU: + return CPUType::quantize_per_channel(self, scales, zero_points, axis, dtype); + break; + default: + AT_ERROR("quantize_per_channel not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, scales, zero_points))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantize_per_channel", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, scales, zero_points, axis, dtype); +#endif +} +static inline Tensor dequantize(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::dequantize(self); + break; + default: + AT_ERROR("dequantize not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dequantize", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline double q_scale(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_scale(self); + break; + default: + AT_ERROR("q_scale not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::q_scale", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline int64_t q_zero_point(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_zero_point(self); + break; + default: + AT_ERROR("q_zero_point not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::q_zero_point", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor q_per_channel_scales(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_scales(self); + break; + default: + 
AT_ERROR("q_per_channel_scales not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::q_per_channel_scales", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor q_per_channel_zero_points(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_zero_points(self); + break; + default: + AT_ERROR("q_per_channel_zero_points not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::q_per_channel_zero_points", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline int64_t q_per_channel_axis(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_axis(self); + break; + default: + AT_ERROR("q_per_channel_axis not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::q_per_channel_axis", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor int_repr(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::int_repr(self); + break; + default: + AT_ERROR("int_repr not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::int_repr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor _make_per_tensor_quantized_tensor(const Tensor & self, double scale, int64_t zero_point) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_make_per_tensor_quantized_tensor(self, scale, zero_point); + break; + default: + AT_ERROR("_make_per_tensor_quantized_tensor not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_make_per_tensor_quantized_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, scale, zero_point); +#endif +} +static inline Tensor _make_per_channel_quantized_tensor(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, scale, zero_point)))) { + case Backend::CPU: + return 
CPUType::_make_per_channel_quantized_tensor(self, scale, zero_point, axis); + break; + default: + AT_ERROR("_make_per_channel_quantized_tensor not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, scale, zero_point))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_make_per_channel_quantized_tensor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, scale, zero_point, axis); +#endif +} +static inline Tensor fake_quantize_per_tensor_affine(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::fake_quantize_per_tensor_affine(self, scale, zero_point, quant_min, quant_max); + break; + default: + AT_ERROR("fake_quantize_per_tensor_affine not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fake_quantize_per_tensor_affine", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, scale, zero_point, quant_min, quant_max); +#endif +} +static inline Tensor fake_quantize_per_tensor_affine_backward(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, self)))) { + case Backend::CPU: + return CPUType::fake_quantize_per_tensor_affine_backward(grad, self, scale, zero_point, quant_min, quant_max); + break; + default: + AT_ERROR("fake_quantize_per_tensor_affine_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fake_quantize_per_tensor_affine_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad, self, scale, zero_point, quant_min, quant_max); +#endif +} +static inline Tensor fake_quantize_per_channel_affine(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, scale, zero_point)))) { + case Backend::CPU: + return CPUType::fake_quantize_per_channel_affine(self, scale, zero_point, axis, quant_min, quant_max); + break; + default: + AT_ERROR("fake_quantize_per_channel_affine not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, scale, zero_point))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fake_quantize_per_channel_affine", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, scale, zero_point, axis, quant_min, quant_max); +#endif +} +static inline Tensor fake_quantize_per_channel_affine_backward(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad, self, scale, zero_point)))) { + case Backend::CPU: + return CPUType::fake_quantize_per_channel_affine_backward(grad, self, scale, zero_point, axis, quant_min, quant_max); + break; + default: + AT_ERROR("fake_quantize_per_channel_affine_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad, self, scale, zero_point))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fake_quantize_per_channel_affine_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t, int64_t>( + op, grad, self, scale, zero_point, axis, quant_min, quant_max); +#endif +} +static inline std::vector<Tensor> meshgrid(TensorList tensors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::meshgrid(tensors); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::meshgrid", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::vector<Tensor>, TensorList>( + op, tensors); +#endif +} +static inline Tensor cartesian_prod(TensorList tensors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cartesian_prod(tensors); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cartesian_prod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, TensorList>( + op, tensors); +#endif +} +static inline Tensor combinations(const Tensor & self, int64_t r, bool with_replacement) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::combinations(self, r, with_replacement); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::combinations", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, int64_t, bool>( + op, self, r, with_replacement); +#endif +} +static inline ScalarType result_type(const Tensor & tensor, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::result_type(tensor, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::result_type", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<ScalarType, const Tensor &, const Tensor &>( + op, tensor, other); +#endif +} +static inline ScalarType result_type(const Tensor & tensor, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::result_type(tensor, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::result_type", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<ScalarType, const Tensor &, Scalar>( + op, tensor, other); +#endif +} +static inline ScalarType result_type(Scalar scalar, const Tensor & tensor) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::result_type(scalar, tensor); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::result_type", "Scalar_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<ScalarType, Scalar, const Tensor &>( + op, scalar, tensor); +#endif +} +static inline ScalarType result_type(Scalar scalar1, Scalar scalar2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::result_type(scalar1, scalar2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() +
.findSchema({"aten::result_type", "Scalar_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, scalar1, scalar2); +#endif +} +static inline bool can_cast(ScalarType from, ScalarType to) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::can_cast(from, to); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::can_cast", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, from, to); +#endif +} +static inline ScalarType promote_types(ScalarType type1, ScalarType type2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::promote_types(type1, type2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::promote_types", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, type1, type2); +#endif +} +static inline Scalar _local_scalar_dense(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_local_scalar_dense(self); + break; + default: + AT_ERROR("_local_scalar_dense not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_local_scalar_dense", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline std::tuple _thnn_fused_lstm_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & cx, const Tensor & input_bias, const Tensor & hidden_bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input_gates, hidden_gates, cx, input_bias, hidden_bias)))) { + + default: + AT_ERROR("_thnn_fused_lstm_cell not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input_gates, hidden_gates, cx, input_bias, hidden_bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_fused_lstm_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &>( + op, input_gates, hidden_gates, cx, input_bias, hidden_bias); +#endif +} +static inline std::tuple _thnn_fused_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & cx, const Tensor & cy, const Tensor & workspace, bool has_bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_hy, grad_cy, cx, cy, workspace)))) { + + default: + AT_ERROR("_thnn_fused_lstm_cell_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_hy, grad_cy, cx, cy, workspace))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_fused_lstm_cell_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, bool>( + op, grad_hy, grad_cy, cx, cy, workspace, has_bias); +#endif +} +static inline std::tuple 
_thnn_differentiable_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & input_bias, const Tensor & hidden_bias, const Tensor & cx, const Tensor & cy) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_thnn_differentiable_lstm_cell_backward(grad_hy, grad_cy, input_gates, hidden_gates, input_bias, hidden_bias, cx, cy); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_differentiable_lstm_cell_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &>( + op, grad_hy, grad_cy, input_gates, hidden_gates, input_bias, hidden_bias, cx, cy); +#endif +} +static inline std::tuple _thnn_fused_gru_cell(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias, const Tensor & hidden_bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(input_gates, hidden_gates, hx, input_bias, hidden_bias)))) { + + default: + AT_ERROR("_thnn_fused_gru_cell not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(input_gates, hidden_gates, hx, input_bias, hidden_bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_fused_gru_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &>( + op, input_gates, hidden_gates, hx, input_bias, hidden_bias); +#endif +} +static inline std::tuple _thnn_fused_gru_cell_backward(const Tensor & grad_hy, const Tensor & workspace, bool has_bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_hy, workspace)))) { + + default: + AT_ERROR("_thnn_fused_gru_cell_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_hy, workspace))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_fused_gru_cell_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool>( + op, grad_hy, workspace, has_bias); +#endif +} +static inline std::tuple _thnn_differentiable_gru_cell_backward(const Tensor & grad_hy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias, const Tensor & hidden_bias) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_thnn_differentiable_gru_cell_backward(grad_hy, input_gates, hidden_gates, hx, input_bias, hidden_bias); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_thnn_differentiable_gru_cell_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &>( + op, grad_hy, input_gates, hidden_gates, hx, input_bias, hidden_bias); +#endif +} +static inline std::tuple lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool 
train, bool bidirectional, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lstm(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lstm", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, TensorList, bool, int64_t, double, bool, bool, bool>( + op, input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#endif +} +static inline std::tuple lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lstm(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lstm", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, TensorList, bool, int64_t, double, bool, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#endif +} +static inline std::tuple gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gru(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gru", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool, bool>( + op, input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#endif +} +static inline std::tuple gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gru(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gru", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#endif +} +static inline std::tuple rnn_tanh(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_tanh(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_tanh", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool, bool>( + op, input, hx, params, 
has_biases, num_layers, dropout, train, bidirectional, batch_first); +#endif +} +static inline std::tuple rnn_tanh(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_tanh(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_tanh", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#endif +} +static inline std::tuple rnn_relu(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_relu(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_relu", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool, bool>( + op, input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#endif +} +static inline std::tuple rnn_relu(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_relu(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_relu", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#endif +} +static inline std::tuple lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lstm_cell(input, hx, w_ih, w_hh, b_ih, b_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lstm_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, const Tensor &, const Tensor &, const Tensor &, const Tensor &>( + op, input, hx, w_ih, w_hh, b_ih, b_hh); +#endif +} +static inline Tensor gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gru_cell(input, hx, w_ih, w_hh, b_ih, b_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gru_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, hx, w_ih, w_hh, b_ih, b_hh); +#endif +} 
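(Editorial aside, not part of the diff: the hunk above declares the generated ATen entry points for the single-step RNN cells. A minimal usage sketch follows, assuming a libtorch build is on the include/link path; the tensor sizes are arbitrary and only follow the usual GRU convention of stacking the three gates, so the gate weights have 3 * hidden_size rows.)

// Illustrative sketch only: driving at::gru_cell, one of the wrappers declared above.
#include <ATen/ATen.h>

int main() {
  const int64_t batch = 2, input_size = 4, hidden_size = 8;      // arbitrary example sizes
  at::Tensor input = at::randn({batch, input_size});
  at::Tensor hx    = at::randn({batch, hidden_size});
  at::Tensor w_ih  = at::randn({3 * hidden_size, input_size});   // reset/update/new gates stacked
  at::Tensor w_hh  = at::randn({3 * hidden_size, hidden_size});
  at::Tensor b_ih  = at::randn({3 * hidden_size});
  at::Tensor b_hh  = at::randn({3 * hidden_size});
  // One GRU step: returns the next hidden state of shape {batch, hidden_size}.
  at::Tensor hy = at::gru_cell(input, hx, w_ih, w_hh, b_ih, b_hh);
  return hy.size(1) == hidden_size ? 0 : 1;
}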
+static inline Tensor rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_tanh_cell(input, hx, w_ih, w_hh, b_ih, b_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_tanh_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, hx, w_ih, w_hh, b_ih, b_hh); +#endif +} +static inline Tensor rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rnn_relu_cell(input, hx, w_ih, w_hh, b_ih, b_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rnn_relu_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, input, hx, w_ih, w_hh, b_ih, b_hh); +#endif +} +static inline std::tuple quantized_lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, c10::optional dtype, bool use_dynamic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_lstm(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first, dtype, use_dynamic); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_lstm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, TensorList, bool, int64_t, double, bool, bool, bool, c10::optional, bool>( + op, input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first, dtype, use_dynamic); +#endif +} +static inline std::tuple quantized_lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, c10::optional dtype, bool use_dynamic) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_lstm(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional, dtype, use_dynamic); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_lstm", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, TensorList, bool, int64_t, double, bool, bool, c10::optional, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional, dtype, use_dynamic); +#endif +} +static inline std::tuple quantized_gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_gru(input, hx, params, has_biases, num_layers, dropout, train, bidirectional, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_gru", "input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool, bool>( + op, input, hx, params, has_biases, 
num_layers, dropout, train, bidirectional, batch_first); +#endif +} +static inline std::tuple quantized_gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_gru(data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_gru", "data"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, TensorList, bool, int64_t, double, bool, bool>( + op, data, batch_sizes, hx, params, has_biases, num_layers, dropout, train, bidirectional); +#endif +} +static inline std::tuple quantized_lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_lstm_cell(input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_lstm_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, TensorList, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, const Tensor &, Scalar, Scalar, Scalar, Scalar>( + op, input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#endif +} +static inline Tensor quantized_gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_gru_cell(input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_gru_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#endif +} +static inline Tensor quantized_rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_rnn_relu_cell(input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, 
scale_hh, zero_point_ih, zero_point_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_rnn_relu_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#endif +} +static inline Tensor quantized_rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::quantized_rnn_tanh_cell(input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::quantized_rnn_tanh_cell", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, input, hx, w_ih, w_hh, b_ih, b_hh, packed_ih, packed_hh, col_offsets_ih, col_offsets_hh, scale_ih, scale_hh, zero_point_ih, zero_point_hh); +#endif +} +static inline std::tuple _pack_padded_sequence(const Tensor & input, const Tensor & lengths, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_pack_padded_sequence(input, lengths, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_pack_padded_sequence", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool>( + op, input, lengths, batch_first); +#endif +} +static inline Tensor _pack_padded_sequence_backward(const Tensor & grad, IntArrayRef input_size, const Tensor & batch_sizes, bool batch_first) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_pack_padded_sequence_backward(grad, input_size, batch_sizes, batch_first); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_pack_padded_sequence_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad, input_size, batch_sizes, batch_first); +#endif +} +static inline std::tuple _pad_packed_sequence(const Tensor & data, const Tensor & batch_sizes, bool batch_first, Scalar padding_value, int64_t total_length) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_pad_packed_sequence(data, batch_sizes, batch_first, padding_value, total_length); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_pad_packed_sequence", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool, Scalar, int64_t>( + op, data, batch_sizes, batch_first, padding_value, total_length); +#endif +} +static inline Tensor masked_fill(const Tensor & self, const Tensor & mask, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_fill(self, mask, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::masked_fill", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mask, value); +#endif +} 
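(Editorial aside, not part of the diff: masked_fill above and the masked_scatter / index_* / scatter wrappers that follow are the out-of-place variants, i.e. they return a new tensor and leave self untouched. A minimal sketch with made-up shapes, assuming libtorch headers are available:)

// Illustrative sketch only: out-of-place masked_fill and scatter_add as declared above.
#include <ATen/ATen.h>

int main() {
  at::Tensor x    = at::arange(6, at::kFloat).reshape({2, 3});   // [[0,1,2],[3,4,5]]
  at::Tensor mask = x.gt(2.0);                                   // boolean mask, true where x > 2
  at::Tensor filled = at::masked_fill(x, mask, -1.0);            // x itself stays unchanged

  at::Tensor index = at::zeros({1, 3}, at::kLong);               // route every src element to row 0
  at::Tensor src   = at::ones({1, 3});
  at::Tensor acc   = at::scatter_add(x, /*dim=*/0, index, src);  // accumulates into a copy of x
  return (filled.min().item<float>() == -1.0f && acc[0][0].item<float>() == 1.0f) ? 0 : 1;
}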
+static inline Tensor masked_fill(const Tensor & self, const Tensor & mask, const Tensor & value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_fill(self, mask, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::masked_fill", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mask, value); +#endif +} +static inline Tensor masked_scatter(const Tensor & self, const Tensor & mask, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_scatter(self, mask, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::masked_scatter", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mask, source); +#endif +} +static inline Tensor index_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_add(self, dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_add", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, source); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_add(self, dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_add", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, source); +#endif +} +#endif +static inline Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(self, dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_fill", "int_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, value); +#endif +} +static inline Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(self, dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_fill", "int_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, value); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(self, dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_fill", "Dimname_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, value); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(self, 
dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_fill", "Dimname_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, value); +#endif +} +#endif +static inline Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(self, dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter", "src"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, src); +#endif +} +static inline Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(self, dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter", "value"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, value); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(self, dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter", "dimname_src"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, src); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(self, dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter", "dimname_value"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, value); +#endif +} +#endif +static inline Tensor scatter_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter_add(self, dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter_add", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, src); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor scatter_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter_add(self, dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::scatter_add", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, src); +#endif +} +#endif +static inline Tensor __and__(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::__and__(self, other); + break; + default: + AT_ERROR("__and__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + 
} +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__and__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __and__(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::__and__(self, other); + break; + default: + AT_ERROR("__and__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__and__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __or__(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::__or__(self, other); + break; + default: + AT_ERROR("__or__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__or__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __or__(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::__or__(self, other); + break; + default: + AT_ERROR("__or__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__or__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::bitwise_xor_out(out, self, other); + break; + default: + AT_ERROR("bitwise_xor_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bitwise_xor", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::bitwise_xor_out(out, self, other); + break; + default: + AT_ERROR("bitwise_xor_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::bitwise_xor", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor bitwise_xor(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bitwise_xor", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, other); +#endif +} +static inline Tensor bitwise_xor(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::bitwise_xor", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, other); +#endif +} +static inline Tensor __xor__(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__xor__(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__xor__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __xor__(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__xor__(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__xor__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __lshift__(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::__lshift__(self, other); + break; + default: + AT_ERROR("__lshift__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__lshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __lshift__(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::__lshift__(self, other); + break; + default: + AT_ERROR("__lshift__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__lshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __rshift__(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::__rshift__(self, other); + break; + default: + AT_ERROR("__rshift__ not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__rshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor __rshift__(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::__rshift__(self, other); + break; + default: + AT_ERROR("__rshift__ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::__rshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & addbmm_out(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, batch1, batch2)))) { + case Backend::CPU: + return CPUType::addbmm_out(out, self, batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("addbmm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, batch1, batch2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addbmm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, batch1, batch2, beta, alpha); +#endif +} +static inline Tensor addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, batch1, batch2)))) { + case Backend::CPU: + return CPUType::addbmm(self, batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("addbmm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, batch1, batch2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addbmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, batch1, batch2, beta, alpha); +#endif +} +static inline Tensor & diag_out(Tensor & out, const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::diag_out(out, self, diagonal); + break; + default: + AT_ERROR("diag_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::diag", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, diagonal); +#endif +} +static inline Tensor diag(const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + 
return CPUType::diag(self, diagonal); + break; + default: + AT_ERROR("diag not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::diag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, diagonal); +#endif +} +static inline Tensor & cross_out(Tensor & out, const Tensor & self, const Tensor & other, c10::optional dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cross_out(out, self, other, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cross", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, out, self, other, dim); +#endif +} +static inline Tensor cross(const Tensor & self, const Tensor & other, c10::optional dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cross(self, other, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cross", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, self, other, dim); +#endif +} +static inline Tensor & triu_out(Tensor & out, const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::triu_out(out, self, diagonal); + break; + default: + AT_ERROR("triu_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triu", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, diagonal); +#endif +} +static inline Tensor triu(const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triu(self, diagonal); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, diagonal); +#endif +} +static inline Tensor & tril_out(Tensor & out, const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::tril_out(out, self, diagonal); + break; + default: + AT_ERROR("tril_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tril", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, diagonal); +#endif +} +static inline Tensor tril(const Tensor & self, int64_t diagonal) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tril(self, diagonal); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tril", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, diagonal); +#endif +} +static inline Tensor tril_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::tril_indices(row, col, offset, options); + break; + default: + AT_ERROR("tril_indices not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tril_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, row, col, offset, options); +#endif +} +static inline Tensor triu_indices(int64_t row, int64_t col, int64_t offset, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(options)))) { + case Backend::CPU: + return CPUType::triu_indices(row, col, offset, options); + break; + default: + AT_ERROR("triu_indices not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(options))); + } +#else + globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triu_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, row, col, offset, options); +#endif +} +static inline Tensor trace(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::trace(self); + break; + default: + AT_ERROR("trace not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::trace", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::ne_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne_out(out, self, other); + break; + default: + AT_ERROR("ne_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ne", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor ne(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::ne(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne(self, other); + break; + default: + AT_ERROR("ne not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::ne", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::ne_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne_out(out, self, other); + break; + default: + AT_ERROR("ne_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ne", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor ne(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::ne(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne(self, other); + break; + default: + AT_ERROR("ne not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ne", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::eq_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::eq_out(out, self, other); + break; + default: + AT_ERROR("eq_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eq", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor eq(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::eq(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::eq(self, other); + break; + default: + AT_ERROR("eq not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eq", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::eq_out(out, self, other); + break; + case 
Backend::QuantizedCPU: + return QuantizedCPUType::eq_out(out, self, other); + break; + default: + AT_ERROR("eq_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eq", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor eq(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::eq(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::eq(self, other); + break; + default: + AT_ERROR("eq not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eq", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::ge_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ge_out(out, self, other); + break; + default: + AT_ERROR("ge_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ge", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor ge(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::ge(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ge(self, other); + break; + default: + AT_ERROR("ge not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ge", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::ge_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ge_out(out, self, other); + break; + default: + AT_ERROR("ge_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ge", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor ge(const Tensor & self, const Tensor & other) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::ge(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ge(self, other); + break; + default: + AT_ERROR("ge not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ge", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & le_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::le_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le_out(out, self, other); + break; + default: + AT_ERROR("le_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::le", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor le(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::le(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le(self, other); + break; + default: + AT_ERROR("le not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::le", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::le_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le_out(out, self, other); + break; + default: + AT_ERROR("le_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::le", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor le(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::le(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le(self, other); + break; + default: + AT_ERROR("le not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::le", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::gt_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt_out(out, self, other); + break; + default: + AT_ERROR("gt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gt", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor gt(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::gt(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt(self, other); + break; + default: + AT_ERROR("gt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gt", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & gt_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::gt_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt_out(out, self, other); + break; + default: + AT_ERROR("gt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gt", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor gt(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::gt(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt(self, other); + break; + default: + AT_ERROR("gt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gt", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::lt_out(out, self, other); + break; + case 
Backend::QuantizedCPU: + return QuantizedCPUType::lt_out(out, self, other); + break; + default: + AT_ERROR("lt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lt", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor lt(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::lt(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::lt(self, other); + break; + default: + AT_ERROR("lt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lt", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::lt_out(out, self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::lt_out(out, self, other); + break; + default: + AT_ERROR("lt_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lt", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor lt(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::lt(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::lt(self, other); + break; + default: + AT_ERROR("lt not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lt", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & take_out(Tensor & out, const Tensor & self, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, index)))) { + case Backend::CPU: + return CPUType::take_out(out, self, index); + break; + default: + AT_ERROR("take_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::take", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, index); +#endif +} +static inline Tensor take(const Tensor & self, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
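// All of the comparison wrappers above (ne / eq / ge / le / gt / lt, in both
// Scalar and Tensor flavours) share the same two-path layout: with
// USE_STATIC_DISPATCH defined the call goes straight to the backend type
// (CPUType / QuantizedCPUType), otherwise the operator schema is looked up
// once in the c10 dispatcher and invoked unboxed.  A minimal illustrative
// sketch of calling them (the function name and values are hypothetical, and
// it assumes the usual ATen factories such as at::arange / at::zeros_like
// are available):
static inline void example_elementwise_comparisons() {
    at::Tensor x = at::arange(6);                       // [0, 1, 2, 3, 4, 5]
    at::Tensor gt_mask = at::gt(x, 2);                  // Scalar overload: x > 2
    at::Tensor ne_mask = at::ne(x, at::zeros_like(x));  // Tensor overload: x != 0
    // Both calls return an element-wise mask (uint8 or bool depending on the
    // libtorch version) that can be fed to masked_select / nonzero below.
    (void)gt_mask; (void)ne_mask;
}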
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, index)))) { + case Backend::CPU: + return CPUType::take(self, index); + break; + default: + AT_ERROR("take not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::take", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, index); +#endif +} +static inline Tensor & index_select_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, index)))) { + case Backend::CPU: + return CPUType::index_select_out(out, self, dim, index); + break; + default: + AT_ERROR("index_select_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_select", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, index); +#endif +} +static inline Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, index)))) { + case Backend::CPU: + return CPUType::index_select(self, dim, index); + break; + case Backend::SparseCPU: + return SparseCPUType::index_select(self, dim, index); + break; + default: + AT_ERROR("index_select not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_select", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & index_select_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_select_out(out, self, dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_select", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, index); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor index_select(const Tensor & self, Dimname dim, const Tensor & index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_select(self, dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::index_select", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index); +#endif +} +#endif +static inline Tensor & masked_select_out(Tensor & out, const Tensor & self, const Tensor & mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, mask)))) { + case Backend::CPU: + return CPUType::masked_select_out(out, self, mask); + break; + default: + AT_ERROR("masked_select_out not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, mask))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::masked_select", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, mask); +#endif +} +static inline Tensor masked_select(const Tensor & self, const Tensor & mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, mask)))) { + case Backend::CPU: + return CPUType::masked_select(self, mask); + break; + default: + AT_ERROR("masked_select not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, mask))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::masked_select", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, mask); +#endif +} +static inline Tensor & nonzero_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::nonzero_out(out, self); + break; + default: + AT_ERROR("nonzero_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nonzero", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor nonzero(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::nonzero(self); + break; + default: + AT_ERROR("nonzero not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nonzero", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline std::vector nonzero_numpy(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nonzero_numpy(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nonzero_numpy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, self); +#endif +} +static inline Tensor & gather_out(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, index)))) { + case Backend::CPU: + return CPUType::gather_out(out, self, dim, index, sparse_grad); + break; + default: + AT_ERROR("gather_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gather", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, index, sparse_grad); +#endif +} +static inline Tensor gather(const Tensor & self, int64_t dim, const Tensor & index, bool 
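// take / index_select / masked_select / nonzero above are the gather-style
// selection kernels: each consumes an index or mask tensor and materialises a
// new tensor rather than a view.  Illustrative sketch only (hypothetical
// names; assumes at::arange with a TensorOptions argument is available for
// building an int64 index tensor):
static inline void example_selection_ops() {
    at::Tensor x = at::arange(10);                            // [0 .. 9]
    at::Tensor idx = at::arange(0, 10, 3, at::kLong);         // [0, 3, 6, 9]
    at::Tensor picked = at::index_select(x, /*dim=*/0, idx);  // elements 0, 3, 6, 9
    at::Tensor mask = at::gt(x, 6);                           // comparison wrapper from above
    at::Tensor large = at::masked_select(x, mask);            // 1-D tensor of selected values
    at::Tensor where = at::nonzero(mask);                     // indices at which the mask is set
    (void)picked; (void)large; (void)where;
}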
sparse_grad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, index)))) { + case Backend::CPU: + return CPUType::gather(self, dim, index, sparse_grad); + break; + default: + AT_ERROR("gather not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, index))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gather", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, sparse_grad); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor & gather_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gather_out(out, self, dim, index, sparse_grad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gather", "dimname_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim, index, sparse_grad); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline Tensor gather(const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gather(self, dim, index, sparse_grad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::gather", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, sparse_grad); +#endif +} +#endif +static inline Tensor _gather_sparse_backward(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & grad) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_gather_sparse_backward(self, dim, index, grad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_gather_sparse_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim, index, grad); +#endif +} +static inline Tensor & addcmul_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcmul_out(out, self, tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addcmul", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, tensor1, tensor2, value); +#endif +} +static inline Tensor addcmul(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcmul(self, tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addcmul", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, tensor1, tensor2, value); +#endif +} +static inline Tensor & addcdiv_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcdiv_out(out, self, tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addcdiv", 
"out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, tensor1, tensor2, value); +#endif +} +static inline Tensor addcdiv(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcdiv(self, tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::addcdiv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, tensor1, tensor2, value); +#endif +} +static inline std::tuple lstsq_out(Tensor & X, Tensor & qr, const Tensor & self, const Tensor & A) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(X, qr, self, A)))) { + case Backend::CPU: + return CPUType::lstsq_out(X, qr, self, A); + break; + default: + AT_ERROR("lstsq_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(X, qr, self, A))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lstsq", "X"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &>( + op, X, qr, self, A); +#endif +} +static inline std::tuple lstsq(const Tensor & self, const Tensor & A) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, A)))) { + case Backend::CPU: + return CPUType::lstsq(self, A); + break; + default: + AT_ERROR("lstsq not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, A))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lstsq", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &>( + op, self, A); +#endif +} +static inline std::tuple triangular_solve_out(Tensor & X, Tensor & M, const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triangular_solve_out(X, M, self, A, upper, transpose, unitriangular); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triangular_solve", "X"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &, bool, bool, bool>( + op, X, M, self, A, upper, transpose, unitriangular); +#endif +} +static inline std::tuple triangular_solve(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triangular_solve(self, A, upper, transpose, unitriangular); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::triangular_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool, bool, bool>( + op, self, A, upper, transpose, unitriangular); +#endif +} +static inline std::tuple _triangular_solve_helper(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, A)))) { + case Backend::CPU: + return CPUType::_triangular_solve_helper(self, A, upper, transpose, unitriangular); + break; + default: + AT_ERROR("_triangular_solve_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, A))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_triangular_solve_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool, bool, bool>( + op, self, A, upper, transpose, unitriangular); +#endif +} +static inline std::tuple symeig_out(Tensor & e, Tensor & V, const Tensor & self, bool eigenvectors, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::symeig_out(e, V, self, eigenvectors, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::symeig", "e"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, bool, bool>( + op, e, V, self, eigenvectors, upper); +#endif +} +static inline std::tuple symeig(const Tensor & self, bool eigenvectors, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::symeig(self, eigenvectors, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::symeig", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, eigenvectors, upper); +#endif +} +static inline std::tuple _symeig_helper(const Tensor & self, bool eigenvectors, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_symeig_helper(self, eigenvectors, upper); + break; + default: + AT_ERROR("_symeig_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_symeig_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, eigenvectors, upper); +#endif +} +static inline std::tuple eig_out(Tensor & e, Tensor & v, const Tensor & self, bool eigenvectors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(e, v, self)))) { + case Backend::CPU: + return CPUType::eig_out(e, v, self, eigenvectors); + break; + default: + AT_ERROR("eig_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(e, v, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eig", "e"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, bool>( + op, e, v, self, eigenvectors); +#endif +} +static inline std::tuple eig(const Tensor & self, bool eigenvectors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::eig(self, eigenvectors); + break; + default: + AT_ERROR("eig not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::eig", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, self, eigenvectors); +#endif +} +static inline std::tuple svd_out(Tensor & U, Tensor & S, Tensor & V, const Tensor & self, bool some, bool compute_uv) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::svd_out(U, S, V, self, some, compute_uv); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::svd", "U"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, bool, bool>( + op, U, S, V, self, some, compute_uv); +#endif +} +static inline std::tuple svd(const Tensor & self, bool some, bool compute_uv) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::svd(self, some, compute_uv); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::svd", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, some, compute_uv); +#endif +} +static inline std::tuple _svd_helper(const Tensor & self, bool some, bool compute_uv) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_svd_helper(self, some, compute_uv); + break; + default: + AT_ERROR("_svd_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_svd_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, some, compute_uv); +#endif +} +static inline Tensor & cholesky_out(Tensor & out, const Tensor & self, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky_out(out, self, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, upper); +#endif +} +static inline Tensor cholesky(const Tensor & self, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky(self, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, upper); +#endif +} +static inline Tensor _cholesky_helper(const Tensor & self, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_cholesky_helper(self, upper); + break; + default: + AT_ERROR("_cholesky_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cholesky_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, upper); +#endif +} +static inline Tensor 
& cholesky_solve_out(Tensor & out, const Tensor & self, const Tensor & input2, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky_solve_out(out, self, input2, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky_solve", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, input2, upper); +#endif +} +static inline Tensor cholesky_solve(const Tensor & self, const Tensor & input2, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky_solve(self, input2, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, input2, upper); +#endif +} +static inline Tensor _cholesky_solve_helper(const Tensor & self, const Tensor & A, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, A)))) { + case Backend::CPU: + return CPUType::_cholesky_solve_helper(self, A, upper); + break; + default: + AT_ERROR("_cholesky_solve_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, A))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cholesky_solve_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, A, upper); +#endif +} +static inline std::tuple solve(const Tensor & self, const Tensor & A) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::solve(self, A); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &>( + op, self, A); +#endif +} +static inline std::tuple solve_out(Tensor & solution, Tensor & lu, const Tensor & self, const Tensor & A) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::solve_out(solution, lu, self, A); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::solve", "solution"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &>( + op, solution, lu, self, A); +#endif +} +static inline std::tuple _solve_helper(const Tensor & self, const Tensor & A) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, A)))) { + case Backend::CPU: + return CPUType::_solve_helper(self, A); + break; + default: + AT_ERROR("_solve_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, A))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_solve_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &>( + op, self, A); +#endif +} +static inline Tensor & cholesky_inverse_out(Tensor & out, const Tensor & self, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
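// cholesky / cholesky_solve / solve above make up the dense solver path:
// factor a symmetric positive-definite matrix once, then reuse the factor for
// several right-hand sides.  Illustrative sketch (hypothetical names; assumes
// at::randn, at::matmul and at::eye are available, and builds an SPD matrix
// by construction):
static inline void example_cholesky_solve() {
    at::Tensor M = at::randn({4, 4});
    at::Tensor A = at::matmul(M, M.t()) + at::eye(4).mul(4.0); // SPD by construction
    at::Tensor b = at::randn({4, 2});
    at::Tensor L = at::cholesky(A, /*upper=*/false);           // lower-triangular factor
    at::Tensor x = at::cholesky_solve(b, L, /*upper=*/false);  // solves A x = b using the factor
    (void)x;
}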
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::cholesky_inverse_out(out, self, upper); + break; + default: + AT_ERROR("cholesky_inverse_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky_inverse", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, upper); +#endif +} +static inline Tensor cholesky_inverse(const Tensor & self, bool upper) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::cholesky_inverse(self, upper); + break; + default: + AT_ERROR("cholesky_inverse not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::cholesky_inverse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, upper); +#endif +} +static inline std::tuple qr_out(Tensor & Q, Tensor & R, const Tensor & self, bool some) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::qr_out(Q, R, self, some); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::qr", "Q"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, bool>( + op, Q, R, self, some); +#endif +} +static inline std::tuple qr(const Tensor & self, bool some) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::qr(self, some); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::qr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, self, some); +#endif +} +static inline std::tuple _qr_helper(const Tensor & self, bool some) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_qr_helper(self, some); + break; + default: + AT_ERROR("_qr_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_qr_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, self, some); +#endif +} +static inline std::tuple geqrf_out(Tensor & a, Tensor & tau, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(a, tau, self)))) { + case Backend::CPU: + return CPUType::geqrf_out(a, tau, self); + break; + default: + AT_ERROR("geqrf_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(a, tau, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::geqrf", "a"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &>( + op, a, tau, self); +#endif +} +static inline std::tuple geqrf(const 
Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::geqrf(self); + break; + default: + AT_ERROR("geqrf not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::geqrf", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, self); +#endif +} +static inline Tensor & orgqr_out(Tensor & out, const Tensor & self, const Tensor & input2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, input2)))) { + case Backend::CPU: + return CPUType::orgqr_out(out, self, input2); + break; + default: + AT_ERROR("orgqr_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, input2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::orgqr", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, input2); +#endif +} +static inline Tensor orgqr(const Tensor & self, const Tensor & input2) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, input2)))) { + case Backend::CPU: + return CPUType::orgqr(self, input2); + break; + default: + AT_ERROR("orgqr not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, input2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::orgqr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, input2); +#endif +} +static inline Tensor & ormqr_out(Tensor & out, const Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, input2, input3)))) { + case Backend::CPU: + return CPUType::ormqr_out(out, self, input2, input3, left, transpose); + break; + default: + AT_ERROR("ormqr_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, input2, input3))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ormqr", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, input2, input3, left, transpose); +#endif +} +static inline Tensor ormqr(const Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, input2, input3)))) { + case Backend::CPU: + return CPUType::ormqr(self, input2, input3, left, transpose); + break; + default: + AT_ERROR("ormqr not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, input2, input3))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::ormqr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, 
self, input2, input3, left, transpose); +#endif +} +static inline std::tuple _lu_with_info(const Tensor & self, bool pivot, bool check_errors) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_lu_with_info(self, pivot, check_errors); + break; + default: + AT_ERROR("_lu_with_info not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_lu_with_info", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, self, pivot, check_errors); +#endif +} +static inline Tensor & lu_solve_out(Tensor & out, const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lu_solve_out(out, self, LU_data, LU_pivots); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lu_solve", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, LU_data, LU_pivots); +#endif +} +static inline Tensor lu_solve(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lu_solve(self, LU_data, LU_pivots); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lu_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, LU_data, LU_pivots); +#endif +} +static inline Tensor _lu_solve_helper(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, LU_data, LU_pivots)))) { + case Backend::CPU: + return CPUType::_lu_solve_helper(self, LU_data, LU_pivots); + break; + default: + AT_ERROR("_lu_solve_helper not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, LU_data, LU_pivots))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_lu_solve_helper", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, LU_data, LU_pivots); +#endif +} +static inline Tensor & multinomial_out(Tensor & out, const Tensor & self, int64_t num_samples, bool replacement, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::multinomial_out(out, self, num_samples, replacement, generator); + break; + default: + AT_ERROR("multinomial_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multinomial", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, num_samples, replacement, generator); +#endif +} +static inline Tensor multinomial(const Tensor & self, int64_t num_samples, bool replacement, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
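// _lu_with_info / lu_solve above expose the LU route for general square
// systems.  Sketch only: the leading-underscore function is the internal
// helper that the Python torch.lu wrapper builds on, so the assumption here
// is the usual (LU factors, pivots, info) tuple layout; names are
// hypothetical and at::randn is assumed to be available.
static inline void example_lu_solve() {
    at::Tensor A = at::randn({3, 3});
    at::Tensor b = at::randn({3, 2});
    at::Tensor LU, pivots, infos;
    std::tie(LU, pivots, infos) = at::_lu_with_info(A, /*pivot=*/true, /*check_errors=*/false);
    at::Tensor x = at::lu_solve(b, LU, pivots);   // solves A x = b from the packed factors
    (void)x; (void)infos;
}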
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::multinomial(self, num_samples, replacement, generator); + break; + default: + AT_ERROR("multinomial not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multinomial", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, num_samples, replacement, generator); +#endif +} +static inline std::tuple _multinomial_alias_setup(const Tensor & probs) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(probs)))) { + case Backend::CPU: + return CPUType::_multinomial_alias_setup(probs); + break; + default: + AT_ERROR("_multinomial_alias_setup not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(probs))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_multinomial_alias_setup", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, probs); +#endif +} +static inline Tensor _multinomial_alias_draw(const Tensor & J, const Tensor & q, int64_t num_samples, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(J, q)))) { + case Backend::CPU: + return CPUType::_multinomial_alias_draw(J, q, num_samples, generator); + break; + default: + AT_ERROR("_multinomial_alias_draw not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(J, q))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_multinomial_alias_draw", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, J, q, num_samples, generator); +#endif +} +static inline Tensor & lgamma_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::lgamma_out(out, self); + break; + default: + AT_ERROR("lgamma_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lgamma", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor lgamma(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::lgamma(self); + break; + default: + AT_ERROR("lgamma not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lgamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & digamma_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::digamma_out(out, self);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::digamma", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &>(
+        op, out, self);
+#endif
+}
+static inline Tensor digamma(const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::digamma(self);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::digamma", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>(
+        op, self);
+#endif
+}
+static inline Tensor & polygamma_out(Tensor & out, int64_t n, const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::polygamma_out(out, n, self);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::polygamma", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, int64_t, const Tensor &>(
+        op, out, n, self);
+#endif
+}
+static inline Tensor polygamma(int64_t n, const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::polygamma(n, self);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::polygamma", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, int64_t, const Tensor &>(
+        op, n, self);
+#endif
+}
+static inline Tensor erfinv(const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) {
+        case Backend::CPU:
+            return CPUType::erfinv(self);
+            break;
+        default:
+            AT_ERROR("erfinv not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::erfinv", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>(
+        op, self);
+#endif
+}
+static inline Tensor & erfinv_out(Tensor & out, const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) {
+        case Backend::CPU:
+            return CPUType::erfinv_out(out, self);
+            break;
+        default:
+            AT_ERROR("erfinv_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::erfinv", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &>(
+        op, out, self);
+#endif
+}
+static inline Tensor sign(const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    return TypeDefault::sign(self);
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::sign", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &>(
+        op, self);
+#endif
+}
+static inline Tensor & sign_out(Tensor & out, const Tensor & self) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) {
+        case Backend::CPU:
+            return CPUType::sign_out(out, self);
+            break;
+        default:
+            AT_ERROR("sign_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self)));
+    }
+#else
+    static c10::OperatorHandle op =
c10::Dispatcher::singleton() + .findSchema({"aten::sign", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor dist(const Tensor & self, const Tensor & other, Scalar p) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::dist(self, other, p); + break; + default: + AT_ERROR("dist not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::dist", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other, p); +#endif +} +static inline Tensor & atan2_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan2_out(out, self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::atan2", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor atan2(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan2(self, other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::atan2", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, end)))) { + case Backend::CPU: + return CPUType::lerp_out(out, self, end, weight); + break; + default: + AT_ERROR("lerp_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, end))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lerp", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, end, weight); +#endif +} +static inline Tensor & lerp_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, end, weight)))) { + case Backend::CPU: + return CPUType::lerp_out(out, self, end, weight); + break; + default: + AT_ERROR("lerp_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, end, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lerp", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, end, weight); +#endif +} +static inline Tensor lerp(const Tensor & self, const Tensor & end, Scalar weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, end)))) { + case Backend::CPU: + return CPUType::lerp(self, end, weight); + break; + default: + AT_ERROR("lerp not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self, end))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lerp", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, end, weight); +#endif +} +static inline Tensor lerp(const Tensor & self, const Tensor & end, const Tensor & weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, end, weight)))) { + case Backend::CPU: + return CPUType::lerp(self, end, weight); + break; + default: + AT_ERROR("lerp not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, end, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::lerp", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, end, weight); +#endif +} +static inline Tensor & histc_out(Tensor & out, const Tensor & self, int64_t bins, Scalar min, Scalar max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::histc_out(out, self, bins, min, max); + break; + default: + AT_ERROR("histc_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::histc", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, bins, min, max); +#endif +} +static inline Tensor histc(const Tensor & self, int64_t bins, Scalar min, Scalar max) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::histc(self, bins, min, max); + break; + default: + AT_ERROR("histc not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::histc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, bins, min, max); +#endif +} +static inline Tensor & fmod_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::fmod_out(out, self, other); + break; + default: + AT_ERROR("fmod_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fmod", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor fmod(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::fmod(self, other); + break; + default: + AT_ERROR("fmod not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fmod", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & fmod_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::fmod_out(out, self, other); + break; + default: + AT_ERROR("fmod_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fmod", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor fmod(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::fmod(self, other); + break; + default: + AT_ERROR("fmod not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fmod", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & remainder_out(Tensor & out, const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::remainder_out(out, self, other); + break; + default: + AT_ERROR("remainder_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::remainder", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor remainder(const Tensor & self, Scalar other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::remainder(self, other); + break; + default: + AT_ERROR("remainder not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::remainder", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & remainder_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::remainder_out(out, self, other); + break; + default: + AT_ERROR("remainder_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::remainder", "Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor remainder(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::remainder(self, other); + break; + default: + AT_ERROR("remainder not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::remainder", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor & min_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::min_out(out, self, other); + break; + default: + AT_ERROR("min_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, other); +#endif +} +static inline Tensor min(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::min(self, other); + break; + default: + AT_ERROR("min not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", "other"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, other); +#endif +} +static inline Tensor min(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::min(self); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::min(self); + break; + default: + AT_ERROR("min not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::min", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline Tensor & max_out(Tensor & out, const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, other)))) { + case Backend::CPU: + return CPUType::max_out(out, self, other); + break; + default: + AT_ERROR("max_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, 
out, self, other); +#endif +} +static inline Tensor max(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::max(self, other); + break; + default: + AT_ERROR("max not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", "other"}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &>( + op, self, other); +#endif +} +static inline Tensor max(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::max(self); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::max(self); + break; + default: + AT_ERROR("max not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>( + op, self); +#endif +} +static inline Tensor median(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::median(self); + break; + default: + AT_ERROR("median not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::median", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>( + op, self); +#endif +} +static inline std::tuple<Tensor &,Tensor &> sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(values, indices, self)))) { + case Backend::CPU: + return CPUType::sort_out(values, indices, self, dim, descending); + break; + default: + AT_ERROR("sort_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(values, indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sort", "values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, values, indices, self, dim, descending); +#endif +} +static inline std::tuple<Tensor,Tensor> sort(const Tensor & self, int64_t dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::sort(self, dim, descending); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::sort(self, dim, descending); + break; + default: + AT_ERROR("sort not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sort", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, int64_t, bool>( + op, self, dim, descending); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple<Tensor &,Tensor &> sort_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sort_out(values, indices, self, dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sort", "dimname_values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, Dimname, bool>( + op, values, indices, self, dim, descending); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +static inline std::tuple<Tensor,Tensor> sort(const Tensor & self, Dimname dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sort(self, dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sort", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, Dimname, bool>( + op, self, dim, descending); +#endif +} +#endif +static inline Tensor argsort(const Tensor & self, int64_t dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argsort(self, dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::argsort", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, int64_t, bool>( + op, self, dim, descending); +#endif +} +#ifdef BUILD_NAMEDTENSOR +static inline Tensor argsort(const Tensor & self, Dimname dim, bool descending) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argsort(self, dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::argsort", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, Dimname, bool>( + op, self, dim, descending); +#endif +} +#endif +static inline std::tuple<Tensor &,Tensor &> topk_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(values, indices, self)))) { + case Backend::CPU: + return CPUType::topk_out(values, indices, self, k, dim, largest, sorted); + break; + default: + AT_ERROR("topk_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(values, indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::topk", "values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, int64_t, int64_t, bool, bool>( + op, values, indices, self, k, dim, largest, sorted); +#endif +} +static inline std::tuple<Tensor,Tensor> topk(const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::topk(self, k, dim, largest, sorted); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::topk(self, k, dim, largest, sorted); + break; + default: + AT_ERROR("topk not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::topk", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, int64_t, int64_t, bool, bool>( + op, self, k, dim, largest, sorted); +#endif +} +static inline Tensor all(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::all", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>( + op, self); +#endif +} +static inline Tensor any(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::any", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &>( + op, self); +#endif +} +static inline Tensor & renorm_out(Tensor & out, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::renorm_out(out, self, p, dim, maxnorm); + break; + default: + AT_ERROR("renorm_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::renorm", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, Scalar, int64_t, Scalar>( + op, out, self, p, dim, maxnorm); +#endif +} +static inline Tensor renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::renorm(self, p, dim, maxnorm); + break; + default: + AT_ERROR("renorm not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::renorm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, Scalar, int64_t, Scalar>( + op, self, p, dim, maxnorm); +#endif +} +static inline bool equal(const Tensor & self, const Tensor & other) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, other)))) { + case Backend::CPU: + return CPUType::equal(self, other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::equal(self, other); + break; + default: + AT_ERROR("equal not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, other))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::equal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<bool, const Tensor &, const Tensor &>( + op, self, other); +#endif +} +static inline Tensor & pow_out(Tensor & out, const Tensor & self, const Tensor & exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, exponent)))) { + case Backend::CPU: + return CPUType::pow_out(out, self, 
exponent); + break; + default: + AT_ERROR("pow_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, exponent))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Tensor_Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, exponent); +#endif +} +static inline Tensor pow(const Tensor & self, const Tensor & exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, exponent)))) { + case Backend::CPU: + return CPUType::pow(self, exponent); + break; + default: + AT_ERROR("pow not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, exponent))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Tensor_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, exponent); +#endif +} +static inline Tensor & pow_out(Tensor & out, Scalar self, const Tensor & exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, exponent)))) { + case Backend::CPU: + return CPUType::pow_out(out, self, exponent); + break; + default: + AT_ERROR("pow_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, exponent))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Scalar_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, exponent); +#endif +} +static inline Tensor pow(Scalar self, const Tensor & exponent) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(exponent)))) { + case Backend::CPU: + return CPUType::pow(self, exponent); + break; + default: + AT_ERROR("pow not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(exponent))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::pow", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, exponent); +#endif +} +static inline Tensor & normal_out(Tensor & out, const Tensor & mean, double std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, mean)))) { + case Backend::CPU: + return CPUType::normal_out(out, mean, std, generator); + break; + default: + AT_ERROR("normal_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, mean))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "Tensor_float_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, mean, std, generator); +#endif +} +static inline Tensor normal(const Tensor & mean, double std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(mean)))) { + case Backend::CPU: + return CPUType::normal(mean, std, generator); + break; + default: + 
AT_ERROR("normal not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(mean))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "Tensor_float"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, mean, std, generator); +#endif +} +static inline Tensor & normal_out(Tensor & out, double mean, const Tensor & std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, std)))) { + case Backend::CPU: + return CPUType::normal_out(out, mean, std, generator); + break; + default: + AT_ERROR("normal_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, std))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "float_Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, mean, std, generator); +#endif +} +static inline Tensor normal(double mean, const Tensor & std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(std)))) { + case Backend::CPU: + return CPUType::normal(mean, std, generator); + break; + default: + AT_ERROR("normal not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(std))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "float_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, mean, std, generator); +#endif +} +static inline Tensor & normal_out(Tensor & out, const Tensor & mean, const Tensor & std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, mean, std)))) { + case Backend::CPU: + return CPUType::normal_out(out, mean, std, generator); + break; + default: + AT_ERROR("normal_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, mean, std))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "Tensor_Tensor_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, mean, std, generator); +#endif +} +static inline Tensor normal(const Tensor & mean, const Tensor & std, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(mean, std)))) { + case Backend::CPU: + return CPUType::normal(mean, std, generator); + break; + default: + AT_ERROR("normal not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(mean, std))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "Tensor_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, mean, std, generator); +#endif +} +static inline Tensor normal(double mean, double std, IntArrayRef size, Generator * generator, const TensorOptions & options) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::normal(mean, std, size, generator, options); +#else + 
globalLegacyTypeDispatch().initForTensorTypeSet(c10::detail::multi_dispatch_tensor_type_set(options)); + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "float_float"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, mean, std, size, generator, options); +#endif +} +static inline Tensor & normal_out(Tensor & out, double mean, double std, IntArrayRef size, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::normal_out(out, mean, std, size, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::normal", "float_float_out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, mean, std, size, generator); +#endif +} +static inline Tensor alias(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::alias(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::alias", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self); +#endif +} +static inline Tensor _addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, vec1, vec2)))) { + case Backend::CPU: + return CPUType::_addr(self, vec1, vec2, beta, alpha); + break; + default: + AT_ERROR("_addr not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, vec1, vec2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_addr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, vec1, vec2, beta, alpha); +#endif +} +static inline Tensor & _addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, vec1, vec2)))) { + case Backend::CPU: + return CPUType::_addr_(self, vec1, vec2, beta, alpha); + break; + default: + AT_ERROR("_addr_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, vec1, vec2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_addr_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, vec1, vec2, beta, alpha); +#endif +} +static inline Tensor & _addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, vec1, vec2)))) { + case Backend::CPU: + return CPUType::_addr_out(out, self, vec1, vec2, beta, alpha); + break; + default: + AT_ERROR("_addr_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, vec1, vec2))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_addr", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, vec1, vec2, beta, alpha); +#endif +} +static inline Tensor & _index_copy_(Tensor & 
self, int64_t dim, const Tensor & index, const Tensor & source) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, index, source)))) { + case Backend::CPU: + return CPUType::_index_copy_(self, dim, index, source); + break; + default: + AT_ERROR("_index_copy_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, index, source))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_index_copy_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, dim, index, source); +#endif +} +static inline Tensor _cumsum(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_cumsum(self, dim); + break; + default: + AT_ERROR("_cumsum not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cumsum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim); +#endif +} +static inline Tensor & _cumsum_out(Tensor & out, const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::_cumsum_out(out, self, dim); + break; + default: + AT_ERROR("_cumsum_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cumsum", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim); +#endif +} +static inline Tensor _cumprod(const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_cumprod(self, dim); + break; + default: + AT_ERROR("_cumprod not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cumprod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim); +#endif +} +static inline Tensor & _cumprod_out(Tensor & out, const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::_cumprod_out(out, self, dim); + break; + default: + AT_ERROR("_cumprod_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cumprod", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim); +#endif +} +static inline Tensor _var(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_var(self, unbiased); + break; + default: + AT_ERROR("_var not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_var", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>( + op, self, unbiased); +#endif +} +static inline Tensor _std(const Tensor & self, bool unbiased) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_std(self, unbiased); + break; + default: + AT_ERROR("_std not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_std", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, bool>( + op, self, unbiased); +#endif +} +static inline Tensor _cat(TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(tensors)))) { + case Backend::CPU: + return CPUType::_cat(tensors, dim); + break; + default: + AT_ERROR("_cat not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(tensors))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cat", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, TensorList, int64_t>( + op, tensors, dim); +#endif +} +static inline Tensor & _cat_out(Tensor & out, TensorList tensors, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, tensors)))) { + case Backend::CPU: + return CPUType::_cat_out(out, tensors, dim); + break; + default: + AT_ERROR("_cat_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, tensors))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_cat", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, TensorList, int64_t>( + op, out, tensors, dim); +#endif +} +static inline std::tuple<Tensor,Tensor> _mode(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_mode(self, dim, keepdim); + break; + default: + AT_ERROR("_mode not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_mode", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple<Tensor &,Tensor &> _mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(values, indices, self)))) { + case Backend::CPU: + return 
CPUType::_mode_out(values, indices, self, dim, keepdim); + break; + default: + AT_ERROR("_mode_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(values, indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_mode", "values"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, values, indices, self, dim, keepdim); +#endif +} +static inline std::tuple<Tensor,Tensor> _max(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_max(self, dim, keepdim); + break; + default: + AT_ERROR("_max not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_max", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple<Tensor &,Tensor &> _max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(max, max_indices, self)))) { + case Backend::CPU: + return CPUType::_max_out(max, max_indices, self, dim, keepdim); + break; + default: + AT_ERROR("_max_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(max, max_indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_max", "max"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, max, max_indices, self, dim, keepdim); +#endif +} +static inline std::tuple<Tensor,Tensor> _min(const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_min(self, dim, keepdim); + break; + default: + AT_ERROR("_min not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_min", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, int64_t, bool>( + op, self, dim, keepdim); +#endif +} +static inline std::tuple<Tensor &,Tensor &> _min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(min, min_indices, self)))) { + case Backend::CPU: + return CPUType::_min_out(min, min_indices, self, dim, keepdim); + break; + default: + AT_ERROR("_min_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(min, min_indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_min", "min"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, int64_t, bool>( + op, min, min_indices, 
self, dim, keepdim); +#endif +} +static inline Tensor & binary_cross_entropy_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, target, weight)))) { + case Backend::CPU: + return CPUType::binary_cross_entropy_out(out, self, target, weight, reduction); + break; + default: + AT_ERROR("binary_cross_entropy_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::binary_cross_entropy", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, weight, reduction); +#endif +} +static inline Tensor binary_cross_entropy(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target, weight)))) { + case Backend::CPU: + return CPUType::binary_cross_entropy(self, target, weight, reduction); + break; + default: + AT_ERROR("binary_cross_entropy not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::binary_cross_entropy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, target, weight, reduction); +#endif +} +static inline Tensor & binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight)))) { + case Backend::CPU: + return CPUType::binary_cross_entropy_backward_out(grad_input, grad_output, self, target, weight, reduction); + break; + default: + AT_ERROR("binary_cross_entropy_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::binary_cross_entropy_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, weight, reduction); +#endif +} +static inline Tensor binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight)))) { + case Backend::CPU: + return CPUType::binary_cross_entropy_backward(grad_output, self, target, weight, reduction); + break; + default: + AT_ERROR("binary_cross_entropy_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::binary_cross_entropy_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, target, weight, reduction); +#endif +} +static inline Tensor & mse_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mse_loss_out(out, self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mse_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, reduction); +#endif +} +static inline Tensor mse_loss(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mse_loss(self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mse_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline Tensor & mse_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target)))) { + case Backend::CPU: + return CPUType::mse_loss_backward_out(grad_input, grad_output, self, target, reduction); + break; + default: + AT_ERROR("mse_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mse_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, reduction); +#endif +} +static inline Tensor mse_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target)))) { + case Backend::CPU: + return CPUType::mse_loss_backward(grad_output, self, target, reduction); + break; + default: + AT_ERROR("mse_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mse_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction); +#endif +} +static inline Tensor & l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::l1_loss_out(out, self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::l1_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, reduction); +#endif +} +static inline Tensor l1_loss(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::l1_loss(self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::l1_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline Tensor & l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target)))) { + case Backend::CPU: + return CPUType::l1_loss_backward_out(grad_input, grad_output, self, target, reduction); + break; + default: + AT_ERROR("l1_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::l1_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, reduction); +#endif +} +static inline Tensor l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::l1_loss_backward(grad_output, self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::l1_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction); +#endif +} +static inline Tensor & multi_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, target, weight)))) { + case Backend::CPU: + return CPUType::multi_margin_loss_out(out, self, target, p, margin, weight, reduction); + break; + default: + AT_ERROR("multi_margin_loss_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multi_margin_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, p, margin, weight, reduction); +#endif +} +static inline Tensor multi_margin_loss(const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target, weight)))) { + case Backend::CPU: + return CPUType::multi_margin_loss(self, target, p, margin, weight, reduction); + break; + default: + AT_ERROR("multi_margin_loss not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multi_margin_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, target, p, margin, weight, reduction); +#endif +} +static inline Tensor & multi_margin_loss_backward_out(Tensor & grad_input, 
const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight)))) { + case Backend::CPU: + return CPUType::multi_margin_loss_backward_out(grad_input, grad_output, self, target, p, margin, weight, reduction); + break; + default: + AT_ERROR("multi_margin_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multi_margin_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, p, margin, weight, reduction); +#endif +} +static inline Tensor multi_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight)))) { + case Backend::CPU: + return CPUType::multi_margin_loss_backward(grad_output, self, target, p, margin, weight, reduction); + break; + default: + AT_ERROR("multi_margin_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multi_margin_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, target, p, margin, weight, reduction); +#endif +} +static inline Tensor & multilabel_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::multilabel_margin_loss_out(out, self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, reduction); +#endif +} +static inline Tensor multilabel_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::multilabel_margin_loss(self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline std::tuple multilabel_margin_loss_forward_out(Tensor & output, Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, is_target, self, target)))) { + case Backend::CPU: + return CPUType::multilabel_margin_loss_forward_out(output, is_target, self, target, reduction); + break; + default: + AT_ERROR("multilabel_margin_loss_forward_out not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, is_target, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &, int64_t>( + op, output, is_target, self, target, reduction); +#endif +} +static inline std::tuple multilabel_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target)))) { + case Backend::CPU: + return CPUType::multilabel_margin_loss_forward(self, target, reduction); + break; + default: + AT_ERROR("multilabel_margin_loss_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, int64_t>( + op, self, target, reduction); +#endif +} +static inline Tensor & multilabel_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, is_target)))) { + case Backend::CPU: + return CPUType::multilabel_margin_loss_backward_out(grad_input, grad_output, self, target, reduction, is_target); + break; + default: + AT_ERROR("multilabel_margin_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, is_target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, reduction, is_target); +#endif +} +static inline Tensor multilabel_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, is_target)))) { + case Backend::CPU: + return CPUType::multilabel_margin_loss_backward(grad_output, self, target, reduction, is_target); + break; + default: + AT_ERROR("multilabel_margin_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, is_target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::multilabel_margin_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction, is_target); +#endif +} +static inline Tensor & nll_loss_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
return TypeDefault::nll_loss_out(out, self, target, weight, reduction, ignore_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, weight, reduction, ignore_index); +#endif +} +static inline Tensor nll_loss(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nll_loss(self, target, weight, reduction, ignore_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, target, weight, reduction, ignore_index); +#endif +} +static inline std::tuple nll_loss_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, total_weight, self, target, weight)))) { + case Backend::CPU: + return CPUType::nll_loss_forward_out(output, total_weight, self, target, weight, reduction, ignore_index); + break; + default: + AT_ERROR("nll_loss_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, total_weight, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t>( + op, output, total_weight, self, target, weight, reduction, ignore_index); +#endif +} +static inline std::tuple nll_loss_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target, weight)))) { + case Backend::CPU: + return CPUType::nll_loss_forward(self, target, weight, reduction, ignore_index); + break; + default: + AT_ERROR("nll_loss_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t>( + op, self, target, weight, reduction, ignore_index); +#endif +} +static inline Tensor & nll_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight, total_weight)))) { + case Backend::CPU: + return CPUType::nll_loss_backward_out(grad_input, grad_output, self, target, weight, reduction, ignore_index, total_weight); + break; + default: + 
AT_ERROR("nll_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight, total_weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, weight, reduction, ignore_index, total_weight); +#endif +} +static inline Tensor nll_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight, total_weight)))) { + case Backend::CPU: + return CPUType::nll_loss_backward(grad_output, self, target, weight, reduction, ignore_index, total_weight); + break; + default: + AT_ERROR("nll_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight, total_weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, target, weight, reduction, ignore_index, total_weight); +#endif +} +static inline Tensor & nll_loss2d_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nll_loss2d_out(out, self, target, weight, reduction, ignore_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, weight, reduction, ignore_index); +#endif +} +static inline Tensor nll_loss2d(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nll_loss2d(self, target, weight, reduction, ignore_index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, target, weight, reduction, ignore_index); +#endif +} +static inline std::tuple nll_loss2d_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, total_weight, self, target, weight)))) { + case Backend::CPU: + return CPUType::nll_loss2d_forward_out(output, total_weight, self, target, weight, reduction, ignore_index); + break; + default: + AT_ERROR("nll_loss2d_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, total_weight, self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, 
Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t>( + op, output, total_weight, self, target, weight, reduction, ignore_index); +#endif +} +static inline std::tuple nll_loss2d_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target, weight)))) { + case Backend::CPU: + return CPUType::nll_loss2d_forward(self, target, weight, reduction, ignore_index); + break; + default: + AT_ERROR("nll_loss2d_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, int64_t, int64_t>( + op, self, target, weight, reduction, ignore_index); +#endif +} +static inline Tensor & nll_loss2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight, total_weight)))) { + case Backend::CPU: + return CPUType::nll_loss2d_backward_out(grad_input, grad_output, self, target, weight, reduction, ignore_index, total_weight); + break; + default: + AT_ERROR("nll_loss2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target, weight, total_weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, weight, reduction, ignore_index, total_weight); +#endif +} +static inline Tensor nll_loss2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight, total_weight)))) { + case Backend::CPU: + return CPUType::nll_loss2d_backward(grad_output, self, target, weight, reduction, ignore_index, total_weight); + break; + default: + AT_ERROR("nll_loss2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target, weight, total_weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::nll_loss2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, target, weight, reduction, ignore_index, total_weight); +#endif +} +static inline Tensor & smooth_l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, target)))) { + case Backend::CPU: + return CPUType::smooth_l1_loss_out(out, self, target, reduction); + break; + default: + AT_ERROR("smooth_l1_loss_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::smooth_l1_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, reduction); +#endif +} +static inline Tensor smooth_l1_loss(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::smooth_l1_loss(self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::smooth_l1_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline Tensor & smooth_l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target)))) { + case Backend::CPU: + return CPUType::smooth_l1_loss_backward_out(grad_input, grad_output, self, target, reduction); + break; + default: + AT_ERROR("smooth_l1_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::smooth_l1_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, reduction); +#endif +} +static inline Tensor smooth_l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::smooth_l1_loss_backward(grad_output, self, target, reduction); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::smooth_l1_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction); +#endif +} +static inline Tensor & soft_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, target)))) { + case Backend::CPU: + return CPUType::soft_margin_loss_out(out, self, target, reduction); + break; + default: + AT_ERROR("soft_margin_loss_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::soft_margin_loss", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, target, reduction); +#endif +} +static inline Tensor soft_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, target)))) { + case Backend::CPU: + return CPUType::soft_margin_loss(self, target, reduction); + break; + default: + AT_ERROR("soft_margin_loss not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::soft_margin_loss", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, target, reduction); +#endif +} +static inline Tensor & soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target)))) { + case Backend::CPU: + return CPUType::soft_margin_loss_backward_out(grad_input, grad_output, self, target, reduction); + break; + default: + AT_ERROR("soft_margin_loss_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::soft_margin_loss_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, target, reduction); +#endif +} +static inline Tensor soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target)))) { + case Backend::CPU: + return CPUType::soft_margin_loss_backward(grad_output, self, target, reduction); + break; + default: + AT_ERROR("soft_margin_loss_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, target))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::soft_margin_loss_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, target, reduction); +#endif +} +static inline Tensor & elu_out(Tensor & out, const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::elu_out(out, self, alpha, scale, input_scale); + break; + default: + AT_ERROR("elu_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::elu", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, alpha, scale, input_scale); +#endif +} +static inline Tensor elu(const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::elu(self, alpha, scale, input_scale); + break; + default: + 
AT_ERROR("elu not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::elu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, alpha, scale, input_scale); +#endif +} +static inline Tensor & elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output)))) { + case Backend::CPU: + return CPUType::elu_backward_out(grad_input, grad_output, alpha, scale, input_scale, output); + break; + default: + AT_ERROR("elu_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::elu_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, alpha, scale, input_scale, output); +#endif +} +static inline Tensor elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, output)))) { + case Backend::CPU: + return CPUType::elu_backward(grad_output, alpha, scale, input_scale, output); + break; + default: + AT_ERROR("elu_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::elu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, alpha, scale, input_scale, output); +#endif +} +static inline Tensor & elu_(Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::elu_(self, alpha, scale, input_scale); + break; + default: + AT_ERROR("elu_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::elu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, alpha, scale, input_scale); +#endif +} +static inline Tensor & glu_out(Tensor & out, const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::glu_out(out, self, dim); + break; + default: + AT_ERROR("glu_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::glu", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, dim); +#endif +} +static inline Tensor glu(const Tensor & self, int64_t dim) { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::glu(self, dim); + break; + default: + AT_ERROR("glu not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::glu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, dim); +#endif +} +static inline Tensor & glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::glu_backward_out(grad_input, grad_output, self, dim); + break; + default: + AT_ERROR("glu_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::glu_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, dim); +#endif +} +static inline Tensor glu_backward(const Tensor & grad_output, const Tensor & self, int64_t dim) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::glu_backward(grad_output, self, dim); + break; + default: + AT_ERROR("glu_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::glu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, dim); +#endif +} +static inline Tensor & hardtanh_out(Tensor & out, const Tensor & self, Scalar min_val, Scalar max_val) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::hardtanh_out(out, self, min_val, max_val); + break; + default: + AT_ERROR("hardtanh_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardtanh", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, min_val, max_val); +#endif +} +static inline Tensor hardtanh(const Tensor & self, Scalar min_val, Scalar max_val) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::hardtanh(self, min_val, max_val); + break; + default: + AT_ERROR("hardtanh not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardtanh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, min_val, 
max_val); +#endif +} +static inline Tensor & hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::hardtanh_backward_out(grad_input, grad_output, self, min_val, max_val); + break; + default: + AT_ERROR("hardtanh_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardtanh_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, min_val, max_val); +#endif +} +static inline Tensor hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::hardtanh_backward(grad_output, self, min_val, max_val); + break; + default: + AT_ERROR("hardtanh_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardtanh_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, min_val, max_val); +#endif +} +static inline Tensor & hardtanh_(Tensor & self, Scalar min_val, Scalar max_val) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::hardtanh_(self, min_val, max_val); + break; + default: + AT_ERROR("hardtanh_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::hardtanh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, min_val, max_val); +#endif +} +static inline Tensor & leaky_relu_out(Tensor & out, const Tensor & self, Scalar negative_slope) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::leaky_relu_out(out, self, negative_slope); + break; + default: + AT_ERROR("leaky_relu_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::leaky_relu", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, negative_slope); +#endif +} +static inline Tensor leaky_relu(const Tensor & self, Scalar negative_slope) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::leaky_relu(self, negative_slope); + break; + default: + AT_ERROR("leaky_relu not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::leaky_relu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, negative_slope); +#endif +} +static inline Tensor & leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::leaky_relu_backward_out(grad_input, grad_output, self, negative_slope); + break; + default: + AT_ERROR("leaky_relu_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::leaky_relu_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, negative_slope); +#endif +} +static inline Tensor leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::leaky_relu_backward(grad_output, self, negative_slope); + break; + default: + AT_ERROR("leaky_relu_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::leaky_relu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, negative_slope); +#endif +} +static inline Tensor & leaky_relu_(Tensor & self, Scalar negative_slope) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::leaky_relu_(self, negative_slope); + break; + default: + AT_ERROR("leaky_relu_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::leaky_relu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, negative_slope); +#endif +} +static inline Tensor & log_sigmoid_out(Tensor & out, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_sigmoid_out(out, self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self); +#endif +} +static inline Tensor log_sigmoid(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_sigmoid(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} +static inline std::tuple log_sigmoid_forward_out(Tensor & output, Tensor & buffer, 
const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, buffer, self)))) { + case Backend::CPU: + return CPUType::log_sigmoid_forward_out(output, buffer, self); + break; + default: + AT_ERROR("log_sigmoid_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, buffer, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &>( + op, output, buffer, self); +#endif +} +static inline std::tuple log_sigmoid_forward(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::log_sigmoid_forward(self); + break; + default: + AT_ERROR("log_sigmoid_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, self); +#endif +} +static inline Tensor & log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, buffer)))) { + case Backend::CPU: + return CPUType::log_sigmoid_backward_out(grad_input, grad_output, self, buffer); + break; + default: + AT_ERROR("log_sigmoid_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, buffer))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, buffer); +#endif +} +static inline Tensor log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, buffer)))) { + case Backend::CPU: + return CPUType::log_sigmoid_backward(grad_output, self, buffer); + break; + default: + AT_ERROR("log_sigmoid_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, buffer))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::log_sigmoid_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, buffer); +#endif +} +static inline Tensor & rrelu_with_noise_out(Tensor & out, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, noise)))) { + case Backend::CPU: + return 
CPUType::rrelu_with_noise_out(out, self, noise, lower, upper, training, generator); + break; + default: + AT_ERROR("rrelu_with_noise_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, noise))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_with_noise", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, noise, lower, upper, training, generator); +#endif +} +static inline Tensor rrelu_with_noise(const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, noise)))) { + case Backend::CPU: + return CPUType::rrelu_with_noise(self, noise, lower, upper, training, generator); + break; + default: + AT_ERROR("rrelu_with_noise not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, noise))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_with_noise", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, noise, lower, upper, training, generator); +#endif +} +static inline Tensor & rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, noise)))) { + case Backend::CPU: + return CPUType::rrelu_with_noise_backward_out(grad_input, grad_output, self, noise, lower, upper, training); + break; + default: + AT_ERROR("rrelu_with_noise_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, noise))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_with_noise_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, noise, lower, upper, training); +#endif +} +static inline Tensor rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, noise)))) { + case Backend::CPU: + return CPUType::rrelu_with_noise_backward(grad_output, self, noise, lower, upper, training); + break; + default: + AT_ERROR("rrelu_with_noise_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, noise))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_with_noise_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, noise, lower, upper, training); +#endif +} +static inline Tensor & rrelu_with_noise_(Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, noise)))) { + case Backend::CPU: + return CPUType::rrelu_with_noise_(self, noise, lower, upper, training, generator); + break; + default: + AT_ERROR("rrelu_with_noise_ not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, noise))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::rrelu_with_noise_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, noise, lower, upper, training, generator); +#endif +} +static inline Tensor & softplus_out(Tensor & out, const Tensor & self, Scalar beta, Scalar threshold) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::softplus_out(out, self, beta, threshold); + break; + default: + AT_ERROR("softplus_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softplus", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, beta, threshold); +#endif +} +static inline Tensor softplus(const Tensor & self, Scalar beta, Scalar threshold) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::softplus(self, beta, threshold); + break; + default: + AT_ERROR("softplus not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softplus", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, beta, threshold); +#endif +} +static inline Tensor & softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, output)))) { + case Backend::CPU: + return CPUType::softplus_backward_out(grad_input, grad_output, self, beta, threshold, output); + break; + default: + AT_ERROR("softplus_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softplus_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, beta, threshold, output); +#endif +} +static inline Tensor softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, output)))) { + case Backend::CPU: + return CPUType::softplus_backward(grad_output, self, beta, threshold, output); + break; + default: + AT_ERROR("softplus_backward not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softplus_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, beta, threshold, output); +#endif +} +static inline Tensor & softshrink_out(Tensor & out, const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::softshrink_out(out, self, lambd); + break; + default: + AT_ERROR("softshrink_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softshrink", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, lambd); +#endif +} +static inline Tensor softshrink(const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::softshrink(self, lambd); + break; + default: + AT_ERROR("softshrink not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softshrink", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self, lambd); +#endif +} +static inline Tensor & softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::softshrink_backward_out(grad_input, grad_output, self, lambd); + break; + default: + AT_ERROR("softshrink_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softshrink_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, lambd); +#endif +} +static inline Tensor softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::softshrink_backward(grad_output, self, lambd); + break; + default: + AT_ERROR("softshrink_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::softshrink_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self, lambd); +#endif +} +static inline Tensor & adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::adaptive_avg_pool2d_out(out, self, output_size); + break; + default: + AT_ERROR("adaptive_avg_pool2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size); +#endif +} +static inline Tensor adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::adaptive_avg_pool2d(self, output_size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor mkldnn_adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + + default: + AT_ERROR("mkldnn_adaptive_avg_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::mkldnn_adaptive_avg_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor _adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::_adaptive_avg_pool2d(self, output_size); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::_adaptive_avg_pool2d(self, output_size); + break; + default: + AT_ERROR("_adaptive_avg_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_adaptive_avg_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor _adaptive_avg_pool2d_backward(const Tensor & grad_output, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::_adaptive_avg_pool2d_backward(grad_output, self); + break; + default: + AT_ERROR("_adaptive_avg_pool2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_adaptive_avg_pool2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self); +#endif +} +static inline Tensor & adaptive_avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::adaptive_avg_pool3d_out(out, self, output_size); + break; + default: + AT_ERROR("adaptive_avg_pool3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size); +#endif +} +static inline Tensor adaptive_avg_pool3d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::adaptive_avg_pool3d(self, output_size); + break; + default: + AT_ERROR("adaptive_avg_pool3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor & adaptive_avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::adaptive_avg_pool3d_backward_out(grad_input, grad_output, self); + break; + default: + AT_ERROR("adaptive_avg_pool3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self); +#endif +} +static inline Tensor adaptive_avg_pool3d_backward(const Tensor & grad_output, const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::adaptive_avg_pool3d_backward(grad_output, self); + break; + default: + AT_ERROR("adaptive_avg_pool3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::adaptive_avg_pool3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, self); +#endif +} +static inline std::tuple adaptive_max_pool2d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, indices, self)))) { + case Backend::CPU: + return CPUType::adaptive_max_pool2d_out(out, indices, self, output_size); + break; + default: + AT_ERROR("adaptive_max_pool2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, indices, 
self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool2d", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, IntArrayRef>(
+        op, out, indices, self, output_size);
+#endif
+}
+static inline std::tuple<Tensor,Tensor> adaptive_max_pool2d(const Tensor & self, IntArrayRef output_size) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool2d(self, output_size);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool2d", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, IntArrayRef>(
+        op, self, output_size);
+#endif
+}
+static inline Tensor & adaptive_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool2d_backward_out(grad_input, grad_output, self, indices);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool2d_backward", "grad_input"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &>(
+        op, grad_input, grad_output, self, indices);
+#endif
+}
+static inline Tensor adaptive_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool2d_backward(grad_output, self, indices);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool2d_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, const Tensor &>(
+        op, grad_output, self, indices);
+#endif
+}
+static inline std::tuple<Tensor &,Tensor &> adaptive_max_pool3d_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, indices, self)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool3d_out(out, indices, self, output_size);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, indices, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool3d", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor &,Tensor &>, Tensor &, Tensor &, const Tensor &, IntArrayRef>(
+        op, out, indices, self, output_size);
+#endif
+}
+static inline std::tuple<Tensor,Tensor> adaptive_max_pool3d(const Tensor & self, IntArrayRef output_size) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool3d(self, output_size);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool3d", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<std::tuple<Tensor,Tensor>, const Tensor &, IntArrayRef>(
+        op, self, output_size);
+#endif
+}
+static inline Tensor & adaptive_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool3d_backward_out(grad_input, grad_output, self, indices);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool3d_backward", "grad_input"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &>(
+        op, grad_input, grad_output, self, indices);
+#endif
+}
+static inline Tensor adaptive_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) {
+        case Backend::CPU:
+            return CPUType::adaptive_max_pool3d_backward(grad_output, self, indices);
+            break;
+        default:
+            AT_ERROR("adaptive_max_pool3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::adaptive_max_pool3d_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxed<Tensor, const Tensor &, const Tensor &, const Tensor &>(
+        op, grad_output, self, indices);
+#endif
+}
+static inline Tensor & avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool2d_out(out, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool2d", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, out, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool2d(self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        case Backend::QuantizedCPU:
+            return QuantizedCPUType::avg_pool2d(self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool2d", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor & avg_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool2d_backward_out(grad_input, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool2d_backward", "grad_input"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, grad_input, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor avg_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool2d_backward(grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool2d_backward", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor & avg_pool3d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool3d_out(out, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool3d", "out"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, out, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor avg_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool3d(self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool3d", ""}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor & avg_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool3d_backward_out(grad_input, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
+            AT_ERROR("avg_pool3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)));
+    }
+#else
+    static c10::OperatorHandle op = c10::Dispatcher::singleton()
+        .findSchema({"aten::avg_pool3d_backward", "grad_input"}).value();
+    return c10::Dispatcher::singleton().callUnboxedOnly<Tensor &, Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, bool, bool, c10::optional<int64_t>>(
+        op, grad_input, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+#endif
+}
+static inline Tensor avg_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional<int64_t> divisor_override) {
+#ifdef USE_STATIC_DISPATCH
+    at::AutoNonVariableTypeMode _var_guard(true);
+    switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) {
+        case Backend::CPU:
+            return CPUType::avg_pool3d_backward(grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override);
+            break;
+        default:
AT_ERROR("avg_pool3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::avg_pool3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, grad_output, self, kernel_size, stride, padding, ceil_mode, count_include_pad, divisor_override); +#endif +} +static inline std::tuple fractional_max_pool2d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, indices, self, random_samples)))) { + case Backend::CPU: + return CPUType::fractional_max_pool2d_out(output, indices, self, kernel_size, output_size, random_samples); + break; + default: + AT_ERROR("fractional_max_pool2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, indices, self, random_samples))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool2d", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, const Tensor &>( + op, output, indices, self, kernel_size, output_size, random_samples); +#endif +} +static inline std::tuple fractional_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, random_samples)))) { + case Backend::CPU: + return CPUType::fractional_max_pool2d(self, kernel_size, output_size, random_samples); + break; + default: + AT_ERROR("fractional_max_pool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, random_samples))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, IntArrayRef, const Tensor &>( + op, self, kernel_size, output_size, random_samples); +#endif +} +static inline Tensor & fractional_max_pool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::fractional_max_pool2d_backward_out(grad_input, grad_output, self, kernel_size, output_size, indices); + break; + default: + AT_ERROR("fractional_max_pool2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, kernel_size, output_size, indices); +#endif +} +static inline Tensor 
fractional_max_pool2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::fractional_max_pool2d_backward(grad_output, self, kernel_size, output_size, indices); + break; + default: + AT_ERROR("fractional_max_pool2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, kernel_size, output_size, indices); +#endif +} +static inline std::tuple fractional_max_pool3d_out(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, indices, self, random_samples)))) { + case Backend::CPU: + return CPUType::fractional_max_pool3d_out(output, indices, self, kernel_size, output_size, random_samples); + break; + default: + AT_ERROR("fractional_max_pool3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, indices, self, random_samples))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool3d", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, const Tensor &>( + op, output, indices, self, kernel_size, output_size, random_samples); +#endif +} +static inline std::tuple fractional_max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, random_samples)))) { + case Backend::CPU: + return CPUType::fractional_max_pool3d(self, kernel_size, output_size, random_samples); + break; + default: + AT_ERROR("fractional_max_pool3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, random_samples))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, IntArrayRef, const Tensor &>( + op, self, kernel_size, output_size, random_samples); +#endif +} +static inline Tensor & fractional_max_pool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::fractional_max_pool3d_backward_out(grad_input, grad_output, self, kernel_size, output_size, indices); + break; + default: + 
AT_ERROR("fractional_max_pool3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, kernel_size, output_size, indices); +#endif +} +static inline Tensor fractional_max_pool3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::fractional_max_pool3d_backward(grad_output, self, kernel_size, output_size, indices); + break; + default: + AT_ERROR("fractional_max_pool3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::fractional_max_pool3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, kernel_size, output_size, indices); +#endif +} +static inline std::tuple max_pool2d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, indices, self)))) { + case Backend::CPU: + return CPUType::max_pool2d_with_indices_out(out, indices, self, kernel_size, stride, padding, dilation, ceil_mode); + break; + default: + AT_ERROR("max_pool2d_with_indices_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool2d_with_indices", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, bool>( + op, out, indices, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline std::tuple max_pool2d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::max_pool2d_with_indices(self, kernel_size, stride, padding, dilation, ceil_mode); + break; + default: + AT_ERROR("max_pool2d_with_indices not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool2d_with_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, bool>( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor & max_pool2d_with_indices_backward_out(Tensor & 
grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_pool2d_with_indices_backward_out(grad_input, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); + break; + default: + AT_ERROR("max_pool2d_with_indices_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool2d_with_indices_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); +#endif +} +static inline Tensor max_pool2d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_pool2d_with_indices_backward(grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); + break; + default: + AT_ERROR("max_pool2d_with_indices_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool2d_with_indices_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); +#endif +} +static inline std::tuple max_pool3d_with_indices_out(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, indices, self)))) { + case Backend::CPU: + return CPUType::max_pool3d_with_indices_out(out, indices, self, kernel_size, stride, padding, dilation, ceil_mode); + break; + default: + AT_ERROR("max_pool3d_with_indices_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, indices, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool3d_with_indices", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, bool>( + op, out, indices, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline std::tuple max_pool3d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::max_pool3d_with_indices(self, kernel_size, stride, padding, dilation, ceil_mode); + break; + default: + AT_ERROR("max_pool3d_with_indices not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool3d_with_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, bool>( + op, self, kernel_size, stride, padding, dilation, ceil_mode); +#endif +} +static inline Tensor & max_pool3d_with_indices_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_pool3d_with_indices_backward_out(grad_input, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); + break; + default: + AT_ERROR("max_pool3d_with_indices_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool3d_with_indices_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); +#endif +} +static inline Tensor max_pool3d_with_indices_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_pool3d_with_indices_backward(grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); + break; + default: + AT_ERROR("max_pool3d_with_indices_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_pool3d_with_indices_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, kernel_size, stride, padding, dilation, ceil_mode, indices); +#endif +} +static inline Tensor & max_unpool2d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool2d_out(out, self, indices, output_size); + break; + default: + AT_ERROR("max_unpool2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, indices))); + } +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, indices, output_size); +#endif +} +static inline Tensor max_unpool2d(const Tensor & self, const Tensor & indices, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool2d(self, indices, output_size); + break; + default: + AT_ERROR("max_unpool2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, output_size); +#endif +} +static inline Tensor & max_unpool2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool2d_backward_out(grad_input, grad_output, self, indices, output_size); + break; + default: + AT_ERROR("max_unpool2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, indices, output_size); +#endif +} +static inline Tensor max_unpool2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool2d_backward(grad_output, self, indices, output_size); + break; + default: + AT_ERROR("max_unpool2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, indices, output_size); +#endif +} +static inline Tensor & max_unpool3d_out(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool3d_out(out, self, indices, output_size, stride, padding); + break; + default: + AT_ERROR("max_unpool3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool3d", 
"out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, indices, output_size, stride, padding); +#endif +} +static inline Tensor max_unpool3d(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool3d(self, indices, output_size, stride, padding); + break; + default: + AT_ERROR("max_unpool3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, indices, output_size, stride, padding); +#endif +} +static inline Tensor & max_unpool3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool3d_backward_out(grad_input, grad_output, self, indices, output_size, stride, padding); + break; + default: + AT_ERROR("max_unpool3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, indices, output_size, stride, padding); +#endif +} +static inline Tensor max_unpool3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices)))) { + case Backend::CPU: + return CPUType::max_unpool3d_backward(grad_output, self, indices, output_size, stride, padding); + break; + default: + AT_ERROR("max_unpool3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, indices))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::max_unpool3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, indices, output_size, stride, padding); +#endif +} +static inline Tensor & reflection_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::reflection_pad1d_out(out, self, padding); + break; + default: + AT_ERROR("reflection_pad1d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::reflection_pad1d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, padding); +#endif +} +static inline Tensor reflection_pad1d(const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::reflection_pad1d(self, padding); + break; + default: + AT_ERROR("reflection_pad1d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding); +#endif +} +static inline Tensor & reflection_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::reflection_pad1d_backward_out(grad_input, grad_output, self, padding); + break; + default: + AT_ERROR("reflection_pad1d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad1d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, padding); +#endif +} +static inline Tensor reflection_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::reflection_pad1d_backward(grad_output, self, padding); + break; + default: + AT_ERROR("reflection_pad1d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad1d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, padding); +#endif +} +static inline Tensor & reflection_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::reflection_pad2d_out(out, self, padding); + break; + default: + AT_ERROR("reflection_pad2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, padding); +#endif +} +static inline Tensor reflection_pad2d(const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::reflection_pad2d(self, padding); + break; + default: + AT_ERROR("reflection_pad2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding); +#endif +} +static inline Tensor & reflection_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::reflection_pad2d_backward_out(grad_input, grad_output, self, padding); + break; + default: + AT_ERROR("reflection_pad2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, padding); +#endif +} +static inline Tensor reflection_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::reflection_pad2d_backward(grad_output, self, padding); + break; + default: + AT_ERROR("reflection_pad2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::reflection_pad2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, padding); +#endif +} +static inline Tensor & replication_pad1d_out(Tensor & out, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::replication_pad1d_out(out, self, padding); + break; + default: + AT_ERROR("replication_pad1d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad1d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, padding); +#endif +} +static inline Tensor replication_pad1d(const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::replication_pad1d(self, padding); + break; + default: + AT_ERROR("replication_pad1d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding); +#endif +} +static inline Tensor & replication_pad1d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad1d_backward_out(grad_input, grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad1d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad1d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, padding); +#endif +} +static inline Tensor replication_pad1d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad1d_backward(grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad1d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad1d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, padding); +#endif +} +static inline Tensor & replication_pad2d_out(Tensor & out, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::replication_pad2d_out(out, self, padding); + break; + default: + AT_ERROR("replication_pad2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, padding); +#endif +} +static inline Tensor replication_pad2d(const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::replication_pad2d(self, padding); + break; + default: + AT_ERROR("replication_pad2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding); +#endif +} +static inline Tensor & replication_pad2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad2d_backward_out(grad_input, grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, padding); +#endif +} +static inline Tensor replication_pad2d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad2d_backward(grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, padding); +#endif +} +static inline Tensor & replication_pad3d_out(Tensor & out, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::replication_pad3d_out(out, self, padding); + break; + default: + AT_ERROR("replication_pad3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, padding); +#endif +} +static inline Tensor replication_pad3d(const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::replication_pad3d(self, padding); + break; + default: + AT_ERROR("replication_pad3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, padding); +#endif +} +static inline Tensor & replication_pad3d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad3d_backward_out(grad_input, grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad3d_backward_out not 
implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, self, padding); +#endif +} +static inline Tensor replication_pad3d_backward(const Tensor & grad_output, const Tensor & self, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self)))) { + case Backend::CPU: + return CPUType::replication_pad3d_backward(grad_output, self, padding); + break; + default: + AT_ERROR("replication_pad3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::replication_pad3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, self, padding); +#endif +} +static inline Tensor _test_optional_float(const Tensor & self, c10::optional scale) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_test_optional_float(self, scale); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::_test_optional_float", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, self, scale); +#endif +} +static inline Tensor & upsample_linear1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_linear1d_out(out, self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_linear1d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_linear1d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size, align_corners); +#endif +} +static inline Tensor upsample_linear1d(const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_linear1d(self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_linear1d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_linear1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size, align_corners); +#endif +} +static inline Tensor & upsample_linear1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_linear1d_backward_out(grad_input, grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_linear1d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_linear1d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor upsample_linear1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_linear1d_backward(grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_linear1d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_linear1d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor & upsample_bilinear2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_bilinear2d_out(out, self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_bilinear2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bilinear2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size, align_corners); +#endif +} +static inline Tensor upsample_bilinear2d(const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_bilinear2d(self, output_size, align_corners); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::upsample_bilinear2d(self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_bilinear2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bilinear2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size, align_corners); +#endif +} +static inline Tensor & upsample_bilinear2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_bilinear2d_backward_out(grad_input, grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_bilinear2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bilinear2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor upsample_bilinear2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_bilinear2d_backward(grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_bilinear2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bilinear2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor & upsample_bicubic2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_bicubic2d_out(out, self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_bicubic2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bicubic2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size, align_corners); +#endif +} +static inline Tensor upsample_bicubic2d(const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_bicubic2d(self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_bicubic2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bicubic2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size, align_corners); +#endif +} +static inline Tensor & upsample_bicubic2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_bicubic2d_backward_out(grad_input, grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_bicubic2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bicubic2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor upsample_bicubic2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_bicubic2d_backward(grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_bicubic2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_bicubic2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor & upsample_trilinear3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_trilinear3d_out(out, self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_trilinear3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_trilinear3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size, align_corners); +#endif +} +static inline Tensor upsample_trilinear3d(const Tensor & self, IntArrayRef output_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_trilinear3d(self, output_size, align_corners); + break; + default: + AT_ERROR("upsample_trilinear3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_trilinear3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size, align_corners); +#endif +} +static inline Tensor & upsample_trilinear3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_trilinear3d_backward_out(grad_input, grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_trilinear3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_trilinear3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor upsample_trilinear3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_trilinear3d_backward(grad_output, output_size, input_size, align_corners); + break; + default: + AT_ERROR("upsample_trilinear3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_trilinear3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size, align_corners); +#endif +} +static inline Tensor & upsample_nearest1d_out(Tensor & out, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_nearest1d_out(out, self, output_size); + break; + default: + AT_ERROR("upsample_nearest1d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest1d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size); +#endif +} +static inline Tensor upsample_nearest1d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_nearest1d(self, output_size); + break; + default: + AT_ERROR("upsample_nearest1d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest1d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor & upsample_nearest1d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return 
CPUType::upsample_nearest1d_backward_out(grad_input, grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest1d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest1d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size); +#endif +} +static inline Tensor upsample_nearest1d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_nearest1d_backward(grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest1d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest1d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size); +#endif +} +static inline Tensor & upsample_nearest2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_nearest2d_out(out, self, output_size); + break; + default: + AT_ERROR("upsample_nearest2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size); +#endif +} +static inline Tensor upsample_nearest2d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_nearest2d(self, output_size); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::upsample_nearest2d(self, output_size); + break; + default: + AT_ERROR("upsample_nearest2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor & upsample_nearest2d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_nearest2d_backward_out(grad_input, grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest2d_backward_out not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output_size, input_size); +#endif +} +static inline Tensor upsample_nearest2d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_nearest2d_backward(grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size); +#endif +} +static inline Tensor & upsample_nearest3d_out(Tensor & out, const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::upsample_nearest3d_out(out, self, output_size); + break; + default: + AT_ERROR("upsample_nearest3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size); +#endif +} +static inline Tensor upsample_nearest3d(const Tensor & self, IntArrayRef output_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::upsample_nearest3d(self, output_size); + break; + default: + AT_ERROR("upsample_nearest3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size); +#endif +} +static inline Tensor & upsample_nearest3d_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::upsample_nearest3d_backward_out(grad_input, grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, 
grad_input, grad_output, output_size, input_size); +#endif +} +static inline Tensor upsample_nearest3d_backward(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::upsample_nearest3d_backward(grad_output, output_size, input_size); + break; + default: + AT_ERROR("upsample_nearest3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::upsample_nearest3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, output_size, input_size); +#endif +} +static inline Tensor & sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output)))) { + case Backend::CPU: + return CPUType::sigmoid_backward_out(grad_input, grad_output, output); + break; + default: + AT_ERROR("sigmoid_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sigmoid_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output); +#endif +} +static inline Tensor sigmoid_backward(const Tensor & grad_output, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sigmoid_backward(grad_output, output); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::sigmoid_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, output); +#endif +} +static inline Tensor & tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output)))) { + case Backend::CPU: + return CPUType::tanh_backward_out(grad_input, grad_output, output); + break; + default: + AT_ERROR("tanh_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output, output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tanh_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, output); +#endif +} +static inline Tensor tanh_backward(const Tensor & grad_output, const Tensor & output) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, output)))) { + case Backend::CPU: + return CPUType::tanh_backward(grad_output, output); + break; + default: + AT_ERROR("tanh_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, output))); + } +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::tanh_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_output, output); +#endif +} +static inline Tensor & slow_conv_transpose2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose2d_out(out, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); + break; + default: + AT_ERROR("slow_conv_transpose2d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); +#endif +} +static inline Tensor slow_conv_transpose2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose2d(self, weight, kernel_size, bias, stride, padding, output_padding, dilation); + break; + default: + AT_ERROR("slow_conv_transpose2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); +#endif +} +static inline std::tuple slow_conv_transpose2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, columns, ones)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose2d_backward_out(grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, columns, ones); + break; + default: + AT_ERROR("slow_conv_transpose2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, columns, ones))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose2d_backward", "grad_output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor 
&, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &>( + op, grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, columns, ones); +#endif +} +static inline std::tuple slow_conv_transpose2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, columns, ones)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose2d_backward(grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, columns, ones, output_mask); + break; + default: + AT_ERROR("slow_conv_transpose2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, columns, ones))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose2d_backward", "output_mask"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, columns, ones, output_mask); +#endif +} +static inline Tensor & slow_conv_transpose3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose3d_out(out, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); + break; + default: + AT_ERROR("slow_conv_transpose3d_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); +#endif +} +static inline Tensor slow_conv_transpose3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose3d(self, weight, kernel_size, bias, stride, padding, output_padding, dilation); + break; + default: + AT_ERROR("slow_conv_transpose3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, output_padding, dilation); +#endif +} +static inline std::tuple slow_conv_transpose3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose3d_backward_out(grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, finput, fgrad_input); + break; + default: + AT_ERROR("slow_conv_transpose3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose3d_backward", "grad_output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &>( + op, grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, finput, fgrad_input); +#endif +} +static inline std::tuple slow_conv_transpose3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::slow_conv_transpose3d_backward(grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, finput, fgrad_input, output_mask); + break; + default: + AT_ERROR("slow_conv_transpose3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_transpose3d_backward", "output_mask"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, output_padding, dilation, finput, fgrad_input, output_mask); +#endif +} +static inline Tensor & thnn_conv2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + return TypeDefault::thnn_conv2d_out(out, self, weight, kernel_size, bias, stride, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline Tensor thnn_conv2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::thnn_conv2d(self, weight, kernel_size, bias, stride, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple thnn_conv2d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, finput, fgrad_input, self, weight, bias)))) { + case Backend::CPU: + return CPUType::thnn_conv2d_forward_out(output, finput, fgrad_input, self, weight, kernel_size, bias, stride, padding); + break; + default: + AT_ERROR("thnn_conv2d_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, finput, fgrad_input, self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, const Tensor &, IntArrayRef, const Tensor &, IntArrayRef, IntArrayRef>( + op, output, finput, fgrad_input, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple thnn_conv2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::thnn_conv2d_forward(self, weight, kernel_size, bias, stride, padding); + break; + default: + AT_ERROR("thnn_conv2d_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, IntArrayRef, const Tensor &, IntArrayRef, IntArrayRef>( + op, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple thnn_conv2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::thnn_conv2d_backward_out(grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input); + break; + default: + AT_ERROR("thnn_conv2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &>( + op, grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input); +#endif +} +static inline std::tuple thnn_conv2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::thnn_conv2d_backward(grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input, output_mask); + break; + default: + AT_ERROR("thnn_conv2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv2d_backward", "output_mask"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input, output_mask); +#endif +} +static inline Tensor & thnn_conv_depthwise2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::thnn_conv_depthwise2d_out(out, self, weight, kernel_size, bias, stride, padding, dilation); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline Tensor thnn_conv_depthwise2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::thnn_conv_depthwise2d(self, weight, kernel_size, bias, stride, padding, dilation); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline Tensor & thnn_conv_depthwise2d_forward_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias)))) { + + default: + AT_ERROR("thnn_conv_depthwise2d_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d_forward", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline Tensor thnn_conv_depthwise2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + + default: + AT_ERROR("thnn_conv_depthwise2d_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline std::tuple thnn_conv_depthwise2d_backward_out(Tensor & grad_input, Tensor & grad_weight, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_output, self, weight)))) { + + default: + AT_ERROR("thnn_conv_depthwise2d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_output, self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef>( + op, grad_input, grad_weight, grad_output, self, weight, kernel_size, stride, padding, dilation); +#endif +} +static inline std::tuple thnn_conv_depthwise2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight)))) { + + default: + AT_ERROR("thnn_conv_depthwise2d_backward not implemented for ", 
at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::thnn_conv_depthwise2d_backward", "output_mask"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, dilation, output_mask); +#endif +} +static inline Tensor & slow_conv3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slow_conv3d_out(out, self, weight, kernel_size, bias, stride, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline Tensor slow_conv3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slow_conv3d(self, weight, kernel_size, bias, stride, padding); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple slow_conv3d_forward_out(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(output, finput, fgrad_input, self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv3d_forward_out(output, finput, fgrad_input, self, weight, kernel_size, bias, stride, padding); + break; + default: + AT_ERROR("slow_conv3d_forward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(output, finput, fgrad_input, self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d_forward", "output"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, const Tensor &, IntArrayRef, const Tensor &, IntArrayRef, IntArrayRef>( + op, output, finput, fgrad_input, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple slow_conv3d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv3d_forward(self, weight, kernel_size, bias, stride, padding); + break; + default: + AT_ERROR("slow_conv3d_forward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d_forward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, IntArrayRef, const Tensor &, IntArrayRef, IntArrayRef>( + op, self, weight, kernel_size, bias, stride, padding); +#endif +} +static inline std::tuple slow_conv3d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::slow_conv3d_backward_out(grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input); + break; + default: + AT_ERROR("slow_conv3d_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_weight, grad_bias, grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, Tensor &, Tensor &, Tensor &, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &>( + op, grad_input, grad_weight, grad_bias, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input); +#endif +} +static inline std::tuple slow_conv3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input)))) { + case Backend::CPU: + return CPUType::slow_conv3d_backward(grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input, output_mask); + break; + default: + AT_ERROR("slow_conv3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight, finput, fgrad_input))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv3d_backward", "output_mask"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, const Tensor &, const Tensor &, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, finput, fgrad_input, output_mask); +#endif +} +static inline Tensor slow_conv_dilated2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_dilated2d(self, weight, kernel_size, bias, stride, 
padding, dilation); + break; + default: + AT_ERROR("slow_conv_dilated2d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_dilated2d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline std::tuple slow_conv_dilated2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight)))) { + case Backend::CPU: + return CPUType::slow_conv_dilated2d_backward(grad_output, self, weight, kernel_size, stride, padding, dilation, output_mask); + break; + default: + AT_ERROR("slow_conv_dilated2d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_dilated2d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, dilation, output_mask); +#endif +} +static inline Tensor slow_conv_dilated3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias)))) { + case Backend::CPU: + return CPUType::slow_conv_dilated3d(self, weight, kernel_size, bias, stride, padding, dilation); + break; + default: + AT_ERROR("slow_conv_dilated3d not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self, weight, bias))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_dilated3d", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, weight, kernel_size, bias, stride, padding, dilation); +#endif +} +static inline std::tuple slow_conv_dilated3d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight)))) { + case Backend::CPU: + return CPUType::slow_conv_dilated3d_backward(grad_output, self, weight, kernel_size, stride, padding, dilation, output_mask); + break; + default: + AT_ERROR("slow_conv_dilated3d_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output, self, weight))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::slow_conv_dilated3d_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, 
const Tensor &, IntArrayRef, IntArrayRef, IntArrayRef, IntArrayRef, std::array>( + op, grad_output, self, weight, kernel_size, stride, padding, dilation, output_mask); +#endif +} +static inline Tensor & col2im_out(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::col2im_out(out, self, output_size, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("col2im_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::col2im", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, output_size, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor col2im(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::col2im(self, output_size, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("col2im not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::col2im", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, output_size, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor & col2im_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::col2im_backward_out(grad_input, grad_output, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("col2im_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::col2im_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor col2im_backward(const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::col2im_backward(grad_output, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("col2im_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::col2im_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor & im2col_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(out, self)))) { + case Backend::CPU: + return CPUType::im2col_out(out, self, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("im2col_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(out, self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::im2col", "out"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, out, self, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor im2col(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(self)))) { + case Backend::CPU: + return CPUType::im2col(self, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("im2col not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(self))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::im2col", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, self, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor & im2col_backward_out(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output)))) { + case Backend::CPU: + return CPUType::im2col_backward_out(grad_input, grad_output, input_size, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("im2col_backward_out not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_input, grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::im2col_backward", "grad_input"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_input, grad_output, input_size, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor im2col_backward(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(c10::detail::multi_dispatch_tensor_type_set(grad_output)))) { + case Backend::CPU: + return CPUType::im2col_backward(grad_output, input_size, kernel_size, dilation, padding, stride); + break; + default: + AT_ERROR("im2col_backward not implemented for ", at::toString(c10::detail::multi_dispatch_tensor_type_set(grad_output))); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + 
.findSchema({"aten::im2col_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, grad_output, input_size, kernel_size, dilation, padding, stride); +#endif +} +static inline Tensor isfinite(const Tensor & self) { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::isfinite(self); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton() + .findSchema({"aten::isfinite", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, self); +#endif +} + +} diff --git a/thirdparty/libtorch/include/ATen/Generator.h b/thirdparty/libtorch/include/ATen/Generator.h new file mode 100644 index 0000000000..48c25e141d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Generator.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/InferSize.h b/thirdparty/libtorch/include/ATen/InferSize.h new file mode 100644 index 0000000000..301ad9bb20 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/InferSize.h @@ -0,0 +1,52 @@ +#pragma once + +#include +#include +#include +#include + +namespace at { + +// Infers the size of a dim with size -1, if it exists. Also checks that new +// shape is compatible with the number of elements. +inline std::vector infer_size(IntArrayRef shape, int64_t numel) { + auto res = shape.vec(); + int64_t newsize = 1; + auto infer_dim = c10::optional(); + for (int64_t dim = 0, ndim = shape.size(); dim != ndim; dim++) { + if (shape[dim] == -1) { + if (infer_dim) { + throw std::runtime_error("only one dimension can be inferred"); + } + infer_dim = dim; + } else if (shape[dim] >= 0) { + newsize *= shape[dim]; + } else { + AT_ERROR("invalid shape dimension ", shape[dim]); + } + } + + if (numel == newsize || (infer_dim && newsize > 0 && numel % newsize == 0)) { + if (infer_dim) { + // We have a degree of freedom here to select the dimension size; follow + // NumPy semantics and just bail. However, a nice error message is needed + // because users often use `view` as a way to flatten & unflatten + // dimensions and will otherwise be confused why + // empty_tensor.view( 0, 0) + // works yet + // empty_tensor.view(-1, 0) + // doesn't. + TORCH_CHECK(newsize != 0, "cannot reshape tensor of 0 elements into shape ", + shape, " because the unspecified dimension size -1 can be any " + "value and is ambiguous"); + res[*infer_dim] = numel / newsize; + } + return res; + } + + std::ostringstream ss; + ss << "shape '" << shape << "' is invalid for input of size " << numel; + throw std::runtime_error(ss.str()); +} + +} diff --git a/thirdparty/libtorch/include/ATen/InitialTensorOptions.h b/thirdparty/libtorch/include/ATen/InitialTensorOptions.h new file mode 100644 index 0000000000..00438563af --- /dev/null +++ b/thirdparty/libtorch/include/ATen/InitialTensorOptions.h @@ -0,0 +1,15 @@ +#pragma once + +#include + +namespace at { + +// Represents the initial TensorOptions, before the "defaults" are ever changed. +// This is designed to be used in library code, where the explicit devices, dtypes, etc. are known. +// NOTE: this is not a stable API. 
+inline TensorOptions initialTensorOptions() {
+  return TensorOptions(kCPU).dtype(kFloat).layout(kStrided)
+                            .requires_grad(false);
+}
+
+}
diff --git a/thirdparty/libtorch/include/ATen/Layout.h b/thirdparty/libtorch/include/ATen/Layout.h
new file mode 100644
index 0000000000..ea71e2b469
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/Layout.h
@@ -0,0 +1,2 @@
+#pragma once
+#include <c10/core/Layout.h>
diff --git a/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCPU.h b/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCPU.h
new file mode 100644
index 0000000000..57ff1d9647
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCPU.h
@@ -0,0 +1,261 @@
+#pragma once
+
+// @generated by aten/src/ATen/gen.py
+
+#include
+#include
+#include
+
+namespace c10 {
+class Scalar;
+}
+namespace at {
+struct Generator;
+class Tensor;
+struct Type;
+} // namespace at
+
+namespace at {
+namespace native {
+namespace legacy {
+namespace cpu {
+
+Tensor & _th_set_(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride);
+Tensor & _th_set_(Tensor & self, const Tensor & source);
+Tensor & _th_masked_fill_(Tensor & self, const Tensor & mask, Scalar value);
+Tensor & s__th_masked_fill_(Tensor & self, const Tensor & mask, Scalar value);
+Tensor & _th_masked_fill_bool_(Tensor & self, const Tensor & mask, Scalar value);
+Tensor & s__th_masked_fill_bool_(Tensor & self, const Tensor & mask, Scalar value);
+Tensor & _th_masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source);
+Tensor & s__th_masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source);
+Tensor & _th_masked_scatter_bool_(Tensor & self, const Tensor & mask, const Tensor & source);
+Tensor & s__th_masked_scatter_bool_(Tensor & self, const Tensor & mask, const Tensor & source);
+Tensor & _th_masked_select_out(Tensor & result, const Tensor & self, const Tensor & mask);
+Tensor & s__th_masked_select_out(Tensor & result, const Tensor & self, const Tensor & mask);
+Tensor _th_masked_select(const Tensor & self, const Tensor & mask);
+Tensor s__th_masked_select(const Tensor & self, const Tensor & mask);
+Tensor & _th_masked_select_bool_out(Tensor & result, const Tensor & self, const Tensor & mask);
+Tensor & s__th_masked_select_bool_out(Tensor & result, const Tensor & self, const Tensor & mask);
+Tensor _th_masked_select_bool(const Tensor & self, const Tensor & mask);
+Tensor s__th_masked_select_bool(const Tensor & self, const Tensor & mask);
+Tensor & _th_nonzero_out(Tensor & result, const Tensor & self);
+Tensor _th_nonzero(const Tensor & self);
+Tensor _th_clone(const Tensor & self);
+Tensor & _th_index_select_out(Tensor & result, const Tensor & self, int64_t dim, const Tensor & index);
+Tensor _th_index_select(const Tensor & self, int64_t dim, const Tensor & index);
+Tensor & _th_index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source);
+Tensor & _th_take_out(Tensor & result, const Tensor & self, const Tensor & index);
+Tensor _th_take(const Tensor & self, const Tensor & index);
+Tensor & _th_put_(Tensor & self, const Tensor & index, const Tensor & source, bool accumulate);
+Tensor & _th_index_fill_(Tensor & self, int64_t dim, const Tensor & index, Scalar value);
+Tensor & _th_scatter_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src);
+Tensor & _th_scatter_(Tensor & self, int64_t dim, const Tensor & index, Scalar value);
+Tensor & _th_scatter_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src);
+Tensor &
_th_gather_out(Tensor & result, const Tensor & self, int64_t dim, const Tensor & index); +Tensor _th_gather(const Tensor & self, int64_t dim, const Tensor & index); +bool _th_equal(const Tensor & self, const Tensor & other); +Tensor & _th_and_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_and(const Tensor & self, Scalar other); +Tensor & _th_and_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_and_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_and(const Tensor & self, const Tensor & other); +Tensor s__th_and(const Tensor & self, const Tensor & other); +Tensor & _th_iand_(Tensor & self, Scalar other); +Tensor & _th_iand_(Tensor & self, const Tensor & other); +Tensor & s__th_iand_(Tensor & self, const Tensor & other); +Tensor & _th_or_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_or(const Tensor & self, Scalar other); +Tensor & _th_or_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_or_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_or(const Tensor & self, const Tensor & other); +Tensor s__th_or(const Tensor & self, const Tensor & other); +Tensor & _th_ior_(Tensor & self, Scalar other); +Tensor & _th_ior_(Tensor & self, const Tensor & other); +Tensor & s__th_ior_(Tensor & self, const Tensor & other); +Tensor & _th_lshift_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_lshift(const Tensor & self, Scalar other); +Tensor & _th_lshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_lshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_lshift(const Tensor & self, const Tensor & other); +Tensor s__th_lshift(const Tensor & self, const Tensor & other); +Tensor & _th_ilshift_(Tensor & self, Scalar other); +Tensor & _th_ilshift_(Tensor & self, const Tensor & other); +Tensor & s__th_ilshift_(Tensor & self, const Tensor & other); +Tensor & _th_rshift_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_rshift(const Tensor & self, Scalar other); +Tensor & _th_rshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_rshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_rshift(const Tensor & self, const Tensor & other); +Tensor s__th_rshift(const Tensor & self, const Tensor & other); +Tensor & _th_irshift_(Tensor & self, Scalar other); +Tensor & _th_irshift_(Tensor & self, const Tensor & other); +Tensor & s__th_irshift_(Tensor & self, const Tensor & other); +Tensor & _th_min_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_min_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_min(const Tensor & self, const Tensor & other); +Tensor s__th_min(const Tensor & self, const Tensor & other); +Tensor _th_min(const Tensor & self); +std::tuple _th_min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_min(const Tensor & self, int64_t dim, bool keepdim); +Tensor & _th_max_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_max_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_max(const Tensor & self, const Tensor & other); +Tensor s__th_max(const Tensor & self, const Tensor & other); +Tensor _th_max(const Tensor & self); +std::tuple _th_max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim); 
+std::tuple _th_max(const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_mode(const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool descending); +std::tuple _th_sort(const Tensor & self, int64_t dim, bool descending); +Tensor & _th_var_out(Tensor & result, const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_var(const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_var(const Tensor & self, bool unbiased); +Tensor & _th_std_out(Tensor & result, const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_std(const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_std(const Tensor & self, bool unbiased); +Tensor & _th_renorm_out(Tensor & result, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor _th_renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor & _th_renorm_(Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor _th_dist(const Tensor & self, const Tensor & other, Scalar p); +Tensor s__th_dist(const Tensor & self, const Tensor & other, Scalar p); +Tensor & _th_histc_out(Tensor & result, const Tensor & self, int64_t bins, Scalar min, Scalar max); +Tensor _th_histc(const Tensor & self, int64_t bins, Scalar min, Scalar max); +Tensor & _th_cumsum_out(Tensor & result, const Tensor & self, int64_t dim); +Tensor _th_cumsum(const Tensor & self, int64_t dim); +Tensor & _th_cumprod_out(Tensor & result, const Tensor & self, int64_t dim); +Tensor _th_cumprod(const Tensor & self, int64_t dim); +Tensor _th_trace(const Tensor & self); +Tensor & _th_fmod_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_fmod(const Tensor & self, Scalar other); +Tensor & _th_fmod_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_fmod_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_fmod(const Tensor & self, const Tensor & other); +Tensor s__th_fmod(const Tensor & self, const Tensor & other); +Tensor & _th_fmod_(Tensor & self, Scalar other); +Tensor & _th_fmod_(Tensor & self, const Tensor & other); +Tensor & s__th_fmod_(Tensor & self, const Tensor & other); +Tensor & _th_remainder_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_remainder(const Tensor & self, Scalar other); +Tensor & _th_remainder_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_remainder_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_remainder(const Tensor & self, const Tensor & other); +Tensor s__th_remainder(const Tensor & self, const Tensor & other); +Tensor & _th_remainder_(Tensor & self, Scalar other); +Tensor & _th_remainder_(Tensor & self, const Tensor & other); +Tensor & s__th_remainder_(Tensor & self, const Tensor & other); +Tensor _th_dot(const Tensor & self, const Tensor & tensor); +Tensor & _th_diag_out(Tensor & result, const Tensor & self, int64_t diagonal); +Tensor _th_diag(const Tensor & self, int64_t diagonal); +Tensor & _th_addmm_out(Tensor & result, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & s__th_addmm_out(Tensor & result, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor _th_addmm(const Tensor & self, const Tensor & mat1, const 
Tensor & mat2, Scalar beta, Scalar alpha); +Tensor s__th_addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & _th_addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & _th_addmv_out(Tensor & result, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & s__th_addmv_out(Tensor & result, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor _th_addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor s__th_addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & _th_addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & _th_addr_out(Tensor & result, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & s__th_addr_out(Tensor & result, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor _th_addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor s__th_addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & _th_addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & _th_ger_out(Tensor & result, const Tensor & self, const Tensor & vec2); +Tensor _th_ger(const Tensor & self, const Tensor & vec2); +Tensor & _th_mv_out(Tensor & result, const Tensor & self, const Tensor & vec); +Tensor _th_mv(const Tensor & self, const Tensor & vec); +Tensor & _th_mm_out(Tensor & result, const Tensor & self, const Tensor & mat2); +Tensor _th_mm(const Tensor & self, const Tensor & mat2); +Tensor & _th_addbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor & s__th_addbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor _th_addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor s__th_addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor & _th_addbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +std::tuple _th_gels_out(Tensor & res1, Tensor & res2, const Tensor & self, const Tensor & A); +std::tuple _th_gels(const Tensor & self, const Tensor & A); +std::tuple _th_eig_out(Tensor & res1, Tensor & res2, const Tensor & self, bool eigenvectors); +std::tuple _th_eig(const Tensor & self, bool eigenvectors); +Tensor & _th_potri_out(Tensor & output, const Tensor & self, bool upper); +Tensor _th_potri(const Tensor & self, bool upper); +std::tuple _th_geqrf_out(Tensor & res1, Tensor & res2, const Tensor & self); +std::tuple _th_geqrf(const Tensor & self); +Tensor & _th_orgqr_out(Tensor & result, const Tensor & self, const Tensor & input2); +Tensor _th_orgqr(const Tensor & self, const Tensor & input2); +Tensor & _th_ormqr_out(Tensor & result, const Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose); +Tensor _th_ormqr(const Tensor & self, const Tensor & input2, const Tensor & input3, bool left, bool transpose); +Tensor & _th_random_(Tensor & self, int64_t from, int64_t to, Generator * generator); +Tensor & _th_random_(Tensor & self, int64_t 
to, Generator * generator); +Tensor & _th_random_(Tensor & self, Generator * generator); +std::tuple _th_multinomial_alias_setup_out(Tensor & J, Tensor & q, const Tensor & probs); +std::tuple _th_multinomial_alias_setup(const Tensor & probs); +Tensor & _th_multinomial_alias_draw_out(Tensor & result, const Tensor & q, const Tensor & J, int64_t num_samples, Generator * generator); +Tensor _th_multinomial_alias_draw(const Tensor & q, const Tensor & J, int64_t num_samples, Generator * generator); +Tensor & _th_uniform_(Tensor & self, double from, double to, Generator * generator); +Tensor & _th_normal_out(Tensor & output, const Tensor & mean, double std, Generator * generator); +Tensor _th_normal(const Tensor & mean, double std, Generator * generator); +Tensor & _th_normal_out(Tensor & output, double mean, const Tensor & std, Generator * generator); +Tensor _th_normal(double mean, const Tensor & std, Generator * generator); +Tensor & _th_normal_out(Tensor & output, const Tensor & mean, const Tensor & std, Generator * generator); +Tensor _th_normal(const Tensor & mean, const Tensor & std, Generator * generator); +Tensor & _th_normal_(Tensor & self, double mean, double std, Generator * generator); +Tensor & _th_cauchy_(Tensor & self, double median, double sigma, Generator * generator); +Tensor & _th_log_normal_(Tensor & self, double mean, double std, Generator * generator); +Tensor & _th_exponential_(Tensor & self, double lambd, Generator * generator); +Tensor & _th_geometric_(Tensor & self, double p, Generator * generator); +Tensor & _th_cat_out(Tensor & self, TensorList tensors, int64_t dim); +Tensor _th_cat(TensorList tensors, int64_t dim); +Tensor & _thnn_binary_cross_entropy_forward_out(Tensor & output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor _thnn_binary_cross_entropy_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor & _thnn_binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor _thnn_binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor & _thnn_soft_margin_loss_forward_out(Tensor & output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor _thnn_soft_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); +Tensor & _thnn_soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor _thnn_soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor & _thnn_elu_forward_out(Tensor & output, const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor _thnn_elu_forward(const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor & _thnn_elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +Tensor _thnn_elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +Tensor & _thnn_elu_forward_(Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor & _thnn_glu_forward_out(Tensor & output, const Tensor & self, int64_t dim); +Tensor _thnn_glu_forward(const Tensor & self, int64_t dim); 
+Tensor & _thnn_glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim); +Tensor _thnn_glu_backward(const Tensor & grad_output, const Tensor & self, int64_t dim); +Tensor & _thnn_hardtanh_forward_out(Tensor & output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor _thnn_hardtanh_forward(const Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor _thnn_hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_hardtanh_forward_(Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_leaky_relu_forward_out(Tensor & output, const Tensor & self, Scalar negative_slope); +Tensor _thnn_leaky_relu_forward(const Tensor & self, Scalar negative_slope); +Tensor & _thnn_leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +Tensor _thnn_leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +Tensor & _thnn_leaky_relu_forward_(Tensor & self, Scalar negative_slope); +std::tuple _thnn_log_sigmoid_forward_out(Tensor & output, Tensor & buffer, const Tensor & self); +std::tuple _thnn_log_sigmoid_forward(const Tensor & self); +Tensor & _thnn_log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +Tensor _thnn_log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +Tensor & _thnn_rrelu_with_noise_forward_out(Tensor & output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor _thnn_rrelu_with_noise_forward(const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor & _thnn_rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +Tensor _thnn_rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +Tensor & _thnn_rrelu_with_noise_forward_(Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor & _thnn_softplus_forward_out(Tensor & output, const Tensor & self, Scalar beta, Scalar threshold); +Tensor _thnn_softplus_forward(const Tensor & self, Scalar beta, Scalar threshold); +Tensor & _thnn_softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +Tensor _thnn_softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +Tensor & _thnn_softshrink_forward_out(Tensor & output, const Tensor & self, Scalar lambd); +Tensor _thnn_softshrink_forward(const Tensor & self, Scalar lambd); +Tensor & _thnn_softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd); +Tensor _thnn_softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd); +Tensor & _thnn_tanh_forward_out(Tensor & output, const Tensor & self); +Tensor _thnn_tanh_forward(const Tensor & self); +Tensor & _thnn_tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); +Tensor 
_thnn_tanh_backward(const Tensor & grad_output, const Tensor & output); + +} // namespace th +} // namespace legacy +} // namespace native +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCUDA.h b/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCUDA.h new file mode 100644 index 0000000000..6534446241 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/LegacyTHFunctionsCUDA.h @@ -0,0 +1,300 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include + +namespace c10 { +class Scalar; +} +namespace at { +struct Generator; +class Tensor; +struct Type; +} // namespace at + +namespace at { +namespace native { +namespace legacy { +namespace cuda { + +Tensor & _th_set_(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride); +Tensor & _th_set_(Tensor & self, const Tensor & source); +Tensor & _th_masked_fill_(Tensor & self, const Tensor & mask, Scalar value); +Tensor & s__th_masked_fill_(Tensor & self, const Tensor & mask, Scalar value); +Tensor & _th_masked_fill_bool_(Tensor & self, const Tensor & mask, Scalar value); +Tensor & s__th_masked_fill_bool_(Tensor & self, const Tensor & mask, Scalar value); +Tensor & _th_masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source); +Tensor & s__th_masked_scatter_(Tensor & self, const Tensor & mask, const Tensor & source); +Tensor & _th_masked_scatter_bool_(Tensor & self, const Tensor & mask, const Tensor & source); +Tensor & s__th_masked_scatter_bool_(Tensor & self, const Tensor & mask, const Tensor & source); +Tensor & _th_masked_select_out(Tensor & result, const Tensor & self, const Tensor & mask); +Tensor & s__th_masked_select_out(Tensor & result, const Tensor & self, const Tensor & mask); +Tensor _th_masked_select(const Tensor & self, const Tensor & mask); +Tensor s__th_masked_select(const Tensor & self, const Tensor & mask); +Tensor & _th_masked_select_bool_out(Tensor & result, const Tensor & self, const Tensor & mask); +Tensor & s__th_masked_select_bool_out(Tensor & result, const Tensor & self, const Tensor & mask); +Tensor _th_masked_select_bool(const Tensor & self, const Tensor & mask); +Tensor s__th_masked_select_bool(const Tensor & self, const Tensor & mask); +Tensor & _th_nonzero_out(Tensor & result, const Tensor & self); +Tensor _th_nonzero(const Tensor & self); +Tensor _th_clone(const Tensor & self); +Tensor & _th_index_select_out(Tensor & result, const Tensor & self, int64_t dim, const Tensor & index); +Tensor _th_index_select(const Tensor & self, int64_t dim, const Tensor & index); +Tensor & _th_index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +Tensor & _th_take_out(Tensor & result, const Tensor & self, const Tensor & index); +Tensor _th_take(const Tensor & self, const Tensor & index); +Tensor & _th_put_(Tensor & self, const Tensor & index, const Tensor & source, bool accumulate); +Tensor & _th_index_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +Tensor & _th_index_fill_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); +Tensor & _th_scatter_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +Tensor & _th_scatter_(Tensor & self, int64_t dim, const Tensor & index, Scalar value); +Tensor & _th_scatter_add_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +Tensor & _th_gather_out(Tensor & result, const Tensor & self, int64_t dim, const Tensor & index); +Tensor _th_gather(const Tensor & self, int64_t 
dim, const Tensor & index); +bool _th_equal(const Tensor & self, const Tensor & other); +Tensor & _th_and_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_and(const Tensor & self, Scalar other); +Tensor & _th_and_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_and_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_and(const Tensor & self, const Tensor & other); +Tensor s__th_and(const Tensor & self, const Tensor & other); +Tensor & _th_iand_(Tensor & self, Scalar other); +Tensor & _th_iand_(Tensor & self, const Tensor & other); +Tensor & s__th_iand_(Tensor & self, const Tensor & other); +Tensor & _th_or_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_or(const Tensor & self, Scalar other); +Tensor & _th_or_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_or_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_or(const Tensor & self, const Tensor & other); +Tensor s__th_or(const Tensor & self, const Tensor & other); +Tensor & _th_ior_(Tensor & self, Scalar other); +Tensor & _th_ior_(Tensor & self, const Tensor & other); +Tensor & s__th_ior_(Tensor & self, const Tensor & other); +Tensor & _th_lshift_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_lshift(const Tensor & self, Scalar other); +Tensor & _th_lshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_lshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_lshift(const Tensor & self, const Tensor & other); +Tensor s__th_lshift(const Tensor & self, const Tensor & other); +Tensor & _th_ilshift_(Tensor & self, Scalar other); +Tensor & _th_ilshift_(Tensor & self, const Tensor & other); +Tensor & s__th_ilshift_(Tensor & self, const Tensor & other); +Tensor & _th_rshift_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_rshift(const Tensor & self, Scalar other); +Tensor & _th_rshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_rshift_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_rshift(const Tensor & self, const Tensor & other); +Tensor s__th_rshift(const Tensor & self, const Tensor & other); +Tensor & _th_irshift_(Tensor & self, Scalar other); +Tensor & _th_irshift_(Tensor & self, const Tensor & other); +Tensor & s__th_irshift_(Tensor & self, const Tensor & other); +Tensor & _th_min_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_min_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_min(const Tensor & self, const Tensor & other); +Tensor s__th_min(const Tensor & self, const Tensor & other); +Tensor _th_min(const Tensor & self); +std::tuple _th_min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_min(const Tensor & self, int64_t dim, bool keepdim); +Tensor & _th_max_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_max_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_max(const Tensor & self, const Tensor & other); +Tensor s__th_max(const Tensor & self, const Tensor & other); +Tensor _th_max(const Tensor & self); +std::tuple _th_max_out(Tensor & max, Tensor & max_indices, const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_max(const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_mode_out(Tensor & values, Tensor & indices, const 
Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_mode(const Tensor & self, int64_t dim, bool keepdim); +std::tuple _th_sort_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool descending); +std::tuple _th_sort(const Tensor & self, int64_t dim, bool descending); +std::tuple _th_topk_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); +std::tuple _th_topk(const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); +Tensor & _th_exp_out(Tensor & result, const Tensor & self); +Tensor _th_exp(const Tensor & self); +Tensor & _th_cos_out(Tensor & result, const Tensor & self); +Tensor _th_cos(const Tensor & self); +Tensor & _th_cosh_out(Tensor & result, const Tensor & self); +Tensor _th_cosh(const Tensor & self); +Tensor & _th_tan_out(Tensor & result, const Tensor & self); +Tensor _th_tan(const Tensor & self); +Tensor & _th_atan_out(Tensor & result, const Tensor & self); +Tensor _th_atan(const Tensor & self); +Tensor & _th_tanh_out(Tensor & result, const Tensor & self); +Tensor _th_tanh(const Tensor & self); +Tensor & _th_erf_out(Tensor & result, const Tensor & self); +Tensor _th_erf(const Tensor & self); +Tensor & _th_erfc_out(Tensor & result, const Tensor & self); +Tensor _th_erfc(const Tensor & self); +Tensor & _th_var_out(Tensor & result, const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_var(const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_var(const Tensor & self, bool unbiased); +Tensor & _th_std_out(Tensor & result, const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_std(const Tensor & self, int64_t dim, bool unbiased, bool keepdim); +Tensor _th_std(const Tensor & self, bool unbiased); +Tensor & _th_renorm_out(Tensor & result, const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor _th_renorm(const Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor & _th_renorm_(Tensor & self, Scalar p, int64_t dim, Scalar maxnorm); +Tensor _th_dist(const Tensor & self, const Tensor & other, Scalar p); +Tensor s__th_dist(const Tensor & self, const Tensor & other, Scalar p); +Tensor & _th_reciprocal_out(Tensor & result, const Tensor & self); +Tensor _th_reciprocal(const Tensor & self); +Tensor & _th_cumsum_out(Tensor & result, const Tensor & self, int64_t dim); +Tensor _th_cumsum(const Tensor & self, int64_t dim); +Tensor & _th_cumprod_out(Tensor & result, const Tensor & self, int64_t dim); +Tensor _th_cumprod(const Tensor & self, int64_t dim); +Tensor _th_trace(const Tensor & self); +Tensor & _th_fmod_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_fmod(const Tensor & self, Scalar other); +Tensor & _th_fmod_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_fmod_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor _th_fmod(const Tensor & self, const Tensor & other); +Tensor s__th_fmod(const Tensor & self, const Tensor & other); +Tensor & _th_fmod_(Tensor & self, Scalar other); +Tensor & _th_fmod_(Tensor & self, const Tensor & other); +Tensor & s__th_fmod_(Tensor & self, const Tensor & other); +Tensor & _th_remainder_out(Tensor & result, const Tensor & self, Scalar other); +Tensor _th_remainder(const Tensor & self, Scalar other); +Tensor & _th_remainder_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor & s__th_remainder_out(Tensor & result, const Tensor & self, const Tensor & other); +Tensor 
_th_remainder(const Tensor & self, const Tensor & other); +Tensor s__th_remainder(const Tensor & self, const Tensor & other); +Tensor & _th_remainder_(Tensor & self, Scalar other); +Tensor & _th_remainder_(Tensor & self, const Tensor & other); +Tensor & s__th_remainder_(Tensor & self, const Tensor & other); +Tensor & _th_clamp_out(Tensor & result, const Tensor & self, Scalar min, Scalar max); +Tensor _th_clamp(const Tensor & self, Scalar min, Scalar max); +Tensor & _th_clamp_min_out(Tensor & result, const Tensor & self, Scalar min); +Tensor _th_clamp_min(const Tensor & self, Scalar min); +Tensor & _th_clamp_max_out(Tensor & result, const Tensor & self, Scalar max); +Tensor _th_clamp_max(const Tensor & self, Scalar max); +Tensor _th_dot(const Tensor & self, const Tensor & tensor); +Tensor & _th_cross_kernel_out(Tensor & result, const Tensor & self, const Tensor & other, int64_t dim); +Tensor _th_cross_kernel(const Tensor & self, const Tensor & other, int64_t dim); +Tensor & _th_diag_out(Tensor & result, const Tensor & self, int64_t diagonal); +Tensor _th_diag(const Tensor & self, int64_t diagonal); +Tensor & _th_addmm_out(Tensor & result, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & s__th_addmm_out(Tensor & result, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor _th_addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor s__th_addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & _th_addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); +Tensor & _th_addmv_out(Tensor & result, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & s__th_addmv_out(Tensor & result, const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor _th_addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor s__th_addmv(const Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & _th_addmv_(Tensor & self, const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha); +Tensor & _th_addr_out(Tensor & result, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & s__th_addr_out(Tensor & result, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor _th_addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor s__th_addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & _th_addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); +Tensor & _th_ger_out(Tensor & result, const Tensor & self, const Tensor & vec2); +Tensor _th_ger(const Tensor & self, const Tensor & vec2); +Tensor & _th_mv_out(Tensor & result, const Tensor & self, const Tensor & vec); +Tensor _th_mv(const Tensor & self, const Tensor & vec); +Tensor & _th_mm_out(Tensor & result, const Tensor & self, const Tensor & mat2); +Tensor _th_mm(const Tensor & self, const Tensor & mat2); +Tensor & _th_bmm_out(Tensor & result, const Tensor & self, const Tensor & mat2); +Tensor _th_bmm(const Tensor & self, const Tensor & mat2); +Tensor & _th_addbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar 
beta, Scalar alpha); +Tensor & s__th_addbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor _th_addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor s__th_addbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor & _th_addbmm_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor & _th_baddbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor & s__th_baddbmm_out(Tensor & result, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor _th_baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +Tensor s__th_baddbmm(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); +std::tuple _th_gels_out(Tensor & res1, Tensor & res2, const Tensor & self, const Tensor & A); +std::tuple _th_gels(const Tensor & self, const Tensor & A); +std::tuple _th_eig_out(Tensor & res1, Tensor & res2, const Tensor & self, bool eigenvectors); +std::tuple _th_eig(const Tensor & self, bool eigenvectors); +Tensor & _th_potri_out(Tensor & output, const Tensor & self, bool upper); +Tensor _th_potri(const Tensor & self, bool upper); +std::tuple _th_geqrf_out(Tensor & res1, Tensor & res2, const Tensor & self); +std::tuple _th_geqrf(const Tensor & self); +std::tuple _th_multinomial_alias_setup_out(Tensor & J, Tensor & q, const Tensor & probs); +std::tuple _th_multinomial_alias_setup(const Tensor & probs); +Tensor & _th_multinomial_alias_draw_out(Tensor & result, const Tensor & q, const Tensor & J, int64_t num_samples, Generator * generator); +Tensor _th_multinomial_alias_draw(const Tensor & q, const Tensor & J, int64_t num_samples, Generator * generator); +Tensor & _th_copy_ignoring_overlaps_(Tensor & self, const Tensor & src); +Tensor & _th_cat_out(Tensor & self, TensorList tensors, int64_t dim); +Tensor _th_cat(TensorList tensors, int64_t dim); +Tensor & _thnn_binary_cross_entropy_forward_out(Tensor & output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor _thnn_binary_cross_entropy_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor & _thnn_binary_cross_entropy_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor _thnn_binary_cross_entropy_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction); +Tensor & _thnn_multi_margin_loss_forward_out(Tensor & output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); +Tensor _thnn_multi_margin_loss_forward(const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); +Tensor & _thnn_multi_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); +Tensor _thnn_multi_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight, int64_t reduction); +std::tuple 
_thnn_multilabel_margin_loss_forward_out(Tensor & output, Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction); +std::tuple _thnn_multilabel_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); +Tensor & _thnn_multilabel_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +Tensor _thnn_multilabel_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +std::tuple _thnn_nll_loss_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +std::tuple _thnn_nll_loss_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +Tensor & _thnn_nll_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +Tensor _thnn_nll_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +std::tuple _thnn_nll_loss2d_forward_out(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +std::tuple _thnn_nll_loss2d_forward(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +Tensor & _thnn_nll_loss2d_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +Tensor _thnn_nll_loss2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +Tensor & _thnn_soft_margin_loss_forward_out(Tensor & output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor _thnn_soft_margin_loss_forward(const Tensor & self, const Tensor & target, int64_t reduction); +Tensor & _thnn_soft_margin_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor _thnn_soft_margin_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +Tensor & _thnn_elu_forward_out(Tensor & output, const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor _thnn_elu_forward(const Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor & _thnn_elu_backward_out(Tensor & grad_input, const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +Tensor _thnn_elu_backward(const Tensor & grad_output, Scalar alpha, Scalar scale, Scalar input_scale, const Tensor & output); +Tensor & _thnn_elu_forward_(Tensor & self, Scalar alpha, Scalar scale, Scalar input_scale); +Tensor & _thnn_glu_forward_out(Tensor & output, const Tensor & self, int64_t dim); +Tensor _thnn_glu_forward(const Tensor & self, int64_t dim); +Tensor & _thnn_glu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, int64_t dim); +Tensor _thnn_glu_backward(const Tensor & grad_output, 
const Tensor & self, int64_t dim); +Tensor & _thnn_hardtanh_forward_out(Tensor & output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor _thnn_hardtanh_forward(const Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_hardtanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor _thnn_hardtanh_backward(const Tensor & grad_output, const Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_hardtanh_forward_(Tensor & self, Scalar min_val, Scalar max_val); +Tensor & _thnn_leaky_relu_forward_out(Tensor & output, const Tensor & self, Scalar negative_slope); +Tensor _thnn_leaky_relu_forward(const Tensor & self, Scalar negative_slope); +Tensor & _thnn_leaky_relu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +Tensor _thnn_leaky_relu_backward(const Tensor & grad_output, const Tensor & self, Scalar negative_slope); +Tensor & _thnn_leaky_relu_forward_(Tensor & self, Scalar negative_slope); +std::tuple _thnn_log_sigmoid_forward_out(Tensor & output, Tensor & buffer, const Tensor & self); +std::tuple _thnn_log_sigmoid_forward(const Tensor & self); +Tensor & _thnn_log_sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +Tensor _thnn_log_sigmoid_backward(const Tensor & grad_output, const Tensor & self, const Tensor & buffer); +Tensor & _thnn_rrelu_with_noise_forward_out(Tensor & output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor _thnn_rrelu_with_noise_forward(const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor & _thnn_rrelu_with_noise_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +Tensor _thnn_rrelu_with_noise_backward(const Tensor & grad_output, const Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training); +Tensor & _thnn_rrelu_with_noise_forward_(Tensor & self, const Tensor & noise, Scalar lower, Scalar upper, bool training, Generator * generator); +Tensor & _thnn_softplus_forward_out(Tensor & output, const Tensor & self, Scalar beta, Scalar threshold); +Tensor _thnn_softplus_forward(const Tensor & self, Scalar beta, Scalar threshold); +Tensor & _thnn_softplus_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +Tensor _thnn_softplus_backward(const Tensor & grad_output, const Tensor & self, Scalar beta, Scalar threshold, const Tensor & output); +Tensor & _thnn_softshrink_forward_out(Tensor & output, const Tensor & self, Scalar lambd); +Tensor _thnn_softshrink_forward(const Tensor & self, Scalar lambd); +Tensor & _thnn_softshrink_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, Scalar lambd); +Tensor _thnn_softshrink_backward(const Tensor & grad_output, const Tensor & self, Scalar lambd); +Tensor & _thnn_tanh_forward_out(Tensor & output, const Tensor & self); +Tensor _thnn_tanh_forward(const Tensor & self); +Tensor & _thnn_tanh_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); +Tensor _thnn_tanh_backward(const Tensor & grad_output, const Tensor & output); +std::tuple _thnn_conv2d_forward_out(Tensor & output, Tensor & columns, Tensor & ones, const Tensor & self, 
const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +std::tuple _thnn_conv2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +std::tuple _thnn_conv2d_backward_out(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & columns, const Tensor & ones); +std::tuple _thnn_conv2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & columns, const Tensor & ones, std::array output_mask); +Tensor & _thnn_conv_depthwise2d_forward_out(Tensor & output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +Tensor _thnn_conv_depthwise2d_forward(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +std::tuple _thnn_conv_depthwise2d_backward_out(Tensor & grad_input, Tensor & grad_weight, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); +std::tuple _thnn_conv_depthwise2d_backward(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); + +} // namespace th +} // namespace legacy +} // namespace native +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/MatrixRef.h b/thirdparty/libtorch/include/ATen/MatrixRef.h new file mode 100644 index 0000000000..d6bcc08add --- /dev/null +++ b/thirdparty/libtorch/include/ATen/MatrixRef.h @@ -0,0 +1,100 @@ +#pragma once +#include +#include + +#include + +namespace at { + /// MatrixRef - Like an ArrayRef, but with an extra recorded strides so that + /// we can easily view it as a multidimensional array. + /// + /// Like ArrayRef, this class does not own the underlying data, it is expected + /// to be used in situations where the data resides in some other buffer. + /// + /// This is intended to be trivially copyable, so it should be passed by + /// value. + /// + /// For now, 2D only (so the copies are actually cheap, without having + /// to write a SmallVector class) and contiguous only (so we can + /// return non-strided ArrayRef on index). + /// + /// P.S. dimension 0 indexes rows, dimension 1 indexes columns + template + class MatrixRef { + public: + typedef size_t size_type; + + private: + /// Underlying ArrayRef + ArrayRef arr; + + /// Stride of dim 0 (outer dimension) + size_type stride0; + + // Stride of dim 1 is assumed to be 1 + + public: + /// Construct an empty Matrixref. + /*implicit*/ MatrixRef() : arr(nullptr), stride0(0) {} + + /// Construct an MatrixRef from an ArrayRef and outer stride. + /*implicit*/ MatrixRef(ArrayRef arr, size_type stride0) + : arr(arr), stride0(stride0) { + TORCH_CHECK(arr.size() % stride0 == 0, "MatrixRef: ArrayRef size ", arr.size(), " not divisible by stride ", stride0) + } + + /// @} + /// @name Simple Operations + /// @{ + + /// empty - Check if the matrix is empty. 
+ bool empty() const { return arr.empty(); } + + const T *data() const { return arr.data(); } + + /// size - Get size a dimension + size_t size(size_t dim) const { + if (dim == 0) { + return arr.size() / stride0; + } else if (dim == 1) { + return stride0; + } else { + TORCH_CHECK(0, "MatrixRef: out of bounds dimension ", dim, "; expected 0 or 1"); + } + } + + size_t numel() const { + return arr.size(); + } + + /// equals - Check for element-wise equality. + bool equals(MatrixRef RHS) const { + return stride0 == RHS.stride0 && arr.equals(RHS.arr); + } + + /// @} + /// @name Operator Overloads + /// @{ + ArrayRef operator[](size_t Index) const { + return arr.slice(Index*stride0, stride0); + } + + /// Disallow accidental assignment from a temporary. + /// + /// The declaration here is extra complicated so that "arrayRef = {}" + /// continues to select the move assignment operator. + template + typename std::enable_if::value, MatrixRef>::type & + operator=(U &&Temporary) = delete; + + /// Disallow accidental assignment from a temporary. + /// + /// The declaration here is extra complicated so that "arrayRef = {}" + /// continues to select the move assignment operator. + template + typename std::enable_if::value, MatrixRef>::type & + operator=(std::initializer_list) = delete; + + }; + +} // end namespace at diff --git a/thirdparty/libtorch/include/ATen/MemoryOverlap.h b/thirdparty/libtorch/include/ATen/MemoryOverlap.h new file mode 100644 index 0000000000..e997869c52 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/MemoryOverlap.h @@ -0,0 +1,30 @@ +#pragma once + +#include + +namespace at { + +// MemOverlap: Whether or not there is memory overlap +// +// NO: Absolutely no memory overlap +// YES: Absolutely yes memory overlap +// TOO_HARD: There might be memory overlap, but it was too expensive to compute. +// +// NB: Please update the python test for these if you renumber them. 
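// ---------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of the vendored libtorch
// headers.] MatrixRef, defined just above in MatrixRef.h, is a non-owning 2-D
// view over a contiguous buffer: it pairs an ArrayRef with the stride of
// dimension 0, and operator[] returns one row as a plain ArrayRef. Roughly:
#include <ATen/MatrixRef.h>
#include <vector>

void matrix_ref_example() {
  std::vector<float> buf = {1, 2, 3, 4, 5, 6};          // 2 rows x 3 columns
  at::MatrixRef<float> m(at::ArrayRef<float>(buf), 3);  // stride0 == row length
  // m.size(0) == 2, m.size(1) == 3, m.numel() == 6
  at::ArrayRef<float> row1 = m[1];                      // view of {4, 5, 6}
  (void)row1;
}
// ---------------------------------------------------------------------------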
+enum class MemOverlap { NO, YES, TOO_HARD }; + +enum class MemOverlapStatus { FULL, PARTIAL, NO, TOO_HARD }; + +CAFFE2_API MemOverlap has_internal_overlap(const Tensor& t); +CAFFE2_API MemOverlap has_internal_overlap(TensorImpl* t); + +CAFFE2_API void assert_no_internal_overlap(const Tensor& t); +CAFFE2_API void assert_no_internal_overlap(TensorImpl* t); + +MemOverlapStatus get_overlap_status(const Tensor& a, const Tensor& b); +MemOverlapStatus get_overlap_status(TensorImpl* a, TensorImpl* b); + +void assert_no_partial_overlap(const Tensor& a, const Tensor& b); +void assert_no_partial_overlap(TensorImpl* a, TensorImpl* b); + +} diff --git a/thirdparty/libtorch/include/ATen/MkldnnCPUType.h b/thirdparty/libtorch/include/ATen/MkldnnCPUType.h new file mode 100644 index 0000000000..5087963838 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/MkldnnCPUType.h @@ -0,0 +1,67 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +namespace c10 { +struct Storage; +} + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +class Context; +struct Generator; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +#ifdef USE_STATIC_DISPATCH +namespace MkldnnCPUType { + Tensor add(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional memory_format); + Tensor mkldnn_linear(const Tensor & input, const Tensor & weight, const Tensor & bias); + Tensor mkldnn_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor mul(const Tensor & self, const Tensor & other); + Tensor & mul_(Tensor & self, const Tensor & other); + Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); + std::tuple native_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); + Tensor _mkldnn_reshape(const Tensor & self, IntArrayRef shape); + Tensor relu(const Tensor & self); + Tensor & relu_(Tensor & self); + Tensor sigmoid(const Tensor & self); + Tensor & sigmoid_(Tensor & self); + Tensor _softmax(const Tensor & self, int64_t dim, bool half_to_float); + Tensor _mkldnn_transpose(const Tensor & self, int64_t dim0, int64_t dim1); + Tensor & _mkldnn_transpose_(Tensor & self, int64_t dim0, int64_t dim1); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor & zero_(Tensor & self); + Tensor to_dense(const Tensor & self); + Tensor mkldnn_reorder_conv2d_weight(const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups); + Tensor view(const Tensor & self, IntArrayRef size); + Tensor & adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); + Tensor mkldnn_adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor & avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional 
divisor_override); + Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +} +#endif + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/NamedTensor.h b/thirdparty/libtorch/include/ATen/NamedTensor.h new file mode 100644 index 0000000000..a7606b0a66 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/NamedTensor.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/NamedTensorUtils.h b/thirdparty/libtorch/include/ATen/NamedTensorUtils.h new file mode 100644 index 0000000000..4b75420935 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/NamedTensorUtils.h @@ -0,0 +1,165 @@ +#pragma once +#include +#include +#include + +#include +#include +#include + +#ifdef BUILD_NAMEDTENSOR +namespace at { + +using NameVector = SmallVector; + +inline bool has_names(TensorList tensors) { + return std::any_of( + tensors.begin(), tensors.end(), [](const Tensor& t) { return t.has_names(); }); +} + +// Converts dim to an positional index. Errors if `dim` cannot be used to +// refer to any dimension of tensor. +CAFFE2_API int64_t dimname_to_position(const Tensor& tensor, Dimname dim); +CAFFE2_API std::vector dimnames_to_positions(const Tensor& tensor, DimnameList dims); + +// Unifies two DimnameList to produce a third. This is useful for implementing +// the named inference rule for binary broadcasting operations like add. +// +// There are three main constraints: +// 1) Check matching: Names must match positionally from the right. +// 2) Check misaligned: If a name `n` is in `names`, then it must appear at +// the same index from the right in other. +// 3) The output names are obtained by unifying the names individually from the right. +CAFFE2_API std::vector +unify_from_right(DimnameList names, DimnameList other, const char* action = "broadcast"); + +[[noreturn]] inline void reportNYIDimnameOverload(const char* op_name) { + TORCH_CHECK( + false, + op_name, ": You passed a dimname (string) to this op in place of a dimension " + "index but it does not yet support this behavior. Please pass a dimension " + "index to work around this."); +} + +// [NOTE] Writing name inference rules +// +// Operators that support named tensors are either composed of operations that +// support named tensors or implement some name inference rule. An op that +// implements its own name inference rule generally looks like the following: +// +// Tensor op(...) { +// perform_shape_checks(...); +// # (1) +// auto maybe_outnames = compute_outnames(...); +// auto result = [&]() { +// NoNamesGuard guard; +// return op_impl(...); +// }(); +// # (2) +// propagate_names_if_nonempty(result, maybe_outnames); +// +// Each op has (1) a compute outnames step and (2) a propagate names step. +// +// compute_outnames is responsible for checking that input names match and +// determining what the output names should be. It returns either: +// - {} (if the inputs tensors are all unnamed) +// - non-empty outnames. +// +// propagate_names_if_nonempty propagates the outnames if they exist to the result +// tensors. +// +// The {} case is an optimization; if the user does not use named tensors they +// pay no perf cost for it. + +namespace namedinference { + +// Propagates `names` to `result` if `names` is not empty. +// `names` can be empty; see [NOTE] Writing name inference rules +// If `names` is not empty, `names.size()` should equal `result.dim()`. 
+// When in doubt, use this overload instead of the others. +CAFFE2_API Tensor& propagate_names_if_nonempty( + Tensor& result, + DimnameList maybe_names, + bool validate_names = false); + +// Propagates `names` to `result`. Only use this if we are certain that there are +// names to propagate (that names is not empty). +CAFFE2_API Tensor& propagate_names( + Tensor& result, + DimnameList names, + bool validate_names = false); + +// Propagates all names from src to result. +CAFFE2_API void propagate_names(Tensor& result, const Tensor& src); + +// Propagates all names except for those at the excluded_idxs. +void propagate_names_except(Tensor& result, const Tensor& src, IntArrayRef excluded_idxs); + +// Used for reduction ops that have a `keepdim` arg. +void propagate_names_for_reduction(Tensor& result, const Tensor& src, IntArrayRef excluded_idxs, bool keepdim); + +void propagate_names_for_expand(Tensor& result, const Tensor& self); + +std::vector compute_cat_outnames(TensorList tensors); + +std::vector compute_broadcast_outnames( + const Tensor& self, + const Tensor& other); + +std::vector broadcast_to_outnames( + const Tensor& tensor, + const Tensor& reference_tensor, + const char* op_name); + +std::vector compute_matmul_outnames(const Tensor& self, const Tensor& other); + +std::vector compute_cdist_outnames(const Tensor& self, const Tensor& other); + +std::vector compute_bmm_outnames( + Tensor& result, + const Tensor& self, + const Tensor& other); + +std::vector compute_squeeze_outnames(const Tensor& tensor); + +// TensorImpl* overloads for Legacy TH/THC code. Use these sparingly. + +TensorImpl* propagate_names_if_nonempty( + TensorImpl* result, + DimnameList maybe_names, + bool validate_names = false); + +TensorImpl* propagate_names( + TensorImpl* result, + DimnameList names, + bool validate_names = false); + +void propagate_names(TensorImpl* result, /*const */TensorImpl* src); + +// result = m1 @ m2 + bias +void propagate_names_for_addmm( + TensorImpl* result, + /*const*/TensorImpl* m1, + /*const*/TensorImpl* m2, + /*const*/TensorImpl* bias); + +void propagate_names_for_addmv( + TensorImpl* result, + TensorImpl* mat, + TensorImpl* vec, + TensorImpl* bias); + +void check_names_for_dot(TensorImpl* vec1, TensorImpl* vec2); + +std::vector compute_baddbmm_outnames( + TensorImpl* result, + TensorImpl* self, + TensorImpl* other, + TensorImpl* bias); + +bool are_names_equal(TensorImpl* self, TensorImpl* other); + +} // namespace namedinference + +} // namespace at +#endif diff --git a/thirdparty/libtorch/include/ATen/NativeFunctions.h b/thirdparty/libtorch/include/ATen/NativeFunctions.h new file mode 100644 index 0000000000..9ccbe8151d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/NativeFunctions.h @@ -0,0 +1,1709 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace c10 { +class Scalar; +} +namespace at { +struct Generator; +class Tensor; +struct Type; +} // namespace at + +namespace at { +namespace native { + +// These functions are defined in native/TensorFactories.cpp. 
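// ---------------------------------------------------------------------------
// [Editor's note: illustrative sketch, not part of the vendored libtorch
// headers.] The "[NOTE] Writing name inference rules" block in
// NamedTensorUtils.h (earlier hunk) describes a two-step pattern for
// named-tensor-aware ops: compute the output names from the inputs, run the
// unnamed kernel, then propagate the names onto the result. A hypothetical
// binary op following that pattern might look roughly like this
// (my_named_add and the self.add() stand-in are assumptions for illustration;
// at::NoNamesGuard is assumed to be available via the named-tensor headers):
#ifdef BUILD_NAMEDTENSOR
#include <ATen/ATen.h>
#include <ATen/NamedTensorUtils.h>

at::Tensor my_named_add(const at::Tensor& self, const at::Tensor& other) {
  // (1) unify the input names from the right (errors on a mismatch)
  auto maybe_outnames =
      at::namedinference::compute_broadcast_outnames(self, other);
  // (2) run the actual computation with names temporarily ignored
  at::Tensor result;
  {
    at::NoNamesGuard guard;
    result = self.add(other);  // stand-in for the real unnamed kernel
  }
  // (3) attach the names to the result, if any input was named
  at::namedinference::propagate_names_if_nonempty(result, maybe_outnames);
  return result;
}
#endif  // BUILD_NAMEDTENSOR
// ---------------------------------------------------------------------------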
+#define TENSOR(T, S) \ + CAFFE2_API Tensor tensor(ArrayRef values, const TensorOptions& options); \ + inline Tensor tensor( \ + std::initializer_list values, const TensorOptions& options) { \ + return native::tensor(ArrayRef(values), options); \ + } \ + inline Tensor tensor(T value, const TensorOptions& options) { \ + return native::tensor(ArrayRef(value), options); \ + } \ + inline Tensor tensor(ArrayRef values) { \ + return native::tensor(std::move(values), at::dtype(k##S)); \ + } \ + inline Tensor tensor(std::initializer_list values) { \ + return native::tensor(ArrayRef(values)); \ + } \ + inline Tensor tensor(T value) { \ + return native::tensor(ArrayRef(value)); \ + } +AT_FORALL_SCALAR_TYPES_AND3(Bool, Half, BFloat16, TENSOR) +#undef TENSOR + +CAFFE2_API Tensor _cast_Byte(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Char(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Double(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Float(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Int(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Long(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Short(const Tensor & self, bool non_blocking=false); +CAFFE2_API Tensor _cast_Half(const Tensor & self, bool non_blocking=false); +CAFFE2_API void backward(const Tensor & self, const Tensor & gradient={}, bool keep_graph=false, bool create_graph=false); +CAFFE2_API void set_data(const Tensor & self, const Tensor & new_data); +CAFFE2_API Tensor data(const Tensor & self); +CAFFE2_API bool is_leaf(const Tensor & self); +CAFFE2_API int64_t output_nr(const Tensor & self); +CAFFE2_API int64_t _version(const Tensor & self); +CAFFE2_API Tensor & requires_grad_(Tensor & self, bool _requires_grad=true); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & rename_(Tensor & self, c10::optional names); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor rename(const Tensor & self, c10::optional names); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor align_to(const Tensor & self, DimnameList names); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor align_to(const Tensor & self, DimnameList order, int64_t ellipsis_idx); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor align_as(const Tensor & self, const Tensor & other); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::vector align_tensors(TensorList tensors); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor refine_names(const Tensor & self, DimnameList names); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor unflatten(const Tensor & self, Dimname dim, IntArrayRef sizes, DimnameList names); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor unflatten(const Tensor & self, int64_t dim, IntArrayRef sizes, DimnameList names); +#endif +CAFFE2_API bool _use_cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank); +CAFFE2_API std::tuple _cudnn_ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, bool deterministic, bool zero_infinity); +CAFFE2_API Tensor _cudnn_rnn_flatten_weight(TensorList weight_arr, int64_t weight_stride0, int64_t input_size, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, bool bidirectional); +CAFFE2_API std::tuple _cudnn_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const 
Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); +CAFFE2_API std::tuple> _cudnn_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); +CAFFE2_API Tensor _cudnn_init_dropout_state(double dropout, bool train, int64_t dropout_seed, const TensorOptions & options); +CAFFE2_API int64_t _debug_has_internal_overlap(const Tensor & self); +CAFFE2_API std::tuple fused_dropout_cuda(const Tensor & self, double p, Generator * generator=nullptr); +CAFFE2_API Tensor masked_scale_cuda(const Tensor & self, const Tensor & mask, double scale); +CAFFE2_API std::tuple _sobol_engine_draw(const Tensor & quasi, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated, c10::optional dtype); +CAFFE2_API Tensor & _sobol_engine_ff_(Tensor & self, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated); +CAFFE2_API Tensor & _sobol_engine_scramble_(Tensor & self, const Tensor & ltm, int64_t dimension); +CAFFE2_API Tensor & _sobol_engine_initialize_state_(Tensor & self, int64_t dimension); +CAFFE2_API Tensor _reshape_from_tensor(const Tensor & self, const Tensor & shape); +CAFFE2_API Tensor _shape_as_tensor(const Tensor & self); +CAFFE2_API Tensor dropout(const Tensor & input, double p, bool train); +CAFFE2_API Tensor & dropout_(Tensor & self, double p, bool train); +CAFFE2_API Tensor feature_dropout(const Tensor & input, double p, bool train); +CAFFE2_API Tensor & feature_dropout_(Tensor & self, double p, bool train); +CAFFE2_API Tensor alpha_dropout(const Tensor & input, double p, bool train); +CAFFE2_API Tensor & alpha_dropout_(Tensor & self, double p, bool train); +CAFFE2_API Tensor feature_alpha_dropout(const Tensor & input, double p, bool train); +CAFFE2_API Tensor & feature_alpha_dropout_(Tensor & self, double p, bool train); +CAFFE2_API Tensor abs(const Tensor & self); +CAFFE2_API Tensor & abs_(Tensor & self); +CAFFE2_API Tensor & abs_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor angle(const Tensor & self); +CAFFE2_API Tensor & _angle_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor real(const Tensor & self); +CAFFE2_API Tensor & _real_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor imag(const Tensor & self); +CAFFE2_API Tensor & _imag_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor conj(const Tensor & self); +CAFFE2_API Tensor & _conj_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor acos(const Tensor & self); +CAFFE2_API Tensor & acos_(Tensor & self); +CAFFE2_API Tensor & acos_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor avg_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true); +CAFFE2_API Tensor adaptive_avg_pool1d(const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool1d(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor add(const Tensor & self, const Tensor & other, Scalar 
alpha=1); +CAFFE2_API Tensor mkldnn_add(const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor add_sparse(const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & mkldnn_add_(Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & add_sparse_(Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & mkldnn_add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & add_out_sparse_cpu(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & add_out_sparse_cuda(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor add(const Tensor & self, Scalar other, Scalar alpha=1); +CAFFE2_API Tensor & add_(Tensor & self, Scalar other, Scalar alpha=1); +CAFFE2_API Tensor addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor affine_grid_generator(const Tensor & theta, IntArrayRef size, bool align_corners); +CAFFE2_API Tensor affine_grid_generator_backward(const Tensor & grad, IntArrayRef size, bool align_corners); +CAFFE2_API Tensor all(const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API Tensor & all_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor all(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & all_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +CAFFE2_API bool allclose(const Tensor & self, const Tensor & other, double rtol=1e-05, double atol=1e-08, bool equal_nan=false); +CAFFE2_API Tensor any(const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API Tensor & any_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor any(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & any_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +CAFFE2_API Tensor arange(Scalar end, const TensorOptions & options={}); +CAFFE2_API Tensor arange(Scalar start, Scalar end, const TensorOptions & options={}); +CAFFE2_API Tensor arange(Scalar start, Scalar end, Scalar step, const TensorOptions & options={}); +CAFFE2_API Tensor & arange_out(Tensor & out, Scalar end); +CAFFE2_API Tensor & arange_cpu_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +CAFFE2_API Tensor & arange_cuda_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +CAFFE2_API Tensor _dim_arange(const Tensor & like, int64_t dim); +CAFFE2_API Tensor argmax(const Tensor & self, c10::optional dim=c10::nullopt, bool keepdim=false); +CAFFE2_API Tensor argmin(const Tensor & self, c10::optional dim=c10::nullopt, bool keepdim=false); +CAFFE2_API Tensor as_strided_tensorimpl(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt); +CAFFE2_API Tensor as_strided_qtensorimpl(const 
Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt); +CAFFE2_API Tensor & as_strided_(Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt); +CAFFE2_API Tensor asin(const Tensor & self); +CAFFE2_API Tensor & asin_(Tensor & self); +CAFFE2_API Tensor & asin_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor atan(const Tensor & self); +CAFFE2_API Tensor & _atan__cpu(Tensor & self); +CAFFE2_API Tensor & _atan__cuda(Tensor & self); +CAFFE2_API Tensor & _atan_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _atan_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor baddbmm_cpu(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor baddbmm_cuda(const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & baddbmm__cpu(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & baddbmm__cuda(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & _baddbmm_mkl_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & baddbmm_out_cpu(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & baddbmm_out_cuda(Tensor & out, const Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor bartlett_window(int64_t window_length, const TensorOptions & options={}); +CAFFE2_API Tensor bartlett_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +CAFFE2_API Tensor batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); +CAFFE2_API std::tuple _batch_norm_impl_index(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); +CAFFE2_API std::tuple _batch_norm_impl_index_backward(int64_t impl_index, const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var_transform, bool train, double eps, std::array output_mask, const Tensor & reservedSpace); +CAFFE2_API Tensor bernoulli(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor & bernoulli_out(Tensor & out, const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor & bernoulli_tensor_cpu_(Tensor & self, const Tensor & p, Generator * generator=nullptr); +CAFFE2_API Tensor & bernoulli_tensor_cuda_(Tensor & self, const Tensor & p, Generator * generator=nullptr); +CAFFE2_API Tensor & bernoulli_scalar_cpu_(Tensor & self, double p=0.5, Generator * generator=nullptr); +CAFFE2_API Tensor & bernoulli_scalar_cuda_(Tensor & self, double p=0.5, Generator * generator=nullptr); +CAFFE2_API Tensor bernoulli(const Tensor & self, double p, Generator * generator=nullptr); +CAFFE2_API Tensor bilinear(const Tensor & input1, const Tensor & input2, const Tensor & weight, const Tensor & bias); +CAFFE2_API Tensor binary_cross_entropy_with_logits(const Tensor & self, const Tensor & target, const Tensor & weight={}, 
const Tensor & pos_weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor binary_cross_entropy_with_logits_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight={}, const Tensor & pos_weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor _bincount_cpu(const Tensor & self, const Tensor & weights={}, int64_t minlength=0); +CAFFE2_API Tensor _bincount_cuda(const Tensor & self, const Tensor & weights={}, int64_t minlength=0); +CAFFE2_API Tensor bitwise_not(const Tensor & self); +CAFFE2_API Tensor & bitwise_not_(Tensor & self); +CAFFE2_API Tensor & bitwise_not_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor logical_not(const Tensor & self); +CAFFE2_API Tensor & logical_not_(Tensor & self); +CAFFE2_API Tensor & logical_not_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor logical_xor(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & logical_xor_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & logical_xor_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor blackman_window(int64_t window_length, const TensorOptions & options={}); +CAFFE2_API Tensor blackman_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +CAFFE2_API Tensor bmm_cpu(const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor bmm_cuda(const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor & bmm_out_cpu(Tensor & out, const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor & bmm_out_cuda(Tensor & out, const Tensor & self, const Tensor & mat2); +CAFFE2_API std::vector broadcast_tensors(TensorList tensors); +CAFFE2_API Tensor cat(TensorList tensors, int64_t dim=0); +CAFFE2_API Tensor & cat_out(Tensor & out, TensorList tensors, int64_t dim=0); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor cat(TensorList tensors, Dimname dim); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & cat_out(Tensor & out, TensorList tensors, Dimname dim); +#endif +CAFFE2_API Tensor ceil(const Tensor & self); +CAFFE2_API Tensor & ceil_(Tensor & self); +CAFFE2_API Tensor & ceil_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor chain_matmul(TensorList matrices); +CAFFE2_API std::vector chunk(const Tensor & self, int64_t chunks, int64_t dim=0); +CAFFE2_API Tensor clamp(const Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +CAFFE2_API Tensor & _clamp__cpu(Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +CAFFE2_API Tensor & _clamp__cuda(Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +CAFFE2_API Tensor & _clamp_out_cpu(Tensor & out, const Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +CAFFE2_API Tensor & _clamp_out_cuda(Tensor & out, const Tensor & self, c10::optional min=c10::nullopt, c10::optional max=c10::nullopt); +CAFFE2_API Tensor clamp_max(const Tensor & self, Scalar max); +CAFFE2_API Tensor & _clamp_max__cpu(Tensor & self, Scalar max); +CAFFE2_API Tensor & _clamp_max__cuda(Tensor & self, Scalar max); +CAFFE2_API Tensor & _clamp_max_out_cpu(Tensor & out, const Tensor & self, Scalar max); +CAFFE2_API Tensor & _clamp_max_out_cuda(Tensor & out, const Tensor & self, Scalar max); +CAFFE2_API Tensor clamp_min(const Tensor & self, Scalar min); +CAFFE2_API Tensor & _clamp_min__cpu(Tensor & self, Scalar min); +CAFFE2_API Tensor & _clamp_min__cuda(Tensor & self, Scalar min); +CAFFE2_API Tensor & _clamp_min_out_cpu(Tensor & out, const 
Tensor & self, Scalar min); +CAFFE2_API Tensor & _clamp_min_out_cuda(Tensor & out, const Tensor & self, Scalar min); +CAFFE2_API bool cudnn_is_acceptable(const Tensor & self); +CAFFE2_API Tensor constant_pad_nd(const Tensor & self, IntArrayRef pad, Scalar value=0); +CAFFE2_API Tensor contiguous(const Tensor & self, MemoryFormat memory_format=MemoryFormat::Contiguous); +CAFFE2_API Tensor convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); +CAFFE2_API Tensor convolution_overrideable(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); +CAFFE2_API std::tuple convolution_backward_overrideable(const Tensor & grad_output, const Tensor & input, const Tensor & weight, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, std::array output_mask); +CAFFE2_API Tensor _convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled); +CAFFE2_API Tensor _convolution_nogroup(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding); +CAFFE2_API std::tuple _convolution_double_backward(const Tensor & ggI, const Tensor & ggW, const Tensor & ggb, const Tensor & gO, const Tensor & weight, const Tensor & self, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled, std::array output_mask); +CAFFE2_API Tensor conv1d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +CAFFE2_API Tensor conv2d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +CAFFE2_API Tensor conv3d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1, int64_t groups=1); +CAFFE2_API Tensor conv_tbc(const Tensor & self, const Tensor & weight, const Tensor & bias, int64_t pad=0); +CAFFE2_API std::tuple conv_tbc_backward(const Tensor & self, const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t pad); +CAFFE2_API Tensor conv_transpose1d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +CAFFE2_API Tensor conv_transpose2d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +CAFFE2_API Tensor conv_transpose3d(const Tensor & input, const Tensor & weight, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, int64_t groups=1, IntArrayRef dilation=1); +CAFFE2_API Tensor & copy_(Tensor & self, const Tensor & src, bool non_blocking=false); +CAFFE2_API 
Tensor cos(const Tensor & self); +CAFFE2_API Tensor & _cos__cpu(Tensor & self); +CAFFE2_API Tensor & _cos__cuda(Tensor & self); +CAFFE2_API Tensor & _cos_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _cos_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor cosh(const Tensor & self); +CAFFE2_API Tensor & _cosh__cpu(Tensor & self); +CAFFE2_API Tensor & _cosh__cuda(Tensor & self); +CAFFE2_API Tensor & _cosh_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _cosh_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor cosine_embedding_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin=0.0, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor cudnn_affine_grid_generator_forward(const Tensor & theta, int64_t N, int64_t C, int64_t H, int64_t W); +CAFFE2_API Tensor cudnn_affine_grid_generator_backward(const Tensor & grad, int64_t N, int64_t C, int64_t H, int64_t W); +CAFFE2_API std::tuple cudnn_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); +CAFFE2_API std::tuple cudnn_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon, const Tensor & reserveSpace); +CAFFE2_API Tensor cudnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor cudnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple cudnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +CAFFE2_API Tensor cudnn_convolution_backward_bias(const Tensor & grad_output); +CAFFE2_API Tensor cudnn_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor cudnn_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple cudnn_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +CAFFE2_API Tensor cudnn_convolution_backward_bias(const Tensor & grad_output); +CAFFE2_API Tensor cudnn_convolution_transpose_backward_input(const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor cudnn_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef 
stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor cudnn_grid_sampler_forward(const Tensor & self, const Tensor & grid); +CAFFE2_API std::tuple cudnn_grid_sampler_backward(const Tensor & self, const Tensor & grid, const Tensor & grad_output); +CAFFE2_API Tensor cumsum(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor & cumsum_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor cumsum(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & cumsum_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor cumprod(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor & cumprod_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor cumprod(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & cumprod_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank=0, int64_t reduction=at::Reduction::Mean, bool zero_infinity=false); +CAFFE2_API Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, const Tensor & input_lengths, const Tensor & target_lengths, int64_t blank=0, int64_t reduction=at::Reduction::Mean, bool zero_infinity=false); +CAFFE2_API std::tuple ctc_loss_cpu(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank=0, bool zero_infinity=false); +CAFFE2_API std::tuple ctc_loss_gpu(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank=0, bool zero_infinity=false); +CAFFE2_API Tensor ctc_loss_backward_cpu(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity=false); +CAFFE2_API Tensor ctc_loss_backward_gpu(const Tensor & grad, const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, const Tensor & neg_log_likelihood, const Tensor & log_alpha, int64_t blank, bool zero_infinity=false); +CAFFE2_API Tensor det(const Tensor & self); +CAFFE2_API Tensor diag_embed(const Tensor & self, int64_t offset=0, int64_t dim1=-2, int64_t dim2=-1); +CAFFE2_API Tensor diagflat(const Tensor & self, int64_t offset=0); +CAFFE2_API Tensor diagonal(const Tensor & self, int64_t offset=0, int64_t dim1=0, int64_t dim2=1); +CAFFE2_API Tensor & fill_diagonal_(Tensor & self, Scalar fill_value, bool wrap=false); +CAFFE2_API Tensor div(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor div_sparse(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & div_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & div_sparse_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & div_out_sparse_zerodim(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor div(const Tensor & self, Scalar other); 
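A brief illustrative sketch (added for clarity, not part of the generated header): the BUILD_NAMEDTENSOR overloads above, such as cumsum/cumprod taking a Dimname, lean on the name-inference helpers declared in NamedTensorUtils.h earlier in this diff. The sketch shows the two-step pattern from the "[NOTE] Writing name inference rules" comment for a hypothetical op; my_add is a made-up name, and the code assumes a libtorch build with BUILD_NAMEDTENSOR defined so that at::NoNamesGuard and at::namedinference are available.

#include <ATen/ATen.h>
#include <ATen/NamedTensorUtils.h>

// Hypothetical named-tensor-aware wrapper: (1) compute output names,
// (2) run the unnamed computation, then propagate the names to the result.
at::Tensor my_add(const at::Tensor& self, const at::Tensor& other) {
  // (1) Unify both name lists from the right; returns {} when both inputs
  //     are unnamed, so unnamed tensors pay no extra cost.
  auto maybe_outnames =
      at::namedinference::compute_broadcast_outnames(self, other);

  // Run the underlying computation with names temporarily ignored.
  auto result = [&]() {
    at::NoNamesGuard guard;
    return self.add(other);  // stands in for the op's unnamed implementation
  }();

  // (2) Attach the unified names to the result only if there are any.
  at::namedinference::propagate_names_if_nonempty(result, maybe_outnames);
  return result;
}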
+CAFFE2_API Tensor & div_(Tensor & self, Scalar other); +CAFFE2_API Tensor & dot_out(Tensor & out, const Tensor & self, const Tensor & tensor); +CAFFE2_API Tensor einsum(std::string equation, TensorList tensors); +CAFFE2_API Tensor embedding(const Tensor & weight, const Tensor & indices, int64_t padding_idx=-1, bool scale_grad_by_freq=false, bool sparse=false); +CAFFE2_API Tensor embedding_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq, bool sparse); +CAFFE2_API Tensor embedding_dense_backward_cpu(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); +CAFFE2_API Tensor embedding_dense_backward_cuda(const Tensor & grad_output, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); +CAFFE2_API Tensor & embedding_renorm_cpu_(Tensor & self, const Tensor & indices, double max_norm, double norm_type); +CAFFE2_API Tensor & embedding_renorm_cuda_(Tensor & self, const Tensor & indices, double max_norm, double norm_type); +CAFFE2_API Tensor embedding_sparse_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); +CAFFE2_API std::tuple embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq=false, int64_t mode=0, bool sparse=false, const Tensor & per_sample_weights={}); +CAFFE2_API std::tuple _embedding_bag_cpu(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq=false, int64_t mode=0, bool sparse=false, const Tensor & per_sample_weights={}); +CAFFE2_API std::tuple _embedding_bag_cuda(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq=false, int64_t mode=0, bool sparse=false, const Tensor & per_sample_weights={}); +CAFFE2_API Tensor _embedding_bag_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); +CAFFE2_API Tensor _embedding_bag_sparse_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); +CAFFE2_API Tensor _embedding_bag_dense_backward_cpu(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); +CAFFE2_API Tensor _embedding_bag_dense_backward_cuda(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); +CAFFE2_API Tensor _embedding_bag_per_sample_weights_backward_cpu(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode); +CAFFE2_API Tensor _embedding_bag_per_sample_weights_backward_cuda(const Tensor & grad, const Tensor & weight, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, int64_t mode); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor empty(IntArrayRef 
size, c10::optional names, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +#endif +CAFFE2_API Tensor empty_cpu(IntArrayRef size, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_cuda(IntArrayRef size, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_mkldnn(IntArrayRef size, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_sparse(IntArrayRef size, const TensorOptions & options={}, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor new_empty(const Tensor & self, IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor new_full(const Tensor & self, IntArrayRef size, Scalar fill_value, const TensorOptions & options={}); +CAFFE2_API Tensor new_zeros(const Tensor & self, IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor empty_affine_quantized_other_backends_stub(IntArrayRef size, const TensorOptions & options={}, double scale=1, int64_t zero_point=0, c10::optional memory_format=MemoryFormat::Contiguous); +CAFFE2_API Tensor empty_affine_quantized_cpu(IntArrayRef size, const TensorOptions & options={}, double scale=1, int64_t zero_point=0, c10::optional memory_format=MemoryFormat::Contiguous); +CAFFE2_API Tensor empty_per_channel_affine_quantized_other_backends_stub(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options={}, c10::optional memory_format=MemoryFormat::Contiguous); +CAFFE2_API Tensor empty_per_channel_affine_quantized_cpu(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options={}, c10::optional memory_format=MemoryFormat::Contiguous); +CAFFE2_API Tensor & resize_cpu_(Tensor & self, IntArrayRef size, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor & resize_cuda_(Tensor & self, IntArrayRef size, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor & quantized_resize_cpu_(Tensor & self, IntArrayRef size, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor & empty_out(Tensor & out, IntArrayRef size, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor empty_strided_cpu(IntArrayRef size, IntArrayRef stride, const TensorOptions & options={}); +CAFFE2_API Tensor empty_strided_cuda(IntArrayRef size, IntArrayRef stride, const TensorOptions & options={}); +CAFFE2_API Tensor erf(const Tensor & self); +CAFFE2_API Tensor & _erf__cpu(Tensor & self); +CAFFE2_API Tensor & _erf__cuda(Tensor & self); +CAFFE2_API Tensor & _erf_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _erf_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor erfc(const Tensor & self); +CAFFE2_API Tensor & _erfc__cpu(Tensor & self); +CAFFE2_API Tensor & _erfc__cuda(Tensor & self); +CAFFE2_API Tensor & _erfc_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _erfc_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor exp(const Tensor & self); +CAFFE2_API Tensor & _exp__cpu(Tensor & self); +CAFFE2_API Tensor & _exp__cuda(Tensor & self); +CAFFE2_API Tensor & _exp_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _exp_out_cuda(Tensor & out, const Tensor & self); 
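A short illustrative usage sketch (not part of the generated header) for the factory declarations in this file: the TENSOR(T, S) macro near the top of NativeFunctions.h stamps out at::native::tensor overloads per scalar type, and the empty/full/ones declarations above and below are the backend-specific factory entry points. The template arguments written here are assumptions; in application code these factories are normally reached through at::tensor or torch::tensor rather than the native namespace.

#include <ATen/ATen.h>

// 1-D float tensor: picks the tensor(std::initializer_list<float>) overload
// generated by TENSOR(float, Float), which supplies at::dtype(at::kFloat).
at::Tensor small_float_tensor() {
  return at::native::tensor({1.0f, 2.0f, 3.0f});
}

// Single-value overload with explicit options, requesting a double tensor.
at::Tensor pi_tensor() {
  return at::native::tensor(3.14, at::dtype(at::kDouble));
}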
+CAFFE2_API Tensor expm1(const Tensor & self); +CAFFE2_API Tensor & expm1_(Tensor & self); +CAFFE2_API Tensor & expm1_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor expand(const Tensor & self, IntArrayRef size, bool implicit=false); +CAFFE2_API Tensor expand_as(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor eye(int64_t n, const TensorOptions & options={}); +CAFFE2_API Tensor eye(int64_t n, int64_t m, const TensorOptions & options={}); +CAFFE2_API Tensor & eye_out_cpu(Tensor & out, int64_t n); +CAFFE2_API Tensor & eye_out_cuda(Tensor & out, int64_t n); +CAFFE2_API Tensor & eye_out_cpu(Tensor & out, int64_t n, int64_t m); +CAFFE2_API Tensor & eye_out_cuda(Tensor & out, int64_t n, int64_t m); +CAFFE2_API Tensor flatten(const Tensor & self, int64_t start_dim=0, int64_t end_dim=-1); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim, Dimname out_dim); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor flatten(const Tensor & self, Dimname start_dim, Dimname end_dim, Dimname out_dim); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor flatten(const Tensor & self, DimnameList dims, Dimname out_dim); +#endif +CAFFE2_API Tensor & fill_(Tensor & self, Scalar value); +CAFFE2_API Tensor & fill_(Tensor & self, const Tensor & value); +CAFFE2_API Tensor floor(const Tensor & self); +CAFFE2_API Tensor & floor_(Tensor & self); +CAFFE2_API Tensor & floor_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor frac(const Tensor & self); +CAFFE2_API Tensor & frac_(Tensor & self); +CAFFE2_API Tensor & frac_out(Tensor & out, const Tensor & self); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor full(IntArrayRef size, Scalar fill_value, c10::optional names, const TensorOptions & options={}); +#endif +CAFFE2_API Tensor full(IntArrayRef size, Scalar fill_value, const TensorOptions & options={}); +CAFFE2_API Tensor & full_out(Tensor & out, IntArrayRef size, Scalar fill_value); +CAFFE2_API Tensor full_like(const Tensor & self, Scalar fill_value, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor full_like(const Tensor & self, Scalar fill_value, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor from_file(std::string filename, c10::optional shared=c10::nullopt, c10::optional size=0, const TensorOptions & options={}); +CAFFE2_API Tensor grid_sampler(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API Tensor grid_sampler_2d_cpu(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API Tensor grid_sampler_2d_cuda(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API std::tuple grid_sampler_2d_backward_cpu(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API std::tuple grid_sampler_2d_backward_cuda(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API Tensor grid_sampler_3d_cpu(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API Tensor grid_sampler_3d_cuda(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API std::tuple 
grid_sampler_3d_backward_cpu(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API std::tuple grid_sampler_3d_backward_cuda(const Tensor & grad_output, const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); +CAFFE2_API Tensor hann_window(int64_t window_length, const TensorOptions & options={}); +CAFFE2_API Tensor hann_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +CAFFE2_API Tensor hamming_window(int64_t window_length, const TensorOptions & options={}); +CAFFE2_API Tensor hamming_window(int64_t window_length, bool periodic, const TensorOptions & options={}); +CAFFE2_API Tensor hamming_window(int64_t window_length, bool periodic, double alpha, const TensorOptions & options={}); +CAFFE2_API Tensor hamming_window(int64_t window_length, bool periodic, double alpha, double beta, const TensorOptions & options={}); +CAFFE2_API Tensor hinge_embedding_loss(const Tensor & self, const Tensor & target, double margin=1.0, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor group_norm(const Tensor & input, int64_t num_groups, const Tensor & weight={}, const Tensor & bias={}, double eps=1e-05, bool cudnn_enabled=true); +CAFFE2_API Tensor fft(const Tensor & self, int64_t signal_ndim, bool normalized=false); +CAFFE2_API Tensor ifft(const Tensor & self, int64_t signal_ndim, bool normalized=false); +CAFFE2_API Tensor rfft(const Tensor & self, int64_t signal_ndim, bool normalized=false, bool onesided=true); +CAFFE2_API Tensor irfft(const Tensor & self, int64_t signal_ndim, bool normalized=false, bool onesided=true, IntArrayRef signal_sizes={}); +CAFFE2_API Tensor _fft_mkl(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes); +CAFFE2_API Tensor _fft_cufft(const Tensor & self, int64_t signal_ndim, bool complex_input, bool complex_output, bool inverse, IntArrayRef checked_signal_sizes, bool normalized, bool onesided, IntArrayRef output_sizes); +CAFFE2_API int64_t _cufft_get_plan_cache_size(int64_t device_index); +CAFFE2_API int64_t _cufft_get_plan_cache_max_size(int64_t device_index); +CAFFE2_API void _cufft_set_plan_cache_max_size(int64_t device_index, int64_t max_size); +CAFFE2_API void _cufft_clear_plan_cache(int64_t device_index); +CAFFE2_API Tensor index(const Tensor & self, TensorList indices); +CAFFE2_API Tensor & index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +CAFFE2_API Tensor index_copy(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & index_copy_(Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor index_copy(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); +#endif +CAFFE2_API Tensor & index_put_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false); +CAFFE2_API Tensor index_put(const Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false); +CAFFE2_API Tensor & _index_put_impl_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate=false, bool unsafe=false); +CAFFE2_API Tensor instance_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, 
const Tensor & running_var, bool use_input_stats, double momentum, double eps, bool cudnn_enabled); +CAFFE2_API Tensor inverse(const Tensor & self); +CAFFE2_API Tensor & inverse_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor _inverse_helper_cpu(const Tensor & self); +CAFFE2_API Tensor _inverse_helper_cuda(const Tensor & self); +CAFFE2_API Tensor isclose(const Tensor & self, const Tensor & other, double rtol=1e-05, double atol=1e-08, bool equal_nan=false); +CAFFE2_API Tensor isnan(const Tensor & self); +CAFFE2_API bool is_distributed(const Tensor & self); +CAFFE2_API bool is_floating_point(const Tensor & self); +CAFFE2_API bool is_complex(const Tensor & self); +CAFFE2_API bool is_nonzero(const Tensor & self); +CAFFE2_API bool is_same_size(const Tensor & self, const Tensor & other); +CAFFE2_API bool is_signed(const Tensor & self); +CAFFE2_API Tensor kl_div(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor kl_div_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor kl_div_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API std::tuple kthvalue(const Tensor & self, int64_t k, int64_t dim=-1, bool keepdim=false); +CAFFE2_API std::tuple kthvalue_out_cpu(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim=-1, bool keepdim=false); +CAFFE2_API std::tuple kthvalue_out_cuda(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim=-1, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple kthvalue(const Tensor & self, int64_t k, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, Dimname dim, bool keepdim=false); +#endif +CAFFE2_API Tensor layer_norm(const Tensor & input, IntArrayRef normalized_shape, const Tensor & weight={}, const Tensor & bias={}, double eps=1e-05, bool cudnn_enable=true); +CAFFE2_API std::tuple layer_norm_cpu(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps); +CAFFE2_API std::tuple layer_norm_cuda(const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t M, int64_t N, double eps); +CAFFE2_API std::tuple layer_norm_backward_cpu(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask); +CAFFE2_API std::tuple layer_norm_backward_cuda(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & rstd, const Tensor & weight, int64_t M, int64_t N, std::array output_mask); +CAFFE2_API Tensor linear(const Tensor & input, const Tensor & weight, const Tensor & bias={}); +CAFFE2_API Tensor mkldnn_linear(const Tensor & input, const Tensor & weight, const Tensor & bias={}); +CAFFE2_API Tensor fbgemm_linear_int8_weight_fp32_activation(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); +CAFFE2_API Tensor fbgemm_linear_int8_weight(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); +CAFFE2_API std::tuple fbgemm_linear_quantize_weight(const Tensor & input); +CAFFE2_API 
Tensor fbgemm_pack_gemm_matrix_fp16(const Tensor & input); +CAFFE2_API Tensor fbgemm_linear_fp16_weight_fp32_activation(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); +CAFFE2_API Tensor fbgemm_linear_fp16_weight(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); +CAFFE2_API Tensor fbgemm_pack_quantized_matrix(const Tensor & input); +CAFFE2_API Tensor fbgemm_pack_quantized_matrix(const Tensor & input, int64_t K, int64_t N); +CAFFE2_API Tensor linspace(Scalar start, Scalar end, int64_t steps=100, const TensorOptions & options={}); +CAFFE2_API Tensor & linspace_cpu_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100); +CAFFE2_API Tensor & linspace_cuda_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100); +CAFFE2_API Tensor log(const Tensor & self); +CAFFE2_API Tensor & log_(Tensor & self); +CAFFE2_API Tensor & log_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor log10(const Tensor & self); +CAFFE2_API Tensor & log10_(Tensor & self); +CAFFE2_API Tensor & log10_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor log1p(const Tensor & self); +CAFFE2_API Tensor & log1p_(Tensor & self); +CAFFE2_API Tensor & log1p_sparse_(Tensor & self); +CAFFE2_API Tensor & log1p_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & log1p_out_sparse(Tensor & out, const Tensor & self); +CAFFE2_API Tensor log2(const Tensor & self); +CAFFE2_API Tensor & log2_(Tensor & self); +CAFFE2_API Tensor & log2_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor logdet(const Tensor & self); +CAFFE2_API Tensor logspace(Scalar start, Scalar end, int64_t steps=100, double base=10.0, const TensorOptions & options={}); +CAFFE2_API Tensor & logspace_cpu_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100, double base=10.0); +CAFFE2_API Tensor & logspace_cuda_out(Tensor & out, Scalar start, Scalar end, int64_t steps=100, double base=10.0); +CAFFE2_API Tensor log_softmax(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor log_softmax(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor log_softmax_cpu(const Tensor & self, int64_t dim, bool half_to_float); +CAFFE2_API Tensor log_softmax_cuda(const Tensor & self, int64_t dim, bool half_to_float); +CAFFE2_API Tensor log_softmax_backward_cpu(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); +CAFFE2_API Tensor log_softmax_backward_cuda(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); +CAFFE2_API Tensor logsumexp(const Tensor & self, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor & logsumexp_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor logsumexp(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & logsumexp_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +CAFFE2_API Tensor margin_ranking_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin=0.0, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor matmul(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & matmul_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor matrix_rank(const Tensor & self, double tol, bool symmetric=false); +CAFFE2_API Tensor matrix_rank(const Tensor & self, bool 
symmetric=false); +CAFFE2_API Tensor matrix_power(const Tensor & self, int64_t n); +CAFFE2_API std::tuple max(const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API Tensor max_values(const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple max(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor max_values(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +CAFFE2_API std::tuple max_pool1d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor max_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor mkldnn_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor quantized_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor mean_cpu_gpu(const Tensor & self, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor quantized_mean_cpu(const Tensor & self, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor mean_cpu_gpu(const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor quantized_mean_cpu(const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor & mean_out_cpu_gpu(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor & quantized_mean_out_cpu(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor mean(const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & mean_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API std::tuple median(const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple median(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +CAFFE2_API std::tuple min(const Tensor & self, int64_t dim, bool keepdim=false); +CAFFE2_API std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool 
keepdim=false); +CAFFE2_API Tensor min_values(const Tensor & self, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple min(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor min_values(const Tensor & self, DimnameList dim, bool keepdim=false); +#endif +CAFFE2_API Tensor mkldnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups); +CAFFE2_API Tensor mkldnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); +CAFFE2_API std::tuple mkldnn_convolution_backward_weights(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); +CAFFE2_API std::tuple mkldnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, std::array output_mask); +CAFFE2_API std::tuple miopen_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double exponential_average_factor, double epsilon); +CAFFE2_API std::tuple miopen_batch_norm_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var, double epsilon); +CAFFE2_API Tensor miopen_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor miopen_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple miopen_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +CAFFE2_API Tensor miopen_convolution_backward_bias(const Tensor & grad_output); +CAFFE2_API Tensor miopen_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor miopen_convolution_transpose(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple miopen_convolution_transpose_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +CAFFE2_API Tensor miopen_convolution_transpose_backward_input(const Tensor & grad_output, const 
Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor miopen_convolution_transpose_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor miopen_depthwise_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API Tensor miopen_depthwise_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple miopen_depthwise_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic, std::array output_mask); +CAFFE2_API Tensor miopen_depthwise_convolution_backward_weight(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool benchmark, bool deterministic); +CAFFE2_API std::tuple miopen_rnn(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & hx, const Tensor & cx, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state); +CAFFE2_API std::tuple> miopen_rnn_backward(const Tensor & input, TensorList weight, int64_t weight_stride0, const Tensor & weight_buf, const Tensor & hx, const Tensor & cx, const Tensor & output, const Tensor & grad_output, const Tensor & grad_hy, const Tensor & grad_cy, int64_t mode, int64_t hidden_size, int64_t num_layers, bool batch_first, double dropout, bool train, bool bidirectional, IntArrayRef batch_sizes, const Tensor & dropout_state, const Tensor & reserve, std::array output_mask); +CAFFE2_API Tensor _sparse_mm(const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor & _sparse_mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor _sparse_mm(const Tensor & sparse, const Tensor & dense); +CAFFE2_API std::tuple mode(const Tensor & self, int64_t dim=-1, bool keepdim=false); +CAFFE2_API std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim=-1, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple mode(const Tensor & self, Dimname dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim=false); +#endif +CAFFE2_API Tensor mul(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor mkldnn_mul(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor mul_sparse(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mul_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mkldnn_mul_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mul_sparse_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mkldnn_mul_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor 
& mul_out_sparse_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & mul_out_sparse_cuda(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor mul(const Tensor & self, Scalar other); +CAFFE2_API Tensor & mul_(Tensor & self, Scalar other); +CAFFE2_API Tensor mvlgamma(const Tensor & self, int64_t p); +CAFFE2_API Tensor & mvlgamma_(Tensor & self, int64_t p); +CAFFE2_API Tensor narrow_copy_dense(const Tensor & self, int64_t dim, int64_t start, int64_t length); +CAFFE2_API Tensor narrow_copy_sparse(const Tensor & self, int64_t dim, int64_t start, int64_t length); +CAFFE2_API Tensor narrow(const Tensor & self, int64_t dim, int64_t start, int64_t length); +CAFFE2_API std::tuple batch_norm_cpu(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); +CAFFE2_API std::tuple batch_norm_cuda(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); +CAFFE2_API std::tuple mkldnn_batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps); +CAFFE2_API std::tuple batch_norm_stats_cuda(const Tensor & input, double eps); +CAFFE2_API Tensor batch_norm_elemt_cuda(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); +CAFFE2_API Tensor & batch_norm_elemt_cuda_out(Tensor & out, const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & mean, const Tensor & invstd, double eps); +CAFFE2_API std::tuple batch_norm_gather_stats_cuda(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, int64_t count); +CAFFE2_API std::tuple batch_norm_gather_stats_with_counts_cuda(const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & running_mean, const Tensor & running_var, double momentum, double eps, IntArrayRef counts); +CAFFE2_API std::tuple batch_norm_backward_cpu(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask); +CAFFE2_API std::tuple batch_norm_backward_cuda(const Tensor & grad_out, const Tensor & input, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_invstd, bool train, double eps, std::array output_mask); +CAFFE2_API std::tuple batch_norm_backward_reduce_cuda(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, bool input_g, bool weight_g, bool bias_g); +CAFFE2_API Tensor batch_norm_backward_elemt_cuda(const Tensor & grad_out, const Tensor & input, const Tensor & mean, const Tensor & invstd, const Tensor & weight, const Tensor & mean_dy, const Tensor & mean_dy_xmu); +CAFFE2_API std::tuple batch_norm_update_stats_cpu(const Tensor & input, const Tensor & running_mean, const Tensor & running_var, double momentum); +CAFFE2_API std::tuple batch_norm_update_stats_cuda(const Tensor & input, const Tensor & running_mean, const Tensor & running_var, double momentum); +CAFFE2_API bool _nnpack_available(); +CAFFE2_API 
Tensor _nnpack_spatial_convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride=1); +CAFFE2_API std::tuple _nnpack_spatial_convolution_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, std::array output_mask); +CAFFE2_API Tensor _nnpack_spatial_convolution_backward_input(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding); +CAFFE2_API Tensor _nnpack_spatial_convolution_backward_weight(const Tensor & input, IntArrayRef weightsize, const Tensor & grad_output, IntArrayRef padding); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor ones(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +CAFFE2_API Tensor ones(IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor & ones_out(Tensor & out, IntArrayRef size); +CAFFE2_API Tensor ones_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor ones_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor pairwise_distance(const Tensor & x1, const Tensor & x2, double p=2, double eps=1e-06, bool keepdim=false); +CAFFE2_API Tensor cdist(const Tensor & x1, const Tensor & x2, double p=2, c10::optional compute_mode=c10::nullopt); +CAFFE2_API Tensor _cdist_backward(const Tensor & grad, const Tensor & x1, const Tensor & x2, double p, const Tensor & cdist); +CAFFE2_API Tensor pdist(const Tensor & self, double p=2); +CAFFE2_API Tensor _pdist_forward(const Tensor & self, double p=2); +CAFFE2_API Tensor _pdist_backward(const Tensor & grad, const Tensor & self, double p, const Tensor & pdist); +CAFFE2_API Tensor cosine_similarity(const Tensor & x1, const Tensor & x2, int64_t dim=1, double eps=1e-08); +CAFFE2_API Tensor permute(const Tensor & self, IntArrayRef dims); +CAFFE2_API Tensor numpy_T(const Tensor & self); +CAFFE2_API Tensor pixel_shuffle(const Tensor & self, int64_t upscale_factor); +CAFFE2_API bool is_pinned(const Tensor & self); +CAFFE2_API Tensor pin_memory(const Tensor & self); +CAFFE2_API Tensor pinverse(const Tensor & self, double rcond=1e-15); +CAFFE2_API Tensor poisson_nll_loss(const Tensor & input, const Tensor & target, bool log_input, bool full, double eps, int64_t reduction); +CAFFE2_API Tensor scalar_tensor(Scalar s, const TensorOptions & options={}); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor rand(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor rand(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options={}); +#endif +CAFFE2_API Tensor rand(IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor rand(IntArrayRef size, Generator * generator, const TensorOptions & options={}); +CAFFE2_API Tensor & rand_out(Tensor & out, IntArrayRef size); +CAFFE2_API Tensor & rand_out(Tensor & out, IntArrayRef size, Generator * generator); +CAFFE2_API Tensor rand_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor rand_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randint(int64_t high, IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor randint(int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options={}); +CAFFE2_API Tensor randint(int64_t low, int64_t high, IntArrayRef size, const 
TensorOptions & options={}); +CAFFE2_API Tensor randint(int64_t low, int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options={}); +CAFFE2_API Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size); +CAFFE2_API Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size, Generator * generator); +CAFFE2_API Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size); +CAFFE2_API Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size, Generator * generator); +CAFFE2_API Tensor randint_like(const Tensor & self, int64_t high, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randint_like(const Tensor & self, int64_t low, int64_t high, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randint_like(const Tensor & self, int64_t high, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randint_like(const Tensor & self, int64_t low, int64_t high, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randn(IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor randn(IntArrayRef size, Generator * generator, const TensorOptions & options={}); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor randn(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor randn(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options={}); +#endif +CAFFE2_API Tensor & randn_out(Tensor & out, IntArrayRef size); +CAFFE2_API Tensor & randn_out(Tensor & out, IntArrayRef size, Generator * generator); +CAFFE2_API Tensor randn_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randn_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor randperm(int64_t n, const TensorOptions & options={}); +CAFFE2_API Tensor randperm(int64_t n, Generator * generator, const TensorOptions & options={}); +CAFFE2_API Tensor & randperm_out(Tensor & out, int64_t n); +CAFFE2_API Tensor & randperm_out_cpu(Tensor & out, int64_t n, Generator * generator); +CAFFE2_API Tensor & randperm_out_cuda(Tensor & out, int64_t n, Generator * generator); +CAFFE2_API Tensor range(Scalar start, Scalar end, Scalar step=1, const TensorOptions & options={}); +CAFFE2_API Tensor range(Scalar start, Scalar end, const TensorOptions & options={}); +CAFFE2_API Tensor & range_cpu_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +CAFFE2_API Tensor & range_cuda_out(Tensor & out, Scalar start, Scalar end, Scalar step=1); +CAFFE2_API Tensor reciprocal(const Tensor & self); +CAFFE2_API Tensor & _reciprocal__cpu(Tensor & self); +CAFFE2_API Tensor & _reciprocal__cuda(Tensor & self); +CAFFE2_API Tensor & _reciprocal_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _reciprocal_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor neg(const Tensor & self); +CAFFE2_API Tensor & neg_(Tensor & self); +CAFFE2_API Tensor & neg_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor repeat(const Tensor & self, IntArrayRef repeats); +CAFFE2_API Tensor repeat_interleave_cpu(const Tensor & repeats); +CAFFE2_API Tensor repeat_interleave_cuda(const Tensor & repeats); +CAFFE2_API Tensor repeat_interleave(const Tensor & self, const Tensor & repeats, c10::optional dim=c10::nullopt); +CAFFE2_API Tensor repeat_interleave(const Tensor & self, int64_t repeats, 
c10::optional dim=c10::nullopt); +CAFFE2_API Tensor reshape(const Tensor & self, IntArrayRef shape); +CAFFE2_API Tensor mkldnn_reshape(const Tensor & self, IntArrayRef shape); +CAFFE2_API Tensor reshape_as(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor round(const Tensor & self); +CAFFE2_API Tensor & round_(Tensor & self); +CAFFE2_API Tensor & round_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor rrelu(const Tensor & self, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr); +CAFFE2_API Tensor & rrelu_(Tensor & self, Scalar lower=0.125, Scalar upper=0.333333333333, bool training=false, Generator * generator=nullptr); +CAFFE2_API Tensor relu(const Tensor & self); +CAFFE2_API Tensor mkldnn_relu(const Tensor & self); +CAFFE2_API Tensor quantized_relu(const Tensor & self); +CAFFE2_API Tensor & relu_(Tensor & self); +CAFFE2_API Tensor & mkldnn_relu_(Tensor & self); +CAFFE2_API Tensor & quantized_relu_(Tensor & self); +CAFFE2_API Tensor prelu_cpu(const Tensor & self, const Tensor & weight); +CAFFE2_API Tensor prelu_cuda(const Tensor & self, const Tensor & weight); +CAFFE2_API std::tuple prelu_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight); +CAFFE2_API std::tuple prelu_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & weight); +CAFFE2_API Tensor gelu_cpu(const Tensor & self); +CAFFE2_API Tensor gelu_cuda(const Tensor & self); +CAFFE2_API Tensor gelu_backward_cpu(const Tensor & grad, const Tensor & self); +CAFFE2_API Tensor gelu_backward_cuda(const Tensor & grad, const Tensor & self); +CAFFE2_API Tensor hardshrink_cpu(const Tensor & self, Scalar lambd=0.5); +CAFFE2_API Tensor hardshrink_cuda(const Tensor & self, Scalar lambd=0.5); +CAFFE2_API Tensor hardshrink_backward_cpu(const Tensor & grad_out, const Tensor & self, Scalar lambd); +CAFFE2_API Tensor hardshrink_backward_cuda(const Tensor & grad_out, const Tensor & self, Scalar lambd); +CAFFE2_API Tensor rsqrt(const Tensor & self); +CAFFE2_API Tensor & rsqrt_(Tensor & self); +CAFFE2_API Tensor & rsqrt_out(Tensor & out, const Tensor & self); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor select(const Tensor & self, Dimname dim, int64_t index); +#endif +CAFFE2_API Tensor select(const Tensor & self, int64_t dim, int64_t index); +CAFFE2_API Tensor selu(const Tensor & self); +CAFFE2_API Tensor & selu_(Tensor & self); +CAFFE2_API Tensor celu(const Tensor & self, Scalar alpha=1.0); +CAFFE2_API Tensor & celu_(Tensor & self, Scalar alpha=1.0); +CAFFE2_API Tensor sigmoid(const Tensor & self); +CAFFE2_API Tensor mkldnn_sigmoid(const Tensor & self); +CAFFE2_API Tensor & sigmoid_(Tensor & self); +CAFFE2_API Tensor & mkldnn_sigmoid_(Tensor & self); +CAFFE2_API Tensor & sigmoid_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor sin(const Tensor & self); +CAFFE2_API Tensor & sin_(Tensor & self); +CAFFE2_API Tensor & sin_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor sinh(const Tensor & self); +CAFFE2_API Tensor & sinh_(Tensor & self); +CAFFE2_API Tensor & sinh_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor detach(const Tensor & self); +CAFFE2_API Tensor & detach_(Tensor & self); +CAFFE2_API int64_t size(const Tensor & self, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API int64_t size(const Tensor & self, Dimname dim); +#endif +CAFFE2_API Tensor slice(const Tensor & self, int64_t dim=0, int64_t start=0, int64_t end=9223372036854775807, int64_t step=1); +CAFFE2_API std::tuple slogdet(const 
Tensor & self); +CAFFE2_API Tensor smm(const Tensor & self, const Tensor & mat2); +CAFFE2_API Tensor softmax(const Tensor & self, int64_t dim, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor softmax(const Tensor & self, Dimname dim, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor softmax_cpu(const Tensor & self, int64_t dim, bool half_to_float); +CAFFE2_API Tensor softmax_cuda(const Tensor & self, int64_t dim, bool half_to_float); +CAFFE2_API Tensor mkldnn_softmax(const Tensor & self, int64_t dim, bool half_to_float); +CAFFE2_API Tensor softmax_backward_cpu(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); +CAFFE2_API Tensor softmax_backward_cuda(const Tensor & grad_output, const Tensor & output, int64_t dim, const Tensor & self); +CAFFE2_API std::vector split(const Tensor & self, int64_t split_size, int64_t dim=0); +CAFFE2_API std::vector split_with_sizes(const Tensor & self, IntArrayRef split_sizes, int64_t dim=0); +CAFFE2_API Tensor squeeze(const Tensor & self); +CAFFE2_API Tensor squeeze(const Tensor & self, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor squeeze(const Tensor & self, Dimname dim); +#endif +CAFFE2_API Tensor & squeeze_(Tensor & self); +CAFFE2_API Tensor & squeeze_(Tensor & self, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & squeeze_(Tensor & self, Dimname dim); +#endif +CAFFE2_API Tensor sspaddmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & _sspaddmm_out_only_sparse(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & _sspaddmm_out_only_sparse_cuda(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & _sspaddmm_out_cpu(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & _sspaddmm_out_cuda(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor stack(TensorList tensors, int64_t dim=0); +CAFFE2_API Tensor & stack_out(Tensor & out, TensorList tensors, int64_t dim=0); +CAFFE2_API Tensor stft(const Tensor & self, int64_t n_fft, c10::optional hop_length=c10::nullopt, c10::optional win_length=c10::nullopt, const Tensor & window={}, bool normalized=false, bool onesided=true); +CAFFE2_API int64_t stride(const Tensor & self, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API int64_t stride(const Tensor & self, Dimname dim); +#endif +CAFFE2_API Tensor sum(const Tensor & self, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor sum(const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor sum(const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor & sum_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & sum_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor sum_to_size(const Tensor & self, IntArrayRef size); +CAFFE2_API Tensor sqrt(const Tensor & self); +CAFFE2_API Tensor & sqrt_(Tensor & self); +CAFFE2_API Tensor & sqrt_out(Tensor & out, const Tensor & self); +CAFFE2_API 
Tensor std(const Tensor & self, bool unbiased=true); +CAFFE2_API Tensor std(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +CAFFE2_API std::tuple std_mean(const Tensor & self, bool unbiased=true); +CAFFE2_API std::tuple std_mean(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple std_mean(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +CAFFE2_API Tensor & std_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor std(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & std_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +CAFFE2_API Tensor prod(const Tensor & self, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor prod(const Tensor & self, int64_t dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +CAFFE2_API Tensor & prod_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor prod(const Tensor & self, Dimname dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & prod_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim=false, c10::optional dtype=c10::nullopt); +#endif +CAFFE2_API Tensor t(const Tensor & self); +CAFFE2_API Tensor & t_(Tensor & self); +CAFFE2_API Tensor tan(const Tensor & self); +CAFFE2_API Tensor & _tan__cpu(Tensor & self); +CAFFE2_API Tensor & _tan__cuda(Tensor & self); +CAFFE2_API Tensor & _tan_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _tan_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor tanh(const Tensor & self); +CAFFE2_API Tensor & _tanh__cpu(Tensor & self); +CAFFE2_API Tensor & _tanh__cuda(Tensor & self); +CAFFE2_API Tensor & _tanh_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _tanh_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor tensordot(const Tensor & self, const Tensor & other, IntArrayRef dims_self, IntArrayRef dims_other); +CAFFE2_API Tensor threshold(const Tensor & self, Scalar threshold, Scalar value); +CAFFE2_API Tensor & threshold_(Tensor & self, Scalar threshold, Scalar value); +CAFFE2_API Tensor & threshold_out(Tensor & out, const Tensor & self, Scalar threshold, Scalar value); +CAFFE2_API Tensor threshold_backward(const Tensor & grad_output, const Tensor & self, Scalar threshold); +CAFFE2_API Tensor transpose(const Tensor & self, int64_t dim0, int64_t dim1); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor transpose(const Tensor & self, Dimname dim0, Dimname dim1); +#endif +CAFFE2_API Tensor mkldnn_transpose(const Tensor & self, int64_t dim0, int64_t dim1); +CAFFE2_API Tensor & transpose_(Tensor & self, int64_t dim0, int64_t dim1); +CAFFE2_API Tensor & mkldnn_transpose_(Tensor & self, int64_t dim0, int64_t dim1); +CAFFE2_API Tensor one_hot(const Tensor & self, int64_t num_classes=-1); +CAFFE2_API Tensor flip_cpu(const Tensor & self, IntArrayRef dims); +CAFFE2_API Tensor flip_cuda(const Tensor & self, IntArrayRef dims); +CAFFE2_API Tensor roll_cpu(const Tensor & self, IntArrayRef shifts, IntArrayRef dims={}); +CAFFE2_API Tensor roll_cuda(const Tensor & self, IntArrayRef shifts, IntArrayRef dims={}); +CAFFE2_API Tensor rot90(const Tensor & 
self, int64_t k=1, IntArrayRef dims={0,1}); +CAFFE2_API Tensor trapz(const Tensor & y, const Tensor & x, int64_t dim=-1); +CAFFE2_API Tensor trapz(const Tensor & y, double dx=1, int64_t dim=-1); +CAFFE2_API Tensor _trilinear(const Tensor & i1, const Tensor & i2, const Tensor & i3, IntArrayRef expand1, IntArrayRef expand2, IntArrayRef expand3, IntArrayRef sumdim, int64_t unroll_dim=1); +CAFFE2_API Tensor triplet_margin_loss(const Tensor & anchor, const Tensor & positive, const Tensor & negative, double margin=1.0, double p=2, double eps=1e-06, bool swap=false, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor trunc(const Tensor & self); +CAFFE2_API Tensor & trunc_(Tensor & self); +CAFFE2_API Tensor & trunc_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor type_as(const Tensor & self, const Tensor & other); +CAFFE2_API bool _has_compatible_shallow_copy_type(const Tensor & self, const Tensor & from); +CAFFE2_API std::tuple _unique_cpu(const Tensor & self, bool sorted=true, bool return_inverse=false); +CAFFE2_API std::tuple _unique_cuda(const Tensor & self, bool sorted=true, bool return_inverse=false); +CAFFE2_API std::tuple unique_dim_cpu(const Tensor & self, int64_t dim, bool sorted=true, bool return_inverse=false, bool return_counts=false); +CAFFE2_API std::tuple unique_dim_cuda(const Tensor & self, int64_t dim, bool sorted=true, bool return_inverse=false, bool return_counts=false); +CAFFE2_API std::tuple unique_consecutive_cpu(const Tensor & self, bool return_inverse=false, bool return_counts=false, c10::optional dim=c10::nullopt); +CAFFE2_API std::tuple unique_consecutive_cuda(const Tensor & self, bool return_inverse=false, bool return_counts=false, c10::optional dim=c10::nullopt); +CAFFE2_API std::tuple unique_dim_consecutive_cpu(const Tensor & self, int64_t dim, bool return_inverse=false, bool return_counts=false); +CAFFE2_API std::tuple unique_dim_consecutive_cuda(const Tensor & self, int64_t dim, bool return_inverse=false, bool return_counts=false); +CAFFE2_API std::tuple _unique2_cpu(const Tensor & self, bool sorted=true, bool return_inverse=false, bool return_counts=false); +CAFFE2_API std::tuple _unique2_cuda(const Tensor & self, bool sorted=true, bool return_inverse=false, bool return_counts=false); +CAFFE2_API Tensor _unsafe_view(const Tensor & self, IntArrayRef size); +CAFFE2_API Tensor unsqueeze(const Tensor & self, int64_t dim); +CAFFE2_API Tensor & unsqueeze_(Tensor & self, int64_t dim); +CAFFE2_API Tensor var(const Tensor & self, bool unbiased=true); +CAFFE2_API Tensor var(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +CAFFE2_API Tensor & var_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor var(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & var_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +CAFFE2_API std::tuple var_mean(const Tensor & self, bool unbiased=true); +CAFFE2_API std::tuple var_mean(const Tensor & self, IntArrayRef dim, bool unbiased=true, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple var_mean(const Tensor & self, DimnameList dim, bool unbiased=true, bool keepdim=false); +#endif +CAFFE2_API Tensor view_as(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor where(const Tensor & condition, const Tensor & self, const Tensor & other); +CAFFE2_API 
std::vector where(const Tensor & condition); +CAFFE2_API Tensor _s_where_cpu(const Tensor & condition, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor _s_where_cuda(const Tensor & condition, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor norm_except_dim(const Tensor & v, int64_t pow=2, int64_t dim=0); +CAFFE2_API Tensor _weight_norm(const Tensor & v, const Tensor & g, int64_t dim=0); +CAFFE2_API std::tuple weight_norm_cuda(const Tensor & v, const Tensor & g, int64_t dim=0); +CAFFE2_API std::tuple weight_norm_cuda_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); +CAFFE2_API std::tuple _weight_norm_differentiable_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor zeros(IntArrayRef size, c10::optional names, const TensorOptions & options={}); +#endif +CAFFE2_API Tensor zeros(IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor & zeros_out(Tensor & out, IntArrayRef size); +CAFFE2_API Tensor zeros_like(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor zeros_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor _standard_gamma_grad_cpu(const Tensor & self, const Tensor & output); +CAFFE2_API Tensor _standard_gamma_grad_cuda(const Tensor & self, const Tensor & output); +CAFFE2_API Tensor _s_gamma_cpu(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor _s_gamma_cuda(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor _dirichlet_grad_cpu(const Tensor & x, const Tensor & alpha, const Tensor & total); +CAFFE2_API Tensor _dirichlet_grad_cuda(const Tensor & x, const Tensor & alpha, const Tensor & total); +CAFFE2_API Tensor _s_dirichlet_cpu(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor _s_dirichlet_cuda(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor _s_poisson_cpu(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor _s_poisson_cuda(const Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor norm_sparse(const Tensor & self, Scalar p=2); +CAFFE2_API Tensor _sparse_sum(const Tensor & self); +CAFFE2_API Tensor _sparse_sum(const Tensor & self, ScalarType dtype); +CAFFE2_API Tensor _sparse_sum(const Tensor & self, IntArrayRef dim); +CAFFE2_API Tensor _sparse_sum(const Tensor & self, IntArrayRef dim, ScalarType dtype); +CAFFE2_API Tensor _sparse_sum_backward_cpu(const Tensor & grad, const Tensor & self, IntArrayRef dim); +CAFFE2_API Tensor _sparse_sum_backward_cuda(const Tensor & grad, const Tensor & self, IntArrayRef dim); +CAFFE2_API Tensor norm(const Tensor & self, c10::optional p, ScalarType dtype); +CAFFE2_API Tensor norm(const Tensor & self, Scalar p=2); +CAFFE2_API Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); +CAFFE2_API Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); +CAFFE2_API Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); +#endif 
+#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim=false); +#endif +CAFFE2_API Tensor frobenius_norm(const Tensor & self); +CAFFE2_API Tensor frobenius_norm(const Tensor & self, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor & frobenius_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor nuclear_norm(const Tensor & self, bool keepdim=false); +CAFFE2_API Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, bool keepdim=false); +CAFFE2_API Tensor nuclear_norm(const Tensor & self, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim=false); +CAFFE2_API Tensor clone(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor mkldnn_clone(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor quantized_clone(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor clone_sparse(const Tensor & self, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor & resize_as_(Tensor & self, const Tensor & the_template, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); +CAFFE2_API Tensor & pow_out_sparse_scalar(Tensor & out, const Tensor & self, Scalar exponent); +CAFFE2_API Tensor pow(const Tensor & self, Scalar exponent); +CAFFE2_API Tensor pow_sparse_scalar(const Tensor & self, Scalar exponent); +CAFFE2_API Tensor & zero_(Tensor & self); +CAFFE2_API Tensor & mkldnn_zero_(Tensor & self); +CAFFE2_API Tensor & zero_sparse_(Tensor & self); +CAFFE2_API Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & sub_out_sparse(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor sub_sparse(const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & sub_(Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor & sub_sparse_(Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor sub(const Tensor & self, Scalar other, Scalar alpha=1); +CAFFE2_API Tensor & sub_(Tensor & self, Scalar other, Scalar alpha=1); +CAFFE2_API Tensor rsub(const Tensor & self, const Tensor & other, Scalar alpha=1); +CAFFE2_API Tensor rsub(const Tensor & self, Scalar other, Scalar alpha=1); +CAFFE2_API Tensor _sparse_addmm(const Tensor & self, const Tensor & sparse, const Tensor & dense, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & addmm_out_sparse_dense_cpu(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & addmm_out_sparse_dense_cuda(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor addmm_sparse_dense_cpu(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor addmm_sparse_dense_cuda(const Tensor & self, 
const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & s_addmm_sparse_dense_cpu_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor & s_addmm_sparse_dense_cuda_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1); +CAFFE2_API Tensor sparse_coo_tensor(IntArrayRef size, const TensorOptions & options); +CAFFE2_API Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, const TensorOptions & options={}); +CAFFE2_API Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor _sparse_coo_tensor_unsafe(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options={}); +CAFFE2_API Tensor new_with_dims_sparse(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const TensorOptions & options); +CAFFE2_API Tensor new_with_dims_and_tensor_sparse(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const Tensor & indices, const Tensor & values, const TensorOptions & options); +CAFFE2_API Tensor & sparse_resize_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); +CAFFE2_API Tensor & sparse_resize_and_clear_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); +CAFFE2_API Tensor sparse_mask_cpu(const Tensor & self, const Tensor & mask); +CAFFE2_API Tensor sparse_mask_cuda(const Tensor & self, const Tensor & mask); +CAFFE2_API Tensor mkldnn_to_dense(const Tensor & self); +CAFFE2_API Tensor sparse_to_dense(const Tensor & self); +CAFFE2_API Tensor to_dense_backward(const Tensor & grad, const Tensor & input); +CAFFE2_API int64_t sparse_dim_sparse(const Tensor & self); +CAFFE2_API int64_t sparse_dim_sparse(const Tensor & self); +CAFFE2_API int64_t dense_dim_sparse(const Tensor & self); +CAFFE2_API int64_t dense_dim_sparse(const Tensor & self); +CAFFE2_API int64_t _nnz_sparse(const Tensor & self); +CAFFE2_API Tensor coalesce_sparse_cpu(const Tensor & self); +CAFFE2_API Tensor coalesce_sparse_cuda(const Tensor & self); +CAFFE2_API bool is_coalesced_sparse(const Tensor & self); +CAFFE2_API Tensor _indices_sparse(const Tensor & self); +CAFFE2_API Tensor _values_sparse(const Tensor & self); +CAFFE2_API Tensor & _coalesced_sparse_(Tensor & self, bool coalesced); +CAFFE2_API Tensor indices_sparse(const Tensor & self); +CAFFE2_API Tensor values_sparse(const Tensor & self); +CAFFE2_API Tensor & hspmm_out_sparse_cpu(Tensor & out, const Tensor & mat1, const Tensor & mat2); +CAFFE2_API Tensor & hspmm_out_sparse_cuda(Tensor & out, const Tensor & mat1, const Tensor & mat2); +CAFFE2_API Tensor hspmm_sparse_cpu(const Tensor & mat1, const Tensor & mat2); +CAFFE2_API Tensor hspmm_sparse_cuda(const Tensor & mat1, const Tensor & mat2); +CAFFE2_API Tensor & copy_sparse_(Tensor & self, const Tensor & src, bool non_blocking=false); +CAFFE2_API std::vector unbind(const Tensor & self, int64_t dim=0); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::vector unbind(const Tensor & self, Dimname dim); +#endif +CAFFE2_API Tensor dense_to_sparse(const Tensor & self, int64_t sparse_dim); +CAFFE2_API Tensor dense_to_sparse(const Tensor & self); +CAFFE2_API Tensor dense_to_mkldnn(const Tensor & self); +CAFFE2_API Tensor mkldnn_reorder_conv2d_weight(const Tensor & self, IntArrayRef padding=0, IntArrayRef stride=1, IntArrayRef dilation=1, int64_t groups=1); +CAFFE2_API Tensor to_mkldnn_backward(const Tensor & grad, const Tensor & input); 
+CAFFE2_API Tensor quantize_per_tensor_cpu(const Tensor & self, double scale, int64_t zero_point, ScalarType dtype); +CAFFE2_API Tensor quantize_per_channel_cpu(const Tensor & self, const Tensor & scales, const Tensor & zero_points, int64_t axis, ScalarType dtype); +CAFFE2_API Tensor dequantize_quant(const Tensor & self); +CAFFE2_API double q_scale_quant(const Tensor & self); +CAFFE2_API int64_t q_zero_point_quant(const Tensor & self); +CAFFE2_API Tensor q_per_channel_scales_quant(const Tensor & self); +CAFFE2_API Tensor q_per_channel_zero_points_quant(const Tensor & self); +CAFFE2_API int64_t q_per_channel_axis_quant(const Tensor & self); +CAFFE2_API Tensor int_repr_quant(const Tensor & self); +CAFFE2_API Tensor make_per_tensor_quantized_tensor_cpu(const Tensor & self, double scale, int64_t zero_point); +CAFFE2_API Tensor make_per_channel_quantized_tensor_cpu(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis); +CAFFE2_API QScheme qscheme_quant(const Tensor & self); +CAFFE2_API Tensor fake_quantize_per_tensor_affine_cpu(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_tensor_affine_cuda(const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_tensor_affine_backward_cpu(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_tensor_affine_backward_cuda(const Tensor & grad, const Tensor & self, double scale, int64_t zero_point, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_channel_affine_cpu(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_channel_affine_cuda(const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_channel_affine_backward_cpu(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor fake_quantize_per_channel_affine_backward_cuda(const Tensor & grad, const Tensor & self, const Tensor & scale, const Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max); +CAFFE2_API Tensor to(const Tensor & self, const TensorOptions & options, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor to(const Tensor & self, Device device, ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor to(const Tensor & self, ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +CAFFE2_API Tensor to(const Tensor & self, const Tensor & other, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt); +CAFFE2_API std::vector meshgrid(TensorList tensors); +CAFFE2_API Tensor cartesian_prod(TensorList tensors); +CAFFE2_API Tensor combinations(const Tensor & self, int64_t r=2, bool with_replacement=false); +CAFFE2_API Scalar item(const Tensor & self); +CAFFE2_API ScalarType result_type(const Tensor & tensor, const Tensor & other); +CAFFE2_API ScalarType result_type(const Tensor & tensor, Scalar other); +CAFFE2_API ScalarType result_type(Scalar scalar, const Tensor & tensor); 
+CAFFE2_API ScalarType result_type(Scalar scalar1, Scalar scalar2); +CAFFE2_API bool can_cast(ScalarType from, ScalarType to); +CAFFE2_API ScalarType promote_types(ScalarType type1, ScalarType type2); +CAFFE2_API Scalar _local_scalar_dense_cpu(const Tensor & self); +CAFFE2_API Scalar _local_scalar_dense_cuda(const Tensor & self); +CAFFE2_API std::tuple _thnn_fused_lstm_cell_cuda(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & cx, const Tensor & input_bias={}, const Tensor & hidden_bias={}); +CAFFE2_API std::tuple _thnn_fused_lstm_cell_backward_cuda(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & cx, const Tensor & cy, const Tensor & workspace, bool has_bias); +CAFFE2_API std::tuple _thnn_differentiable_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & input_bias, const Tensor & hidden_bias, const Tensor & cx, const Tensor & cy); +CAFFE2_API std::tuple _thnn_fused_gru_cell_cuda(const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias={}, const Tensor & hidden_bias={}); +CAFFE2_API std::tuple _thnn_fused_gru_cell_backward_cuda(const Tensor & grad_hy, const Tensor & workspace, bool has_bias); +CAFFE2_API std::tuple _thnn_differentiable_gru_cell_backward(const Tensor & grad_hy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias, const Tensor & hidden_bias); +CAFFE2_API std::tuple lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +CAFFE2_API std::tuple lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +CAFFE2_API std::tuple gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +CAFFE2_API std::tuple gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +CAFFE2_API std::tuple rnn_tanh(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +CAFFE2_API std::tuple rnn_tanh(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +CAFFE2_API std::tuple rnn_relu(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +CAFFE2_API std::tuple rnn_relu(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +CAFFE2_API std::tuple lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +CAFFE2_API Tensor gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +CAFFE2_API Tensor rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & 
b_hh={}); +CAFFE2_API Tensor rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih={}, const Tensor & b_hh={}); +CAFFE2_API std::tuple quantized_lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, c10::optional dtype=c10::nullopt, bool use_dynamic=false); +CAFFE2_API std::tuple quantized_lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, c10::optional dtype=c10::nullopt, bool use_dynamic=false); +CAFFE2_API std::tuple quantized_gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); +CAFFE2_API std::tuple quantized_gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); +CAFFE2_API std::tuple quantized_lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +CAFFE2_API Tensor quantized_gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +CAFFE2_API Tensor quantized_rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +CAFFE2_API Tensor quantized_rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); +CAFFE2_API std::tuple _pack_padded_sequence(const Tensor & input, const Tensor & lengths, bool batch_first); +CAFFE2_API Tensor _pack_padded_sequence_backward(const Tensor & grad, IntArrayRef input_size, const Tensor & batch_sizes, bool batch_first); +CAFFE2_API std::tuple _pad_packed_sequence(const Tensor & data, const Tensor & batch_sizes, bool batch_first, Scalar padding_value, int64_t total_length); +CAFFE2_API Tensor & set_(Tensor & self, Storage source); +CAFFE2_API Tensor & set_storage(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride={}); +CAFFE2_API Tensor & set_cpu_(Tensor & self); +CAFFE2_API Tensor & set_cuda_(Tensor & self); +CAFFE2_API Tensor & set_quantizer_(Tensor & self, ConstQuantizerPtr quantizer); +CAFFE2_API bool is_set_to(const Tensor & self, const Tensor & tensor); +CAFFE2_API Tensor & masked_fill__cpu(Tensor & self, const Tensor & mask, Scalar value); +CAFFE2_API Tensor & masked_fill__cuda(Tensor & self, const Tensor & mask, Scalar 
value); +CAFFE2_API Tensor masked_fill(const Tensor & self, const Tensor & mask, Scalar value); +CAFFE2_API Tensor & masked_fill__cpu(Tensor & self, const Tensor & mask, const Tensor & value); +CAFFE2_API Tensor & masked_fill__cuda(Tensor & self, const Tensor & mask, const Tensor & value); +CAFFE2_API Tensor masked_fill(const Tensor & self, const Tensor & mask, const Tensor & value); +CAFFE2_API Tensor & masked_scatter__cpu(Tensor & self, const Tensor & mask, const Tensor & source); +CAFFE2_API Tensor & masked_scatter__cuda(Tensor & self, const Tensor & mask, const Tensor & source); +CAFFE2_API Tensor masked_scatter(const Tensor & self, const Tensor & mask, const Tensor & source); +CAFFE2_API Tensor view(const Tensor & self, IntArrayRef size); +CAFFE2_API Tensor mkldnn_view(const Tensor & self, IntArrayRef size); +CAFFE2_API Tensor & index_add_cpu_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +CAFFE2_API Tensor index_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor index_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); +#endif +CAFFE2_API Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); +CAFFE2_API Tensor & index_fill_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); +CAFFE2_API Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & index_fill_(Tensor & self, Dimname dim, const Tensor & index, Scalar value); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & index_fill_(Tensor & self, Dimname dim, const Tensor & index, const Tensor & value); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & value); +#endif +CAFFE2_API Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +CAFFE2_API Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); +#endif +CAFFE2_API Tensor scatter_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor scatter_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); +#endif +CAFFE2_API Tensor & lt_(Tensor & self, Scalar other); +CAFFE2_API Tensor & lt_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & gt_(Tensor & self, Scalar other); +CAFFE2_API Tensor & gt_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & le_(Tensor & self, Scalar other); +CAFFE2_API Tensor & le_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & ge_(Tensor & self, Scalar other); +CAFFE2_API Tensor & ge_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & eq_(Tensor & self, Scalar other); +CAFFE2_API Tensor & eq_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & ne_(Tensor & self, Scalar other); +CAFFE2_API Tensor & ne_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, 
const Tensor & other); +CAFFE2_API Tensor & bitwise_xor_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor bitwise_xor(const Tensor & self, Scalar other); +CAFFE2_API Tensor bitwise_xor(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & bitwise_xor_(Tensor & self, Scalar other); +CAFFE2_API Tensor & bitwise_xor_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor __xor__(const Tensor & self, Scalar other); +CAFFE2_API Tensor __xor__(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & __ixor__(Tensor & self, Scalar other); +CAFFE2_API Tensor & __ixor__(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & _lgamma__cpu(Tensor & self); +CAFFE2_API Tensor & _lgamma__cuda(Tensor & self); +CAFFE2_API Tensor & atan2_(Tensor & self, const Tensor & other); +CAFFE2_API Tensor & tril_cpu_(Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & tril_cuda_(Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & triu_cpu_(Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & triu_cuda_(Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & digamma_(Tensor & self); +CAFFE2_API Tensor & polygamma_(Tensor & self, int64_t n); +CAFFE2_API Tensor & pow_(Tensor & self, Scalar exponent); +CAFFE2_API Tensor & pow_(Tensor & self, const Tensor & exponent); +CAFFE2_API Tensor & lerp_cpu_scalar_(Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor & lerp_cuda_scalar_(Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor & lerp_cpu_tensor_(Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor & lerp_cuda_tensor_(Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor & addcdiv_(Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API Tensor & clamped_random_cuda_(Tensor & self, int64_t from, int64_t to, Generator * generator=nullptr); +CAFFE2_API Tensor & capped_random_cuda_(Tensor & self, int64_t to, Generator * generator=nullptr); +CAFFE2_API Tensor & random_cuda_(Tensor & self, Generator * generator=nullptr); +CAFFE2_API Tensor & uniform_cuda_(Tensor & self, double from=0, double to=1, Generator * generator=nullptr); +CAFFE2_API Tensor & normal_cuda_(Tensor & self, double mean=0, double std=1, Generator * generator=nullptr); +CAFFE2_API Tensor & cauchy_cuda_(Tensor & self, double median=0, double sigma=1, Generator * generator=nullptr); +CAFFE2_API Tensor & log_normal_cuda_(Tensor & self, double mean=1, double std=2, Generator * generator=nullptr); +CAFFE2_API Tensor & exponential_cuda_(Tensor & self, double lambd=1, Generator * generator=nullptr); +CAFFE2_API Tensor & geometric_cuda_(Tensor & self, double p, Generator * generator=nullptr); +CAFFE2_API Tensor & cross_out(Tensor & out, const Tensor & self, const Tensor & other, c10::optional dim=c10::nullopt); +CAFFE2_API Tensor cross(const Tensor & self, const Tensor & other, c10::optional dim=c10::nullopt); +CAFFE2_API Tensor & triu_cpu_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & triu_cuda_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor triu(const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & tril_cpu_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor & tril_cuda_out(Tensor & out, const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor tril(const Tensor & self, int64_t diagonal=0); +CAFFE2_API Tensor tril_indices_cpu(int64_t row, int64_t col, int64_t offset=0, const 
TensorOptions & options=at::kLong); +CAFFE2_API Tensor tril_indices_cuda(int64_t row, int64_t col, int64_t offset=0, const TensorOptions & options=at::kLong); +CAFFE2_API Tensor triu_indices_cpu(int64_t row, int64_t col, int64_t offset=0, const TensorOptions & options=at::kLong); +CAFFE2_API Tensor triu_indices_cuda(int64_t row, int64_t col, int64_t offset=0, const TensorOptions & options=at::kLong); +CAFFE2_API Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & ne_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor ne(const Tensor & self, Scalar other); +CAFFE2_API Tensor ne_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & ne_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor ne(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor ne_quantized_cpu(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & eq_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor eq(const Tensor & self, Scalar other); +CAFFE2_API Tensor eq_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & eq_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor eq(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor eq_quantized_cpu(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & ge_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor ge(const Tensor & self, Scalar other); +CAFFE2_API Tensor ge_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & ge_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor ge(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor ge_quantized_cpu(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & le_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & le_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor le(const Tensor & self, Scalar other); +CAFFE2_API Tensor le_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & le_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor le(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor le_quantized_cpu(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & gt_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor gt(const Tensor & self, Scalar other); +CAFFE2_API Tensor gt_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & gt_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & gt_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor gt(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor gt_quantized_cpu(const Tensor & self, const Tensor 
& other); +CAFFE2_API Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor & lt_out_quantized_cpu(Tensor & out, const Tensor & self, Scalar other); +CAFFE2_API Tensor lt(const Tensor & self, Scalar other); +CAFFE2_API Tensor lt_quantized_cpu(const Tensor & self, Scalar other); +CAFFE2_API Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & lt_out_quantized_cpu(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor lt(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor lt_quantized_cpu(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor index_select_sparse(const Tensor & self, int64_t dim, const Tensor & index); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & index_select_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor index_select(const Tensor & self, Dimname dim, const Tensor & index); +#endif +CAFFE2_API Tensor & masked_select_out_cpu(Tensor & out, const Tensor & self, const Tensor & mask); +CAFFE2_API Tensor & masked_select_out_cuda(Tensor & out, const Tensor & self, const Tensor & mask); +CAFFE2_API Tensor masked_select_cpu(const Tensor & self, const Tensor & mask); +CAFFE2_API Tensor masked_select_cuda(const Tensor & self, const Tensor & mask); +CAFFE2_API std::vector nonzero_numpy(const Tensor & self); +CAFFE2_API Tensor & gather_out_cpu(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +CAFFE2_API Tensor & gather_out_cuda(Tensor & out, const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +CAFFE2_API Tensor gather_cpu(const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +CAFFE2_API Tensor gather_cuda(const Tensor & self, int64_t dim, const Tensor & index, bool sparse_grad=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor & gather_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor gather(const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad=false); +#endif +CAFFE2_API Tensor _gather_sparse_backward(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & grad); +CAFFE2_API Tensor & addcmul_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API Tensor addcmul(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API Tensor & addcmul_(Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API Tensor & addcdiv_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API Tensor addcdiv(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value=1); +CAFFE2_API std::tuple triangular_solve_out(Tensor & X, Tensor & M, const Tensor & self, const Tensor & A, bool upper=true, bool transpose=false, bool unitriangular=false); +CAFFE2_API std::tuple triangular_solve(const Tensor & self, const Tensor & A, bool upper=true, bool transpose=false, bool unitriangular=false); +CAFFE2_API std::tuple _triangular_solve_helper_cpu(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); +CAFFE2_API std::tuple _triangular_solve_helper_cuda(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); +CAFFE2_API 
std::tuple symeig_out(Tensor & e, Tensor & V, const Tensor & self, bool eigenvectors=false, bool upper=true); +CAFFE2_API std::tuple symeig(const Tensor & self, bool eigenvectors=false, bool upper=true); +CAFFE2_API std::tuple _symeig_helper_cpu(const Tensor & self, bool eigenvectors, bool upper); +CAFFE2_API std::tuple _symeig_helper_cuda(const Tensor & self, bool eigenvectors, bool upper); +CAFFE2_API std::tuple svd_out(Tensor & U, Tensor & S, Tensor & V, const Tensor & self, bool some=true, bool compute_uv=true); +CAFFE2_API std::tuple svd(const Tensor & self, bool some=true, bool compute_uv=true); +CAFFE2_API std::tuple _svd_helper_cpu(const Tensor & self, bool some, bool compute_uv); +CAFFE2_API std::tuple _svd_helper_cuda(const Tensor & self, bool some, bool compute_uv); +CAFFE2_API Tensor & cholesky_out(Tensor & out, const Tensor & self, bool upper=false); +CAFFE2_API Tensor cholesky(const Tensor & self, bool upper=false); +CAFFE2_API Tensor _cholesky_helper_cpu(const Tensor & self, bool upper); +CAFFE2_API Tensor _cholesky_helper_cuda(const Tensor & self, bool upper); +CAFFE2_API Tensor & cholesky_solve_out(Tensor & out, const Tensor & self, const Tensor & input2, bool upper=false); +CAFFE2_API Tensor cholesky_solve(const Tensor & self, const Tensor & input2, bool upper=false); +CAFFE2_API Tensor _cholesky_solve_helper_cpu(const Tensor & self, const Tensor & A, bool upper); +CAFFE2_API Tensor _cholesky_solve_helper_cuda(const Tensor & self, const Tensor & A, bool upper); +CAFFE2_API std::tuple solve(const Tensor & self, const Tensor & A); +CAFFE2_API std::tuple solve_out(Tensor & solution, Tensor & lu, const Tensor & self, const Tensor & A); +CAFFE2_API std::tuple _solve_helper_cpu(const Tensor & self, const Tensor & A); +CAFFE2_API std::tuple _solve_helper_cuda(const Tensor & self, const Tensor & A); +CAFFE2_API std::tuple qr_out(Tensor & Q, Tensor & R, const Tensor & self, bool some=true); +CAFFE2_API std::tuple qr(const Tensor & self, bool some=true); +CAFFE2_API std::tuple _qr_helper_cpu(const Tensor & self, bool some); +CAFFE2_API std::tuple _qr_helper_cuda(const Tensor & self, bool some); +CAFFE2_API std::tuple _lu_with_info_cpu(const Tensor & self, bool pivot=true, bool check_errors=true); +CAFFE2_API std::tuple _lu_with_info_cuda(const Tensor & self, bool pivot=true, bool check_errors=true); +CAFFE2_API Tensor & lu_solve_out(Tensor & out, const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +CAFFE2_API Tensor lu_solve(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +CAFFE2_API Tensor _lu_solve_helper_cpu(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +CAFFE2_API Tensor _lu_solve_helper_cuda(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); +CAFFE2_API Tensor & multinomial_out(Tensor & out, const Tensor & self, int64_t num_samples, bool replacement=false, Generator * generator=nullptr); +CAFFE2_API Tensor multinomial(const Tensor & self, int64_t num_samples, bool replacement=false, Generator * generator=nullptr); +CAFFE2_API Tensor & _lgamma_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _lgamma_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor lgamma(const Tensor & self); +CAFFE2_API Tensor & digamma_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor digamma(const Tensor & self); +CAFFE2_API Tensor & polygamma_out(Tensor & out, int64_t n, const Tensor & self); +CAFFE2_API Tensor polygamma(int64_t n, const Tensor & self); +CAFFE2_API Tensor 
erfinv(const Tensor & self); +CAFFE2_API Tensor & _erfinv__cpu(Tensor & self); +CAFFE2_API Tensor & _erfinv__cuda(Tensor & self); +CAFFE2_API Tensor & _erfinv_out_cpu(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & _erfinv_out_cuda(Tensor & out, const Tensor & self); +CAFFE2_API Tensor sign(const Tensor & self); +CAFFE2_API Tensor & sign_(Tensor & self); +CAFFE2_API Tensor & sign_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor & atan2_out(Tensor & out, const Tensor & self, const Tensor & other); +CAFFE2_API Tensor atan2(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & lerp_cpu_scalar_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor & lerp_cuda_scalar_out(Tensor & out, const Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor & lerp_cpu_tensor_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor & lerp_cuda_tensor_out(Tensor & out, const Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor lerp_cpu_scalar(const Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor lerp_cuda_scalar(const Tensor & self, const Tensor & end, Scalar weight); +CAFFE2_API Tensor lerp_cpu_tensor(const Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor lerp_cuda_tensor(const Tensor & self, const Tensor & end, const Tensor & weight); +CAFFE2_API Tensor & _histc_out_cuda(Tensor & out, const Tensor & self, int64_t bins=100, Scalar min=0, Scalar max=0); +CAFFE2_API Tensor _histc_cuda(const Tensor & self, int64_t bins=100, Scalar min=0, Scalar max=0); +CAFFE2_API Tensor min_quant(const Tensor & self); +CAFFE2_API Tensor max_quant(const Tensor & self); +CAFFE2_API Tensor median_cpu(const Tensor & self); +CAFFE2_API Tensor median_cuda(const Tensor & self); +CAFFE2_API std::tuple sort_quant(const Tensor & self, int64_t dim=-1, bool descending=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple sort_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool descending=false); +#endif +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API std::tuple sort(const Tensor & self, Dimname dim, bool descending=false); +#endif +CAFFE2_API Tensor argsort(const Tensor & self, int64_t dim=-1, bool descending=false); +#ifdef BUILD_NAMEDTENSOR +CAFFE2_API Tensor argsort(const Tensor & self, Dimname dim, bool descending=false); +#endif +CAFFE2_API std::tuple topk_out_cpu(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true); +CAFFE2_API std::tuple topk(const Tensor & self, int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true); +CAFFE2_API std::tuple quantized_topk_cpu(const Tensor & self, int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true); +CAFFE2_API Tensor all(const Tensor & self); +CAFFE2_API Tensor any(const Tensor & self); +CAFFE2_API Tensor unfold(const Tensor & self, int64_t dimension, int64_t size, int64_t step); +CAFFE2_API bool quantized_equal(const Tensor & self, const Tensor & other); +CAFFE2_API Tensor & pow_out(Tensor & out, const Tensor & self, const Tensor & exponent); +CAFFE2_API Tensor pow(const Tensor & self, const Tensor & exponent); +CAFFE2_API Tensor & pow_out(Tensor & out, Scalar self, const Tensor & exponent); +CAFFE2_API Tensor pow(Scalar self, const Tensor & exponent); +CAFFE2_API Tensor & normal_out_cuda(Tensor & out, const Tensor & mean, double std=1, Generator * generator=nullptr); +CAFFE2_API Tensor 
normal_cuda(const Tensor & mean, double std=1, Generator * generator=nullptr); +CAFFE2_API Tensor & normal_out_cuda(Tensor & out, double mean, const Tensor & std, Generator * generator=nullptr); +CAFFE2_API Tensor normal_cuda(double mean, const Tensor & std, Generator * generator=nullptr); +CAFFE2_API Tensor & normal_out_cuda(Tensor & out, const Tensor & mean, const Tensor & std, Generator * generator=nullptr); +CAFFE2_API Tensor normal_cuda(const Tensor & mean, const Tensor & std, Generator * generator=nullptr); +CAFFE2_API Tensor normal(double mean, double std, IntArrayRef size, Generator * generator=nullptr, const TensorOptions & options={}); +CAFFE2_API Tensor & normal_out(Tensor & out, double mean, double std, IntArrayRef size, Generator * generator=nullptr); +CAFFE2_API Tensor alias(const Tensor & self); +CAFFE2_API Tensor & mse_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor mse_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor & mse_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor mse_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor & l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor l1_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor & l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor & multi_margin_loss_cpu_out(Tensor & out, const Tensor & self, const Tensor & target, Scalar p=1, Scalar margin=1, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor multi_margin_loss_cpu(const Tensor & self, const Tensor & target, Scalar p=1, Scalar margin=1, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor & multi_margin_loss_cpu_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor multi_margin_loss_cpu_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, Scalar p, Scalar margin, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor & multilabel_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor multilabel_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API std::tuple multilabel_margin_loss_forward_out_cpu(Tensor & output, Tensor & is_target, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API std::tuple multilabel_margin_loss_forward_cpu(const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor & multilabel_margin_loss_backward_cpu_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +CAFFE2_API Tensor multilabel_margin_loss_backward_cpu(const Tensor & grad_output, const 
Tensor & self, const Tensor & target, int64_t reduction, const Tensor & is_target); +CAFFE2_API Tensor & nll_loss_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +CAFFE2_API Tensor nll_loss(const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +CAFFE2_API std::tuple nll_loss_forward_out_cpu(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +CAFFE2_API std::tuple nll_loss_forward_cpu(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +CAFFE2_API Tensor & nll_loss_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +CAFFE2_API Tensor nll_loss_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +CAFFE2_API Tensor & nll_loss2d_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +CAFFE2_API Tensor nll_loss2d(const Tensor & self, const Tensor & target, const Tensor & weight={}, int64_t reduction=at::Reduction::Mean, int64_t ignore_index=-100); +CAFFE2_API std::tuple nll_loss2d_forward_out_cpu(Tensor & output, Tensor & total_weight, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +CAFFE2_API std::tuple nll_loss2d_forward_cpu(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); +CAFFE2_API Tensor & nll_loss2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +CAFFE2_API Tensor nll_loss2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index, const Tensor & total_weight); +CAFFE2_API Tensor & smooth_l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor smooth_l1_loss(const Tensor & self, const Tensor & target, int64_t reduction=at::Reduction::Mean); +CAFFE2_API Tensor & smooth_l1_loss_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor smooth_l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); +CAFFE2_API Tensor & log_sigmoid_out(Tensor & out, const Tensor & self); +CAFFE2_API Tensor log_sigmoid(const Tensor & self); +CAFFE2_API Tensor & adaptive_avg_pool2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & adaptive_avg_pool2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & mkldnn_adaptive_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor mkldnn_adaptive_avg_pool2d(const Tensor & self, 
IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool2d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool2d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor quantized_adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool2d_backward_cpu(const Tensor & grad_output, const Tensor & self); +CAFFE2_API Tensor adaptive_avg_pool2d_backward_cuda(const Tensor & grad_output, const Tensor & self); +CAFFE2_API Tensor & adaptive_avg_pool3d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & adaptive_avg_pool3d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool3d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor adaptive_avg_pool3d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & adaptive_avg_pool3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self); +CAFFE2_API Tensor & adaptive_avg_pool3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self); +CAFFE2_API Tensor adaptive_avg_pool3d_backward_cpu(const Tensor & grad_output, const Tensor & self); +CAFFE2_API Tensor adaptive_avg_pool3d_backward_cuda(const Tensor & grad_output, const Tensor & self); +CAFFE2_API std::tuple adaptive_max_pool2d_out_cpu(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool2d_out_cuda(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool2d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool2d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & adaptive_max_pool2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor & adaptive_max_pool2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor adaptive_max_pool2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor adaptive_max_pool2d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API std::tuple adaptive_max_pool3d_out_cpu(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool3d_out_cuda(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool3d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API std::tuple adaptive_max_pool3d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & adaptive_max_pool3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor & adaptive_max_pool3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor adaptive_max_pool3d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor adaptive_max_pool3d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & indices); +CAFFE2_API Tensor & avg_pool2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool 
ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor & avg_pool2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor & mkldnn_avg_pool2d_out(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor avg_pool2d_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor avg_pool2d_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor mkldnn_avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor quantized_avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor & avg_pool2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor & avg_pool2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor avg_pool2d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor avg_pool2d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor & avg_pool3d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor & avg_pool3d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor avg_pool3d_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor avg_pool3d_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, bool ceil_mode=false, bool count_include_pad=true, c10::optional divisor_override=c10::nullopt); +CAFFE2_API Tensor & avg_pool3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef 
stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor & avg_pool3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor avg_pool3d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API Tensor avg_pool3d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); +CAFFE2_API std::tuple fractional_max_pool2d_out_cpu(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool2d_out_cuda(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool2d_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool2d_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API Tensor & fractional_max_pool2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API Tensor & fractional_max_pool2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API Tensor fractional_max_pool2d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API Tensor fractional_max_pool2d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API std::tuple fractional_max_pool3d_out_cpu(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool3d_out_cuda(Tensor & output, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool3d_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API std::tuple fractional_max_pool3d_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & random_samples); +CAFFE2_API Tensor & fractional_max_pool3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API Tensor & fractional_max_pool3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API Tensor fractional_max_pool3d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & 
indices); +CAFFE2_API Tensor fractional_max_pool3d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef output_size, const Tensor & indices); +CAFFE2_API std::tuple max_pool2d_with_indices_out_cpu(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool2d_with_indices_out_cuda(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool2d_with_indices_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool2d_with_indices_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor & max_pool2d_with_indices_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor & max_pool2d_with_indices_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor max_pool2d_with_indices_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor max_pool2d_with_indices_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API std::tuple max_pool3d_with_indices_out_cpu(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool3d_with_indices_out_cuda(Tensor & out, Tensor & indices, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool3d_with_indices_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API std::tuple max_pool3d_with_indices_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride={}, IntArrayRef padding=0, IntArrayRef dilation=1, bool ceil_mode=false); +CAFFE2_API Tensor & max_pool3d_with_indices_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor & max_pool3d_with_indices_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor max_pool3d_with_indices_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef 
dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor max_pool3d_with_indices_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode, const Tensor & indices); +CAFFE2_API Tensor & max_unpooling2d_forward_out_cpu(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor & max_unpooling2d_forward_out_cuda(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor max_unpooling2d_forward_cpu(const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor max_unpooling2d_forward_cuda(const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor & max_unpooling2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor & max_unpooling2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor max_unpooling2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor max_unpooling2d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size); +CAFFE2_API Tensor & max_unpooling3d_forward_out_cpu(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor & max_unpooling3d_forward_out_cuda(Tensor & out, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor max_unpooling3d_forward_cpu(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor max_unpooling3d_forward_cuda(const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor & max_unpooling3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor & max_unpooling3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor max_unpooling3d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor max_unpooling3d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & indices, IntArrayRef output_size, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad1d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad1d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad1d_cpu(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad1d_cuda(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad1d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad1d_backward_out_cuda(Tensor & grad_input, const Tensor & 
grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad1d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad1d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad2d_cpu(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad2d_cuda(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & reflection_pad2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad2d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor reflection_pad2d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad1d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad1d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad1d_cpu(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad1d_cuda(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad1d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad1d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad1d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad1d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad2d_cpu(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad2d_cuda(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad2d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad2d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad3d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad3d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad3d_cpu(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad3d_cuda(const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor & replication_pad3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, 
IntArrayRef padding); +CAFFE2_API Tensor & replication_pad3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad3d_backward_cpu(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor replication_pad3d_backward_cuda(const Tensor & grad_output, const Tensor & self, IntArrayRef padding); +CAFFE2_API Tensor _test_optional_float(const Tensor & self, c10::optional scale=c10::nullopt); +CAFFE2_API Tensor & upsample_linear1d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_linear1d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_linear1d_cpu(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_linear1d_cuda(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_linear1d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_linear1d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_linear1d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_linear1d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_bilinear2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_bilinear2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_bilinear2d_cpu(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_bilinear2d_cuda(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor quantized_upsample_bilinear2d_cpu(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_bilinear2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_bilinear2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_bilinear2d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_bilinear2d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_bicubic2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_bicubic2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_bicubic2d_cpu(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_bicubic2d_cuda(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_bicubic2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); 
+CAFFE2_API Tensor & upsample_bicubic2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_bicubic2d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_bicubic2d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_trilinear3d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_trilinear3d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_trilinear3d_cpu(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor upsample_trilinear3d_cuda(const Tensor & self, IntArrayRef output_size, bool align_corners); +CAFFE2_API Tensor & upsample_trilinear3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_trilinear3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_trilinear3d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor upsample_trilinear3d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size, bool align_corners); +CAFFE2_API Tensor & upsample_nearest1d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest1d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest1d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest1d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest1d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & upsample_nearest1d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor upsample_nearest1d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor upsample_nearest1d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & upsample_nearest2d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest2d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest2d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest2d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor quantized_upsample_nearest2d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest2d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & upsample_nearest2d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor upsample_nearest2d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor 
upsample_nearest2d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & upsample_nearest3d_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest3d_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest3d_cpu(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor upsample_nearest3d_cuda(const Tensor & self, IntArrayRef output_size); +CAFFE2_API Tensor & upsample_nearest3d_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & upsample_nearest3d_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor upsample_nearest3d_backward_cpu(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor upsample_nearest3d_backward_cuda(const Tensor & grad_output, IntArrayRef output_size, IntArrayRef input_size); +CAFFE2_API Tensor & sigmoid_backward_out(Tensor & grad_input, const Tensor & grad_output, const Tensor & output); +CAFFE2_API Tensor sigmoid_backward(const Tensor & grad_output, const Tensor & output); +CAFFE2_API Tensor & slow_conv_transpose2d_out_cpu(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor & slow_conv_transpose2d_out_cuda(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_transpose2d_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_transpose2d_cuda(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API std::tuple slow_conv_transpose2d_backward_out_cpu(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones); +CAFFE2_API std::tuple slow_conv_transpose2d_backward_out_cuda(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones); +CAFFE2_API std::tuple slow_conv_transpose2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & columns, const Tensor & ones, std::array output_mask); +CAFFE2_API std::tuple slow_conv_transpose2d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef 
dilation, const Tensor & columns, const Tensor & ones, std::array output_mask); +CAFFE2_API Tensor & slow_conv_transpose3d_out_cpu(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor & slow_conv_transpose3d_out_cuda(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_transpose3d_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_transpose3d_cuda(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef output_padding=0, IntArrayRef dilation=1); +CAFFE2_API std::tuple slow_conv_transpose3d_backward_out_cpu(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input); +CAFFE2_API std::tuple slow_conv_transpose3d_backward_out_cuda(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input); +CAFFE2_API std::tuple slow_conv_transpose3d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +CAFFE2_API std::tuple slow_conv_transpose3d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, IntArrayRef dilation, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +CAFFE2_API Tensor & thnn_conv2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +CAFFE2_API Tensor thnn_conv2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +CAFFE2_API std::tuple slow_conv2d_forward_out_cpu(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API std::tuple slow_conv2d_forward_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API std::tuple slow_conv2d_backward_out_cpu(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); +CAFFE2_API 
std::tuple slow_conv2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +CAFFE2_API Tensor & thnn_conv_depthwise2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor thnn_conv_depthwise2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor & slow_conv3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +CAFFE2_API Tensor slow_conv3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0); +CAFFE2_API std::tuple slow_conv3d_forward_out_cpu(Tensor & output, Tensor & finput, Tensor & fgrad_input, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API std::tuple slow_conv3d_forward_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); +CAFFE2_API std::tuple slow_conv3d_backward_out_cpu(Tensor & grad_input, Tensor & grad_weight, Tensor & grad_bias, const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input); +CAFFE2_API std::tuple slow_conv3d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, const Tensor & finput, const Tensor & fgrad_input, std::array output_mask); +CAFFE2_API Tensor slow_conv_dilated2d_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_dilated2d_cuda(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API std::tuple slow_conv_dilated2d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); +CAFFE2_API std::tuple slow_conv_dilated2d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); +CAFFE2_API Tensor slow_conv_dilated3d_cpu(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API Tensor slow_conv_dilated3d_cuda(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias={}, IntArrayRef stride=1, IntArrayRef padding=0, IntArrayRef dilation=1); +CAFFE2_API std::tuple slow_conv_dilated3d_backward_cpu(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, 
std::array output_mask); +CAFFE2_API std::tuple slow_conv_dilated3d_backward_cuda(const Tensor & grad_output, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, std::array output_mask); +CAFFE2_API Tensor & col2im_out_cpu(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & col2im_out_cuda(Tensor & out, const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor col2im_cpu(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor col2im_cuda(const Tensor & self, IntArrayRef output_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & col2im_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & col2im_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor col2im_backward_cpu(const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor col2im_backward_cuda(const Tensor & grad_output, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & im2col_out_cpu(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & im2col_out_cuda(Tensor & out, const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor im2col_cpu(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor im2col_cuda(const Tensor & self, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & im2col_backward_out_cpu(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor & im2col_backward_out_cuda(Tensor & grad_input, const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor im2col_backward_cpu(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor im2col_backward_cuda(const Tensor & grad_output, IntArrayRef input_size, IntArrayRef kernel_size, IntArrayRef dilation, IntArrayRef padding, IntArrayRef stride); +CAFFE2_API Tensor isfinite(const Tensor & self); + +} // namespace native +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/NumericUtils.h b/thirdparty/libtorch/include/ATen/NumericUtils.h new file mode 100644 index 0000000000..3b002ee8e7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/NumericUtils.h @@ -0,0 +1,46 @@ +#pragma once + +#ifdef __HIPCC__ +#include +#endif + +#include +#include +#include +#include +#include +#include + +namespace at { + +// std::isnan isn't performant to use on integral 
types; it will +// (uselessly) convert to floating point and then do the test. +// This function is. + +template ::value, int>::type = 0> +inline C10_HOST_DEVICE bool _isnan(T val) { + return false; +} + +template ::value, int>::type = 0> +inline C10_HOST_DEVICE bool _isnan(T val) { +#if defined(__CUDACC__) || defined(__HIPCC__) + return ::isnan(val); +#else + return std::isnan(val); +#endif +} + +template ::value, int>::type = 0> +inline bool _isnan(T val) { + return std::isnan(val.real()) || std::isnan(val.imag()); +} + +inline C10_HOST_DEVICE bool _isnan(at::BFloat16 val) { + return at::_isnan(float(val)); +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/OpaqueTensorImpl.h b/thirdparty/libtorch/include/ATen/OpaqueTensorImpl.h new file mode 100644 index 0000000000..dd467d9398 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/OpaqueTensorImpl.h @@ -0,0 +1,142 @@ +#pragma once + +#include +#include +#include + +namespace at { + +// An "Opaque" TensorImpl -- there are no strides and (for now) +// even data() is not supported (thus no pointer arithmetic). + +// NOTE: We could allow data() in the future, but would have to ensure pointer +// arithmetic code is properly guarded. +// +// NOTE: This does not support resize_ (and other metadata-changing ops) because of +// `shallow_copy_and_detach`. We would need to define an interface to "shallow copy" +// in order to add support. + +template +struct CAFFE2_API OpaqueTensorImpl : public TensorImpl { + // public constructor for now... + OpaqueTensorImpl(at::TensorTypeSet type_set, const caffe2::TypeMeta& data_type, c10::Device device, + OpaqueHandle opaque_handle, c10::IntArrayRef sizes) + : TensorImpl(type_set, data_type, device), + opaque_handle_(std::move(opaque_handle)) + { + sizes_ = sizes.vec(); + refresh_numel(); + } + + void release_resources() override { + TensorImpl::release_resources(); + opaque_handle_ = {}; + } + + IntArrayRef strides() const override { + AT_ERROR("opaque tensors do not have strides"); + } + + bool is_contiguous(c10::MemoryFormat memory_format=c10::MemoryFormat::Contiguous) const override { + AT_ERROR("opaque tensors do not have is_contiguous"); + } + + int64_t stride(int64_t d) const override { + AT_ERROR("opaque tensors do not have strides"); + } + + void resize_dim(int64_t ndim) override { + AT_ERROR("opaque tensors do not have resize_dim"); + } + + void set_size(int64_t dim, int64_t new_size) override { + AT_ERROR("opaque tensors do not have set_size"); + } + + void set_stride(int64_t dim, int64_t new_stride) override { + AT_ERROR("opaque tensors do not have set_stride"); + } + + void set_storage_offset(int64_t storage_offset) override { + AT_ERROR("opaque tensors do not have set_storage_offset"); + } + + TensorImpl* maybe_zero_dim(bool condition_when_zero_dim) override { + AT_ERROR("opaque tensors do not support maybe_zero_dim"); + } + + bool has_storage() const override { + return false; + } + + const Storage& storage() const override{ + AT_ERROR("opaque tensors do not have storage"); + } + + int64_t storage_offset() const override { + AT_ERROR("opaque tensors do not have storage"); + } + + /** + * Return a TensorImpl that is a shallow-copy of this TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, + * see NOTE [ TensorImpl Shallow-Copying ]. 
+ */ + c10::intrusive_ptr shallow_copy_and_detach( + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change) const override { + auto impl = c10::make_intrusive>( + type_set(), dtype(), device(), opaque_handle_, sizes_); + copy_tensor_metadata( + /*src_impl=*/this, + /*dest_impl=*/impl.get(), + /*version_counter=*/version_counter, + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change); + impl->refresh_numel(); + return impl; + } + + /** + * Shallow-copies data from another TensorImpl into this TensorImpl. + * + * For why this function doesn't check this TensorImpl's `allow_tensor_metadata_change_`, + * see NOTE [ TensorImpl Shallow-Copying ]. + */ + void shallow_copy_from(const c10::intrusive_ptr& impl) override { + AT_ASSERT(has_compatible_shallow_copy_type(impl->type_set())); + auto opaque_impl = static_cast*>(impl.get()); + copy_tensor_metadata( + /*src_impl=*/opaque_impl, + /*dest_impl=*/this, + /*version_counter=*/version_counter(), + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change()); + refresh_numel(); + } + + OpaqueHandle& unsafe_opaque_handle() { + return opaque_handle_; + } + +private: + OpaqueHandle opaque_handle_; + + /** + * Copy the tensor metadata fields (e.g. sizes / strides / storage pointer / storage_offset) + * from one TensorImpl to another TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, see NOTE [ TensorImpl Shallow-Copying ]. + */ + static void copy_tensor_metadata( + const OpaqueTensorImpl* src_opaque_impl, + OpaqueTensorImpl* dest_opaque_impl, + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change) { + TensorImpl::copy_tensor_metadata(src_opaque_impl, dest_opaque_impl, version_counter, allow_tensor_metadata_change); + + // OpaqueTensorImpl-specific fields. + dest_opaque_impl->opaque_handle_ = src_opaque_impl->opaque_handle_; + } +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/PTThreadPool.h b/thirdparty/libtorch/include/ATen/PTThreadPool.h new file mode 100644 index 0000000000..f5e8a1a182 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/PTThreadPool.h @@ -0,0 +1,19 @@ +#pragma once + +#include +#include + +namespace at { + +class CAFFE2_API PTThreadPool : public c10::ThreadPool { +public: + explicit PTThreadPool( + int pool_size, + int numa_node_id = -1) + : c10::ThreadPool(pool_size, numa_node_id, [](){ + c10::setThreadName("PTThreadPool"); + at::init_num_threads(); + }) {} +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/Parallel.h b/thirdparty/libtorch/include/ATen/Parallel.h new file mode 100644 index 0000000000..eb30946ab2 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Parallel.h @@ -0,0 +1,136 @@ +#pragma once +#include +#include + +namespace at { +namespace internal { +// This parameter is heuristically chosen to determine the minimum number of +// work that warrants parallelism. For example, when summing an array, it is +// deemed inefficient to parallelise over arrays shorter than 32768. Further, +// no parallel algorithm (such as parallel_reduce) should split work into +// smaller than GRAIN_SIZE chunks. 
 +constexpr int64_t GRAIN_SIZE = 32768; +} // namespace internal + +inline int64_t divup(int64_t x, int64_t y) { + return (x + y - 1) / y; +} + +// Called during new thread initialization +CAFFE2_API void init_num_threads(); + +// Sets the number of threads to be used in parallel region +CAFFE2_API void set_num_threads(int); + +// Returns the number of threads used in parallel region +CAFFE2_API int get_num_threads(); + +// Returns the current thread number (starting from 0) +// in the current parallel region, or 0 in the sequential region +CAFFE2_API int get_thread_num(); + +// Checks whether the code runs in parallel region +CAFFE2_API bool in_parallel_region(); + +/* +parallel_for + +begin: index at which to start applying user function + +end: index at which to stop applying user function + +grain_size: number of elements per chunk. impacts the degree of parallelization + +f: user function applied in parallel to the chunks, signature: + void f(int64_t begin, int64_t end) + +Warning: parallel_for does NOT copy thread local +states from the current thread to the worker threads. +This means for example that Tensor operations CANNOT be used in the +body of your function, only data pointers. +*/ +template +inline void parallel_for( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const F& f); + +/* +parallel_reduce + +begin: index at which to start applying reduction + +end: index at which to stop applying reduction + +grain_size: number of elements per chunk. impacts number of elements in +intermediate results tensor and degree of parallelization. + +ident: identity for binary combination function sf. sf(ident, x) needs to return +x. + +f: function for reduction over a chunk. f needs to be of signature scalar_t +f(int64_t partial_begin, int64_t partial_end, scalar_t identity) + +sf: function to combine two partial results. sf needs to be of signature +scalar_t sf(scalar_t x, scalar_t y) + +For example, you might have a tensor of 10000 entries and want to sum together +all the elements. parallel_reduce with a grain_size of 2500 will then allocate +an intermediate result tensor with 4 elements. Then it will execute the function +"f" you provide and pass the beginning and end index of these chunks, so +0-2499, 2500-4999, etc. and the combination identity. It will then write out +the result from each of these chunks into the intermediate result tensor. After +that it'll reduce the partial results from each chunk into a single number using +the combination function sf and the identity ident. For a total summation this +would be "+" and 0 respectively. This is similar to tbb's approach [1], where +you need to provide a function to accumulate a subrange, a function to combine +two partial results and an identity. + +Warning: parallel_reduce does NOT copy thread local +states from the current thread to the worker threads. +This means for example that Tensor operations CANNOT be used in the +body of your function, only data pointers.
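A minimal usage sketch of the API documented above (illustrative only; `parallel_sum_example` and `parallel_scale_example` are hypothetical helpers, and the lambdas touch only raw pointers, in line with the warning about thread-local state):

#include <ATen/Parallel.h>
#include <vector>

// Sum a buffer in parallel: f reduces one chunk starting from the identity,
// sf combines two partial results.
float parallel_sum_example(const std::vector<float>& data) {
  const float* ptr = data.data();
  return at::parallel_reduce(
      /*begin=*/0, /*end=*/static_cast<int64_t>(data.size()),
      /*grain_size=*/at::internal::GRAIN_SIZE,
      /*ident=*/0.0f,
      [ptr](int64_t begin, int64_t end, float acc) {
        for (int64_t i = begin; i < end; ++i) acc += ptr[i];
        return acc;
      },
      [](float a, float b) { return a + b; });
}

// Scale a buffer in place: f is applied to disjoint index ranges.
void parallel_scale_example(std::vector<float>& data, float factor) {
  float* ptr = data.data();
  at::parallel_for(0, static_cast<int64_t>(data.size()), at::internal::GRAIN_SIZE,
      [ptr, factor](int64_t begin, int64_t end) {
        for (int64_t i = begin; i < end; ++i) ptr[i] *= factor;
      });
}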
+ +[1] https://software.intel.com/en-us/node/506154 +*/ +template +inline scalar_t parallel_reduce( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const scalar_t ident, + const F& f, + const SF& sf); + +// Returns a detailed string describing parallelization settings +CAFFE2_API std::string get_parallel_info(); + +// Sets number of threads used for inter-op parallelism +CAFFE2_API void set_num_interop_threads(int); + +// Returns the number of threads used for inter-op parallelism +CAFFE2_API int get_num_interop_threads(); + +// Launches inter-op parallel task +CAFFE2_API void launch(std::function func); + +// Launches intra-op parallel task +CAFFE2_API void intraop_launch(std::function func); + +// Launches intra-op parallel task, returns a future +CAFFE2_API std::shared_ptr intraop_launch_future( + std::function func); + +// Returns number of intra-op threads used by default +CAFFE2_API int intraop_default_num_threads(); + +} // namespace at + +#if AT_PARALLEL_OPENMP +#include +#elif AT_PARALLEL_NATIVE +#include +#elif AT_PARALLEL_NATIVE_TBB +#include +#endif diff --git a/thirdparty/libtorch/include/ATen/ParallelNative.h b/thirdparty/libtorch/include/ATen/ParallelNative.h new file mode 100644 index 0000000000..58d3445cc5 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ParallelNative.h @@ -0,0 +1,92 @@ +#pragma once + +#include +#include +#include + +#define INTRA_OP_PARALLEL + +namespace at { +namespace internal { + +inline std::tuple calc_num_tasks_and_chunk_size( + int64_t begin, int64_t end, int64_t grain_size) { + if ((end - begin) < grain_size) { + return std::make_tuple(1, std::max((int64_t)0, end - begin)); + } + // Choose number of tasks based on grain size and number of threads. + size_t chunk_size = divup((end - begin), get_num_threads()); + // Make sure each task is at least grain_size size. 
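// Worked example of the chunking arithmetic in this function (illustrative values):
// with begin = 0, end = 100000, grain_size = 32768 and get_num_threads() == 8,
// divup(100000, 8) = 12500, so chunk_size becomes max(32768, 12500) = 32768
// and num_tasks = divup(100000, 32768) = 4.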
+ chunk_size = std::max((size_t)grain_size, chunk_size); + size_t num_tasks = divup((end - begin), chunk_size); + return std::make_tuple(num_tasks, chunk_size); +} + +CAFFE2_API void _parallel_run( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const std::function& f); + +} // namespace internal + +template +inline void parallel_for( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const F& f) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return; + } + if ((end - begin) < grain_size || in_parallel_region()) { + f(begin, end); + return; + } + internal::_parallel_run( + begin, + end, + grain_size, + [f](int64_t start, int64_t end, size_t /* unused */) { + f(start, end); + } + ); +} + +template +inline scalar_t parallel_reduce( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const scalar_t ident, + const F& f, + const SF& sf) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return ident; + } + if ((end - begin) < grain_size || in_parallel_region()) { + return f(begin, end, ident); + } + size_t num_tasks, chunk_size; + std::tie(num_tasks, chunk_size) = + internal::calc_num_tasks_and_chunk_size(begin, end, grain_size); + std::vector results(num_tasks); + scalar_t* results_data = results.data(); + internal::_parallel_run( + begin, + end, + grain_size, + [f, ident, results_data](int64_t start, int64_t end, size_t task_id) { + results_data[task_id] = f(start, end, ident); + } + ); + scalar_t result = ident; + for (auto partial_result : results) { + result = sf(result, partial_result); + } + return result; +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/ParallelNativeTBB.h b/thirdparty/libtorch/include/ATen/ParallelNativeTBB.h new file mode 100644 index 0000000000..aa412320e9 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ParallelNativeTBB.h @@ -0,0 +1,91 @@ +#pragma once +#include + +#include +#include + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN +#endif +#include "tbb/tbb.h" + +#define INTRA_OP_PARALLEL + +namespace at { + +template +inline void parallel_for( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const F& f) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return; + } + if ((end - begin) < grain_size || get_num_threads() == 1) { + f(begin, end); + return; + } + std::atomic_flag err_flag = ATOMIC_FLAG_INIT; + std::exception_ptr eptr; + tbb::parallel_for(tbb::blocked_range(begin, end, grain_size), + [&eptr, &err_flag, f](const tbb::blocked_range& r) { + try { + f(r.begin(), r.end()); + } catch (...) { + if (!err_flag.test_and_set()) { + eptr = std::current_exception(); + } + } + }); + if (eptr) { + std::rethrow_exception(eptr); + } +} + +template +inline scalar_t parallel_reduce( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const scalar_t ident, + const F& f, + const SF& sf) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return ident; + } + if ((end - begin) < grain_size || get_num_threads() == 1) { + return f(begin, end, ident); + } + scalar_t result; + std::atomic_flag err_flag = ATOMIC_FLAG_INIT; + std::exception_ptr eptr; + result = tbb::parallel_reduce( + tbb::blocked_range(begin, end, grain_size), ident, + [&eptr, &err_flag, f, ident] + (const tbb::blocked_range& r, scalar_t ident) { + try { + return f(r.begin(), r.end(), ident); + } catch (...) 
{ + if (!err_flag.test_and_set()) { + eptr = std::current_exception(); + } + return ident; + } + }, + sf + ); + if (eptr) { + std::rethrow_exception(eptr); + } + return result; +} + +template +void intraop_invoke(const F0& f0, const F1& f1) { + tbb::parallel_invoke(f0, f1); +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/ParallelOpenMP.h b/thirdparty/libtorch/include/ATen/ParallelOpenMP.h new file mode 100644 index 0000000000..fecb9858d3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ParallelOpenMP.h @@ -0,0 +1,99 @@ +#pragma once +#include + +#include +#include + +#ifdef _OPENMP +#define INTRA_OP_PARALLEL + +#include +#endif + +namespace at { + +template +inline void parallel_for( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const F& f) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return; + } +#ifdef _OPENMP + std::atomic_flag err_flag = ATOMIC_FLAG_INIT; + std::exception_ptr eptr; + // choose number of tasks based on grain size and number of threads + int64_t num_threads = omp_in_parallel() ? 1 : omp_get_max_threads(); + if (grain_size > 0) { + num_threads = std::min(num_threads, divup((end - begin), grain_size)); + } + +#pragma omp parallel num_threads(num_threads) + { + int64_t num_threads = omp_get_num_threads(); + int64_t tid = omp_get_thread_num(); + int64_t chunk_size = divup((end - begin), num_threads); + int64_t begin_tid = begin + tid * chunk_size; + if (begin_tid < end) { + try { + f(begin_tid, std::min(end, chunk_size + begin_tid)); + } catch (...) { + if (!err_flag.test_and_set()) { + eptr = std::current_exception(); + } + } + } + } + if (eptr) { + std::rethrow_exception(eptr); + } +#else + f(begin, end); +#endif +} + +template +inline scalar_t parallel_reduce( + const int64_t begin, + const int64_t end, + const int64_t grain_size, + const scalar_t ident, + const F& f, + const SF& sf) { + TORCH_CHECK(grain_size >= 0); + if (begin >= end) { + return ident; + } else if (in_parallel_region() || get_num_threads() == 1) { + return f(begin, end, ident); + } else { + const int64_t num_results = divup((end - begin), grain_size); + std::vector results(num_results); + scalar_t* results_data = results.data(); + std::atomic_flag err_flag = ATOMIC_FLAG_INIT; + std::exception_ptr eptr; +#pragma omp parallel for if ((end - begin) >= grain_size) + for (int64_t id = 0; id < num_results; id++) { + int64_t i = begin + id * grain_size; + try { + results_data[id] = f(i, i + std::min(end - i, grain_size), ident); + } catch (...) 
{ + if (!err_flag.test_and_set()) { + eptr = std::current_exception(); + } + } + } + if (eptr) { + std::rethrow_exception(eptr); + } + scalar_t result = ident; + for (auto partial_result : results) { + result = sf(result, partial_result); + } + return result; + } +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/QuantizedCPUType.h b/thirdparty/libtorch/include/ATen/QuantizedCPUType.h new file mode 100644 index 0000000000..980e4c1f10 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/QuantizedCPUType.h @@ -0,0 +1,95 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +namespace c10 { +struct Storage; +} + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +class Context; +struct Generator; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +#ifdef USE_STATIC_DISPATCH +namespace QuantizedCPUType { + Tensor as_strided(const Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset); + Tensor _empty_affine_quantized(IntArrayRef size, const TensorOptions & options, double scale, int64_t zero_point, c10::optional memory_format); + Tensor _empty_per_channel_affine_quantized(IntArrayRef size, const Tensor & scales, const Tensor & zero_points, int64_t axis, const TensorOptions & options, c10::optional memory_format); + Tensor & resize_(Tensor & self, IntArrayRef size, c10::optional memory_format); + Tensor quantized_max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor mean(const Tensor & self, c10::optional dtype); + Tensor mean(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + Tensor & mean_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + Tensor relu(const Tensor & self); + Tensor & relu_(Tensor & self); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor dequantize(const Tensor & self); + double q_scale(const Tensor & self); + int64_t q_zero_point(const Tensor & self); + Tensor q_per_channel_scales(const Tensor & self); + Tensor q_per_channel_zero_points(const Tensor & self); + int64_t q_per_channel_axis(const Tensor & self); + Tensor int_repr(const Tensor & self); + QScheme qscheme(const Tensor & self); + Tensor & set_(Tensor & self, Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride); + Tensor & set_quantizer_(Tensor & self, ConstQuantizerPtr quantizer); + Tensor view(const Tensor & self, IntArrayRef size); + Tensor & ne_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ne(const Tensor & self, Scalar other); + Tensor & ne_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ne(const Tensor & self, const Tensor & other); + Tensor & eq_out(Tensor & out, const Tensor & self, Scalar other); + Tensor eq(const Tensor & self, Scalar other); + Tensor & eq_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor eq(const Tensor & self, const Tensor & other); + Tensor & ge_out(Tensor & out, const Tensor & self, Scalar other); + Tensor ge(const Tensor & self, Scalar other); + Tensor & ge_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor ge(const Tensor & self, const Tensor & other); + 
Tensor & le_out(Tensor & out, const Tensor & self, Scalar other); + Tensor le(const Tensor & self, Scalar other); + Tensor & le_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor le(const Tensor & self, const Tensor & other); + Tensor & gt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor gt(const Tensor & self, Scalar other); + Tensor & gt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor gt(const Tensor & self, const Tensor & other); + Tensor & lt_out(Tensor & out, const Tensor & self, Scalar other); + Tensor lt(const Tensor & self, Scalar other); + Tensor & lt_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor lt(const Tensor & self, const Tensor & other); + Tensor min(const Tensor & self); + Tensor max(const Tensor & self); + std::tuple sort(const Tensor & self, int64_t dim, bool descending); + std::tuple topk(const Tensor & self, int64_t k, int64_t dim, bool largest, bool sorted); + bool equal(const Tensor & self, const Tensor & other); + Tensor _adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor avg_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad, c10::optional divisor_override); + Tensor upsample_bilinear2d(const Tensor & self, IntArrayRef output_size, bool align_corners); + Tensor upsample_nearest2d(const Tensor & self, IntArrayRef output_size); +} +#endif + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/Scalar.h b/thirdparty/libtorch/include/ATen/Scalar.h new file mode 100644 index 0000000000..e12557428f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Scalar.h @@ -0,0 +1,3 @@ +#pragma once + +#include diff --git a/thirdparty/libtorch/include/ATen/ScalarOps.h b/thirdparty/libtorch/include/ATen/ScalarOps.h new file mode 100644 index 0000000000..8b26ed1d51 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ScalarOps.h @@ -0,0 +1,38 @@ +#pragma once + +#include +#include +#include + +// This is in the c10 namespace because we use ADL to find the functions in it. +namespace c10 { + +// FIXME: this should be (and was) Scalar::toTensor, but there is currently no way +// to implement this without going through Derived Types (which are not part of core). +inline at::Tensor scalar_to_tensor(Scalar s, const Device device = at::kCPU) { + // This is the fast track we have for CPU scalar tensors. 
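// For illustration (summarizing the branches below): a floating-point Scalar maps to a
// kDouble tensor, a boolean Scalar to kBool, a complex Scalar to kComplexDouble, and any
// integral Scalar to kLong, on both this CPU fast path and the generic path further down.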
+ if (device == at::kCPU) { + if (s.isFloatingPoint()) { + return at::native::scalar_tensor(s, at::device(at::kCPU).dtype(at::kDouble)); + } else if (s.isBoolean()) { + return at::native::scalar_tensor(s, at::device(at::kCPU).dtype(at::kBool)); + } else if (s.isComplex()) { + return at::native::scalar_tensor(s, at::device(at::kCPU).dtype(at::kComplexDouble)); + } else { + AT_ASSERT(s.isIntegral(false)); + return at::native::scalar_tensor(s, at::device(at::kCPU).dtype(at::kLong)); + } + } + if (s.isFloatingPoint()) { + return at::scalar_tensor(s, at::device(device).dtype(at::kDouble)); + } else if (s.isBoolean()) { + return at::scalar_tensor(s, at::device(device).dtype(at::kBool)); + } else if (s.isComplex()) { + return at::scalar_tensor(s, at::device(device).dtype(at::kComplexDouble)); + } else { + AT_ASSERT(s.isIntegral(false)); + return at::scalar_tensor(s, at::device(device).dtype(at::kLong)); + } +} + +} diff --git a/thirdparty/libtorch/include/ATen/ScalarType.h b/thirdparty/libtorch/include/ATen/ScalarType.h new file mode 100644 index 0000000000..2181250740 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/ScalarType.h @@ -0,0 +1,4 @@ +#pragma once +#include // for BC reasons +#include +#include diff --git a/thirdparty/libtorch/include/ATen/SmallVector.h b/thirdparty/libtorch/include/ATen/SmallVector.h new file mode 100644 index 0000000000..fabfa44190 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/SmallVector.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/SparseCPUType.h b/thirdparty/libtorch/include/ATen/SparseCPUType.h new file mode 100644 index 0000000000..244fc483dc --- /dev/null +++ b/thirdparty/libtorch/include/ATen/SparseCPUType.h @@ -0,0 +1,90 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include +#include +#include +#include +#include + + + +namespace c10 { +struct Storage; +} + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +class Context; +struct Generator; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +#ifdef USE_STATIC_DISPATCH +namespace SparseCPUType { + Tensor add(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor div(const Tensor & self, const Tensor & other); + Tensor & div_(Tensor & self, const Tensor & other); + Tensor & div_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional memory_format); + Tensor & log1p_(Tensor & self); + Tensor & log1p_out(Tensor & out, const Tensor & self); + Tensor mm(const Tensor & self, const Tensor & mat2); + Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor mul(const Tensor & self, const Tensor & other); + Tensor & mul_(Tensor & self, const Tensor & other); + Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor narrow_copy(const Tensor & self, int64_t dim, int64_t start, int64_t length); + Tensor & sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor native_norm(const Tensor & self, Scalar p); + Tensor _sparse_sum_backward(const 
Tensor & grad, const Tensor & self, IntArrayRef dim); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); + Tensor pow(const Tensor & self, Scalar exponent); + Tensor & zero_(Tensor & self); + Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & sub_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor _sparse_coo_tensor_with_dims(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const TensorOptions & options); + Tensor _sparse_coo_tensor_with_dims_and_tensors(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const Tensor & indices, const Tensor & values, const TensorOptions & options); + Tensor & sparse_resize_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); + Tensor & sparse_resize_and_clear_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); + Tensor sparse_mask(const Tensor & self, const Tensor & mask); + Tensor to_dense(const Tensor & self); + int64_t sparse_dim(const Tensor & self); + int64_t _dimI(const Tensor & self); + int64_t dense_dim(const Tensor & self); + int64_t _dimV(const Tensor & self); + int64_t _nnz(const Tensor & self); + Tensor coalesce(const Tensor & self); + bool is_coalesced(const Tensor & self); + Tensor _indices(const Tensor & self); + Tensor _values(const Tensor & self); + Tensor & _coalesced_(Tensor & self, bool coalesced); + Tensor indices(const Tensor & self); + Tensor values(const Tensor & self); + Tensor & hspmm_out(Tensor & out, const Tensor & mat1, const Tensor & mat2); + Tensor hspmm(const Tensor & mat1, const Tensor & mat2); + Tensor & copy_sparse_to_sparse_(Tensor & self, const Tensor & src, bool non_blocking); + Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index); +} +#endif + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/SparseCUDAType.h b/thirdparty/libtorch/include/ATen/SparseCUDAType.h new file mode 100644 index 0000000000..afa7f43af3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/SparseCUDAType.h @@ -0,0 +1,93 @@ +#pragma once + +// @generated by aten/src/ATen/gen.py + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace c10 { +struct Storage; +} + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +class Context; +struct Generator; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +#ifdef USE_STATIC_DISPATCH +namespace SparseCUDAType { + Tensor add(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & add_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor div(const Tensor & self, const Tensor & other); + Tensor & div_(Tensor & self, const Tensor & other); + Tensor & div_out(Tensor & out, const 
Tensor & self, const Tensor & other); + Tensor empty(IntArrayRef size, const TensorOptions & options, c10::optional memory_format); + Tensor & log1p_(Tensor & self); + Tensor & log1p_out(Tensor & out, const Tensor & self); + Tensor mm(const Tensor & self, const Tensor & mat2); + Tensor & mm_out(Tensor & out, const Tensor & self, const Tensor & mat2); + Tensor mul(const Tensor & self, const Tensor & other); + Tensor & mul_(Tensor & self, const Tensor & other); + Tensor & mul_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor narrow_copy(const Tensor & self, int64_t dim, int64_t start, int64_t length); + Tensor & sspaddmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor native_norm(const Tensor & self, Scalar p); + Tensor _sparse_sum_backward(const Tensor & grad, const Tensor & self, IntArrayRef dim); + Tensor clone(const Tensor & self, c10::optional memory_format); + Tensor & pow_out(Tensor & out, const Tensor & self, Scalar exponent); + Tensor pow(const Tensor & self, Scalar exponent); + Tensor & zero_(Tensor & self); + Tensor & sub_out(Tensor & out, const Tensor & self, const Tensor & other, Scalar alpha); + Tensor sub(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor & sub_(Tensor & self, const Tensor & other, Scalar alpha); + Tensor & addmm_out(Tensor & out, const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor addmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor & addmm_(Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor _sparse_coo_tensor_with_dims(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const TensorOptions & options); + Tensor _sparse_coo_tensor_with_dims_and_tensors(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size, const Tensor & indices, const Tensor & values, const TensorOptions & options); + Tensor & sparse_resize_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); + Tensor & sparse_resize_and_clear_(Tensor & self, IntArrayRef size, int64_t sparse_dim, int64_t dense_dim); + Tensor sparse_mask(const Tensor & self, const Tensor & mask); + Tensor to_dense(const Tensor & self); + int64_t sparse_dim(const Tensor & self); + int64_t _dimI(const Tensor & self); + int64_t dense_dim(const Tensor & self); + int64_t _dimV(const Tensor & self); + int64_t _nnz(const Tensor & self); + Tensor coalesce(const Tensor & self); + bool is_coalesced(const Tensor & self); + Tensor _indices(const Tensor & self); + Tensor _values(const Tensor & self); + Tensor & _coalesced_(Tensor & self, bool coalesced); + Tensor indices(const Tensor & self); + Tensor values(const Tensor & self); + Tensor & hspmm_out(Tensor & out, const Tensor & mat1, const Tensor & mat2); + Tensor hspmm(const Tensor & mat1, const Tensor & mat2); + Tensor & copy_sparse_to_sparse_(Tensor & self, const Tensor & src, bool non_blocking); + Tensor index_select(const Tensor & self, int64_t dim, const Tensor & index); +} +#endif + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/SparseTensorImpl.h b/thirdparty/libtorch/include/ATen/SparseTensorImpl.h new file mode 100644 index 0000000000..0a52bcab9c --- /dev/null +++ b/thirdparty/libtorch/include/ATen/SparseTensorImpl.h @@ -0,0 +1,246 @@ +#pragma once + +#include +#include +#include + +namespace at { +struct CAFFE2_API SparseTensorImpl : public TensorImpl { + // Stored in COO format, 
indices + values. + + // INVARIANTS: + // sparse_dim: range [0, len(shape)]; sparse_dim + dense_dim = len(shape) + // dense_dim : range [0, len(shape)]; sparse_dim + dense_dim = len(shape) + // _indices.shape: dimensionality: 2, shape: (sparse_dim, nnz) + // _values.shape: dimensionality: 1 + dense_dim. shape: (nnz, shape[sparse_dim:]) + + int64_t sparse_dim_ = 0; // number of sparse dimensions + int64_t dense_dim_ = 0; // number of dense dimensions + + Tensor indices_; // always a LongTensor + Tensor values_; + + // A sparse tensor is 'coalesced' if every index occurs at most once in + // the indices tensor, and the indices are in sorted order. (This means + // that it is very easy to convert a coalesced tensor to CSR format: you + // need only compute CSR format indices.) + // + // Most math operations can only be performed on coalesced sparse tensors, + // because many algorithms proceed by merging two sorted lists (of indices). + bool coalesced_ = false; + +public: + // Public for now... + explicit SparseTensorImpl(at::TensorTypeSet, const caffe2::TypeMeta&); + + int64_t nnz() const { return values_.size(0); } + int64_t sparse_dim() const { return sparse_dim_; } + int64_t dense_dim() const { return dense_dim_; } + bool coalesced() const { return coalesced_; } + Tensor indices() const { return indices_; } + Tensor values() const { return values_; } + + IntArrayRef strides() const override; + bool is_contiguous(at::MemoryFormat memory_format=at::MemoryFormat::Contiguous) const override; + int64_t stride(int64_t d) const override; + void resize_dim(int64_t ndim) override; + void set_size(int64_t dim, int64_t new_size) override; + void set_stride(int64_t dim, int64_t new_stride) override; + void set_storage_offset(int64_t storage_offset) override; + + int64_t dim() const override; + TensorImpl* maybe_zero_dim(bool condition_when_zero_dim) override; + bool has_storage() const override; + const Storage& storage() const override; + int64_t storage_offset() const override; + + // WARNING: This function does NOT preserve invariants of sparse_dim/dense_dim with + // respect to indices and values + void raw_resize_(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size) { + TORCH_CHECK(allow_tensor_metadata_change(), "raw_resize_ ", err_msg_tensor_metadata_change_not_allowed); + sizes_ = size.vec(); + sparse_dim_ = sparse_dim; + dense_dim_ = dense_dim; + refresh_numel(); + } + + // NOTE: This function preserves invariants of sparse_dim/dense_dim with respect to + // indices and values. + // + // NOTE: This function supports the following cases: + // 1. When we keep the number of dense dimensions unchanged, and NOT shrinking the size of + // any of the dense dimensions. + // 2. When we keep the number of sparse dimensions unchanged, and NOT shrinking the size of + // any of the sparse dimensions. + // 3. When the sparse tensor has zero nnz, in which case we are free to change the shapes of + // both its sparse and dense dimensions. + // + // This function DOESN'T support (and will throw an error) the following cases: + // 1. When we attempt to change the number of sparse dimensions on a non-empty sparse tensor + // (such an operation will invalidate the indices stored). + // 2. When we attempt to change the number of dense dimensions on a non-empty sparse tensor + // (such an operation will behave differently from an equivalent dense tensor's resize method, + // and for API consistency we don't support it). + // 3. 
When we attempt to shrink the size of any of the dense dimensions on a non-empty sparse tensor + // (such an operation will behave differently from an equivalent dense tensor's resize method, + // and for API consistency we don't support it). + // 4. When we attempt to shrink the size of any of the sparse dimensions on a non-empty sparse tensor + // (this could make some of the stored indices out-of-bound and thus unsafe). + void resize_(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size) { + TORCH_CHECK(allow_tensor_metadata_change(), "resize_ ", err_msg_tensor_metadata_change_not_allowed); + TORCH_CHECK(sparse_dim + dense_dim == static_cast(size.size()), "number of dimensions must be sparse_dim (", sparse_dim, ") + dense_dim (", dense_dim, "), but got ", size.size()); + if (nnz() > 0) { + auto alt_options_msg = "You could try the following options:\n\ +1. If you need an empty sparse tensor of this size, call `x = torch.sparse_coo_tensor(size)`.\n\ +2. If you need to resize this tensor, you have the following options:\n\ + 1. For both sparse and dense dimensions, keep the number of them constant and the size of them non-shrinking, and then try the same call again.\n\ + 2. Or, create a new sparse tensor with the correct indices and values from this sparse tensor."; + + TORCH_CHECK(sparse_dim == sparse_dim_, + "changing the number of sparse dimensions (from ", sparse_dim_, " to ", sparse_dim, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg); + + TORCH_CHECK(dense_dim == dense_dim_, + "changing the number of dense dimensions (from ", dense_dim_, " to ", dense_dim, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg); + + bool shrinking_sparse_dims = false; + bool shrinking_dense_dim = false; + auto sparse_size_original = sizes().slice(0, sparse_dim); + auto sparse_size_new = size.slice(0, sparse_dim); + for (int64_t i = 0; i < sparse_dim; i++) { + if (sparse_size_new[i] < sparse_size_original[i]) { + shrinking_sparse_dims = true; + break; + } + } + auto dense_size_original = sizes().slice(sparse_dim); + auto dense_size_new = size.slice(sparse_dim); + for (int64_t i = 0; i < dense_dim; i++) { + if (dense_size_new[i] < dense_size_original[i]) { + shrinking_dense_dim = true; + break; + } + } + + TORCH_CHECK(!shrinking_sparse_dims, + "shrinking the size of sparse dimensions (from ", sparse_size_original, " to ", sparse_size_new, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg); + + TORCH_CHECK(!shrinking_dense_dim, + "shrinking the size of dense dimensions (from ", dense_size_original, " to ", dense_size_new, ") on a non-empty sparse tensor is not supported.\n", alt_options_msg); + } + + if ((!size.equals(sizes_)) || (sparse_dim != sparse_dim_) || (dense_dim != dense_dim_)) { + auto nnz = values().size(0); + std::vector values_size = {nnz}; + auto dense_size = size.slice(sparse_dim); + values_size.insert(values_size.end(), dense_size.begin(), dense_size.end()); + values_.resize_(values_size); + indices_.resize_({sparse_dim, nnz}); + } + + sizes_ = size.vec(); + sparse_dim_ = sparse_dim; + dense_dim_ = dense_dim; + refresh_numel(); + } + + // NOTE: this function will resize the sparse tensor and also set `indices` and `values` to empty. 
+ void resize_and_clear_(int64_t sparse_dim, int64_t dense_dim, IntArrayRef size) { + TORCH_CHECK(allow_tensor_metadata_change(), "resize_and_clear_ ", err_msg_tensor_metadata_change_not_allowed); + TORCH_CHECK(sparse_dim + dense_dim == static_cast(size.size()), "number of dimensions must be sparse_dim (", sparse_dim, ") + dense_dim (", dense_dim, "), but got ", size.size()); + + sizes_ = size.vec(); + sparse_dim_ = sparse_dim; + dense_dim_ = dense_dim; + + auto empty_indices = at::empty({sparse_dim, 0}, indices().options()); + std::vector values_size = {0}; + auto dense_size = sizes().slice(sparse_dim); + values_size.insert(values_size.end(), dense_size.begin(), dense_size.end()); + auto empty_values = at::empty(values_size, values().options()); + set_indices_and_values_unsafe(empty_indices, empty_values); + refresh_numel(); + } + + void set_coalesced(bool coalesced) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_coalesced ", err_msg_tensor_metadata_change_not_allowed); + coalesced_ = coalesced; + } + + // NOTE: this function is only used internally and not exposed to Python frontend + void set_nnz_and_narrow(int64_t new_nnz) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_nnz_and_narrow ", err_msg_tensor_metadata_change_not_allowed); + AT_ASSERT(new_nnz <= nnz()); + indices_ = indices_.narrow(1, 0, new_nnz); + values_ = values_.narrow(0, 0, new_nnz); + } + + // Takes indices and values and directly puts them into the sparse tensor, no copy. + // NOTE: this function is unsafe because it doesn't check whether any indices are + // out of boundaries of `sizes`, so it should ONLY be used where we know that the + // indices are guaranteed to be within bounds. + // This used to be called THSTensor_(_move) + // NB: This used to be able to avoid a refcount bump, but I was too lazy to + // make it happen + void set_indices_and_values_unsafe(const Tensor& indices, const Tensor& values); + + /** + * Return a TensorImpl that is a shallow-copy of this TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, + * see NOTE [ TensorImpl Shallow-Copying ]. + */ + c10::intrusive_ptr shallow_copy_and_detach( + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change) const override { + auto impl = c10::make_intrusive(type_set(), dtype()); + copy_tensor_metadata( + /*src_impl=*/this, + /*dest_impl=*/impl.get(), + /*version_counter=*/version_counter, + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change); + impl->refresh_numel(); + return impl; + } + + /** + * Shallow-copies data from another TensorImpl into this TensorImpl. + * + * For why this function doesn't check this TensorImpl's `allow_tensor_metadata_change_`, + * see NOTE [ TensorImpl Shallow-Copying ]. + */ + void shallow_copy_from(const c10::intrusive_ptr& impl) override { + AT_ASSERT(has_compatible_shallow_copy_type(impl->type_set())); + auto sparse_impl = static_cast(impl.get()); + copy_tensor_metadata( + /*src_impl=*/sparse_impl, + /*dest_impl=*/this, + /*version_counter=*/version_counter(), + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change()); + refresh_numel(); + } +private: + explicit SparseTensorImpl(at::TensorTypeSet, const caffe2::TypeMeta&, at::Tensor indices, at::Tensor values); + + /** + * Copy the tensor metadata fields (e.g. sizes / strides / storage pointer / storage_offset) + * from one TensorImpl to another TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, see NOTE [ TensorImpl Shallow-Copying ]. 
+ */ + static void copy_tensor_metadata( + const SparseTensorImpl* src_sparse_impl, + SparseTensorImpl* dest_sparse_impl, + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change) { + TensorImpl::copy_tensor_metadata(src_sparse_impl, dest_sparse_impl, version_counter, allow_tensor_metadata_change); + + // Sparse-specific fields + dest_sparse_impl->sparse_dim_ = src_sparse_impl->sparse_dim(); + dest_sparse_impl->dense_dim_ = src_sparse_impl->dense_dim(); + dest_sparse_impl->indices_ = src_sparse_impl->indices(); + dest_sparse_impl->values_ = src_sparse_impl->values(); + dest_sparse_impl->coalesced_ = src_sparse_impl->coalesced(); + } +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/SparseTensorUtils.h b/thirdparty/libtorch/include/ATen/SparseTensorUtils.h new file mode 100644 index 0000000000..2ed5588126 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/SparseTensorUtils.h @@ -0,0 +1,132 @@ +#pragma once + +#include +#include + +namespace at { namespace sparse { + +// Just for documentary purposes +using SparseTensor = Tensor; +using LongTensor = Tensor; +using IntTensor = Tensor; +using SparseType = Type; + +// This is an internal utility function for getting at the SparseTensorImpl, +// so that we can write sparse tensor specific accessors for special fields +// in SparseTensor. You should only use this for writing low level +// setters/getters for SparseTensorImpl fields; otherwise, you should use +// the low level setters/getters that were implemented using this. +// +// This may be called repeatedly, so make sure it's pretty cheap. +inline SparseTensorImpl* get_sparse_impl(const SparseTensor& self) { + TORCH_INTERNAL_ASSERT(at::impl::variable_excluded_from_dispatch()); + AT_ASSERTM(self.is_sparse(), "_internal_get_SparseTensorImpl: not a sparse tensor"); + return static_cast(self.unsafeGetTensorImpl()); +} + +// Takes indices and values and directly puts them into the sparse tensor, no +// copy. This used to be called THSTensor_(_move) +inline void alias_into_sparse(const SparseTensor& self, const LongTensor& indices, const Tensor& values) { + get_sparse_impl(self)->set_indices_and_values_unsafe(indices, values); +} + +// Take indices and values and makes a (data) copy of them to put into the sparse +// indices/values. This used to be called THSTensor_(_set) +inline void copy_into_sparse(const SparseTensor& self, const LongTensor& indices, const Tensor& values, bool non_blocking) { + alias_into_sparse( + self, + indices.to(self._indices().options(), non_blocking, /*copy=*/true), + values.to(self._values().options(), non_blocking, /*copy=*/true)); +} + +// TODO: put this into the public API +inline bool is_same_tensor(const Tensor& lhs, const Tensor& rhs) { + return lhs.unsafeGetTensorImpl() == rhs.unsafeGetTensorImpl(); +} + +inline bool is_same_density(const SparseTensor& self, const SparseTensor& src) { + return self.sparse_dim() == src.sparse_dim() && self.dense_dim() == src.dense_dim(); +} + +// Give us a new values tensor, with the same dimensionality +// as 'values' but with a new number of non-zero elements. +// TODO: Expose this for real in ATen, some day? +// NB: Doesn't preserve data. +inline Tensor new_values_with_size_of(const Tensor& values, int64_t nnz) { + std::vector size = values.sizes().vec(); + size[0] = nnz; + return at::empty(size, values.options()); +} + +// NOTE [ Flatten Sparse Indices ] +// This helper function flattens a sparse indices tensor (a LongTensor) into a 1D +// indices tensor. 
E.g., +// input = [[2, 4, 0], +// [3, 1, 10]] +// full_size = [2, 12] +// output = [ 2 * 12 + 3, 4 * 12 + 1, 0 * 12 + 10 ] = [27, 49, 10] +// +// In other words, assuming that each `indices[i, :]` is a valid index to a +// tensor `t` of shape `full_size`. This returns the corresponding indices to +// the flattened tensor `t.reshape( prod(full_size[:indices.size(0)]), -1 )`. +// if force_clone is true, the result will be forced to be a clone of self. +inline LongTensor flatten_indices(const Tensor& indices, IntArrayRef full_size, bool force_clone = false) { + int64_t sparse_dim = indices.size(0); + if (sparse_dim == 1) { + if (force_clone) { + return indices.squeeze(0).clone(at::MemoryFormat::Contiguous); + } else { + return indices.squeeze(0); + } + } else { + std::vector indices_mult_cpu_vec; + indices_mult_cpu_vec.reserve(sparse_dim); + int64_t mult = 1; + for (int64_t i = sparse_dim - 1; i >= 0; i--) { + indices_mult_cpu_vec[i] = mult; + mult *= full_size[i]; + } + auto indices_mult_cpu = at::from_blob( + indices_mult_cpu_vec.data(), + /*size=*/{sparse_dim, 1}, + indices.options().device(kCPU)); + // NB: must be blocking because this blob may be freed after this closure, + // and non_blocking copy will see garbage. + auto indices_mult = indices_mult_cpu.to(indices.device(), /*non_blocking=*/false); + // Ideally we want matmul but matmul is slow on CPU Long and not implemented + // on CUDA Long. So mul is faster. + return indices.mul(indices_mult).sum(0); + } +} + +// Flatten sparse tensor's indices from nD to 1D, similar to NOTE [ Flatten Sparse Indices ], +// except this one allows partial flatten: only flatten on specified dims. Note that +// the flatten indices might be uncoalesced if dims_to_flatten.size() < sparse_dim. +// Also if input indices is already coalesced, the flattened indices will also be sorted. +// +// args: +// indices: sparse tensor indices +// sizes: sparse tensor sizes +// dims_to_flatten: a list of dim index to flatten +// +// Ex1: +// indices = [[2, 4, 0], +// [3, 1, 3]] +// sizes = [2, 12] +// dims_to_flatten = [0, 1] +// new_indices = [ 2 * 12 + 3, 4 * 12 + 1, 0 * 12 + 3 ] = [27, 49, 3] +// +// Ex2: +// dims_to_flatten = [1] +// new_indices = [ 3, 1, 3 ] # uncoalesced +inline LongTensor flatten_indices_by_dims(const LongTensor& indices, const IntArrayRef& sizes, const IntArrayRef& dims_to_flatten){ + LongTensor new_indices = at::zeros({indices.size(1)}, indices.options()); + for (auto d : dims_to_flatten) { + new_indices.mul_(sizes[d]); + new_indices.add_(indices.select(0, d)); + } + return new_indices; +} + +}} // namespace at::sparse diff --git a/thirdparty/libtorch/include/ATen/Storage.h b/thirdparty/libtorch/include/ATen/Storage.h new file mode 100644 index 0000000000..5d6285281f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Storage.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/Tensor.h b/thirdparty/libtorch/include/ATen/Tensor.h new file mode 100644 index 0000000000..2dc92d43d4 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Tensor.h @@ -0,0 +1,12 @@ +#pragma once + +/* + * We split Tensor.h into TensorBody.h and TensorMethods.h because we want + * all TensorMethods to be inlined, but they depend on the Dispatcher, + * which in turn depends on many other things, which then depend back on Tensor. + * + * We can break this dependency chain by having the dispatcher only depend on + * TensorBody.h and not TensorMethods.h.
+ */ +#include +#include diff --git a/thirdparty/libtorch/include/ATen/TensorAccessor.h b/thirdparty/libtorch/include/ATen/TensorAccessor.h new file mode 100644 index 0000000000..528ed7b876 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorAccessor.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/TensorGeometry.h b/thirdparty/libtorch/include/ATen/TensorGeometry.h new file mode 100644 index 0000000000..291892a14d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorGeometry.h @@ -0,0 +1,62 @@ +#pragma once + +#include +#include + +namespace at { + +struct CAFFE2_API TensorGeometry { + TensorGeometry() : storage_offset_(0) {} + + explicit TensorGeometry(IntArrayRef sizes) + : sizes_(sizes.vec()) + , strides_(sizes.size()) + , storage_offset_(0) { + int64_t dim = sizes.size(); + int64_t expected_stride = 1; + for (int64_t i = dim - 1; i >= 0; i--) { + strides_[i] = expected_stride; + expected_stride *= sizes_[i]; + } + numel_ = expected_stride; + } + + explicit TensorGeometry(const Tensor& t) + : sizes_(t.sizes().vec()) + , strides_(t.strides().vec()) + , storage_offset_(t.storage_offset()) + , numel_(t.numel()) {} + + // true if the tensor is contiguous + bool is_contiguous() const; + + int64_t dim() const { return sizes_.size(); } + int64_t size(int64_t dim) const { + dim = maybe_wrap_dim(dim, this->dim()); + return sizes_.at(static_cast(dim)); + } + IntArrayRef sizes() const { return IntArrayRef{ sizes_ }; } + int64_t stride(int64_t dim) const { + dim = maybe_wrap_dim(dim, this->dim()); + return strides_.at(static_cast(dim)); + } + IntArrayRef strides() const { return IntArrayRef{ strides_ }; } + int64_t storage_offset() const { return storage_offset_; } + int64_t numel() const { return numel_; } + + TensorGeometry transpose(int64_t dim0, int64_t dim1) { + TensorGeometry r = *this; // copy + TORCH_CHECK(dim0 < dim(), "transpose: dim0=", dim0, " out of range (dim=", dim(), ")") + TORCH_CHECK(dim1 < dim(), "transpose: dim1=", dim1, " out of range (dim=", dim(), ")") + std::swap(r.sizes_[dim0], r.sizes_[dim1]); + std::swap(r.strides_[dim0], r.strides_[dim1]); + return r; + } + + std::vector sizes_; + std::vector strides_; + int64_t storage_offset_; + int64_t numel_; +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/TensorNames.h b/thirdparty/libtorch/include/ATen/TensorNames.h new file mode 100644 index 0000000000..797051795b --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorNames.h @@ -0,0 +1,77 @@ +#pragma once + +#include +#include + +namespace at { namespace namedinference { + +#ifdef BUILD_NAMEDTENSOR + +// TensorName and TensorNames are wrappers around Dimname and DimnameList +// that contain helper functions to make writing name inference rules easier. +// +// A TensorName represents a Dimname associated with some DimnameList (from a Tensor). +// This encapsulates all the information that is needed to check if names *match* +// and to *unify* names. +// +// Definition: Two names in two tensors *match* if they are equal, or if at +// least one of them is a wildcard that can be *refined* to the other name. +// +// Definition: unify(name, other) fails if the names do not match. Otherwise, +// it returns the most refined of name and other. +// +// Here is an example of checking if two names match. +// tensor: Tensor[A, None] +// other: Tensor[A] +// +// Let's say we wish to check if tensor.names[-1] matches other.names[-1]. 
+// None (in tensor) cannot match A (in other) because if the None were refined +// to A, `tensor` would have duplicate names [A, A]. Therefore we need to check +// tensor.names [A, None] for the existence of A. +struct CAFFE2_API TensorName { + explicit TensorName(ArrayRef origin, int origin_idx) + : origin_(origin), + name_(origin[maybe_wrap_dim(origin_idx, origin.size())]), + origin_idx_(origin_idx) {} + + // op_name is only used for error reporting. + const TensorName& unify(const TensorName& other, const char* op_name) const; + Dimname toDimname() const; + + private: + ArrayRef origin_; + Dimname name_; + int origin_idx_; // A named tensor can have at most 64 dims. + + CAFFE2_API friend std::ostream& operator<<( + std::ostream& out, + const TensorName& tensorname); +}; + +using TensorNameVec = SmallVector; + +struct CAFFE2_API TensorNames { + explicit TensorNames(ArrayRef names); + + // Create TensorNames from names[start:end]. Each individual TensorName stores + // `names`, NOT names[start:end], because the original tensor's names are `names`. + explicit TensorNames(ArrayRef names, int64_t start, int64_t end); + + // op_name is only used for error reporting. + TensorNames& unifyFromRightInplace( + const TensorNames& other, + const char* op_name = "unify"); + void checkUnique(const char* op_name) const; + + void append(TensorName&& name); + std::vector toDimnameVec() const; + + private: + explicit TensorNames(TensorNameVec&& names) : names_(names) {}; + + TensorNameVec names_; +}; + +#endif + +}} // namespace at::namedinference diff --git a/thirdparty/libtorch/include/ATen/TensorOperators.h b/thirdparty/libtorch/include/ATen/TensorOperators.h new file mode 100644 index 0000000000..1756ee1cfc --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorOperators.h @@ -0,0 +1,99 @@ +#pragma once + +#include +#include + +#include +#include + +namespace at { + +inline Tensor & Tensor::operator=(Tensor const & rhs) && { + return copy_(rhs); +} +inline Tensor & Tensor::operator=(Tensor && rhs) && { + return copy_(rhs); +} +inline Tensor & Tensor::operator=(Scalar v) && { + return fill_(v); +} +inline Tensor Tensor::operator-() const { + return neg(); +} +inline Tensor& Tensor::operator+=(const Tensor & other) { + return add_(other); +} +inline Tensor& Tensor::operator+=(Scalar other) { + return add_(other); +} +inline Tensor& Tensor::operator-=(const Tensor & other) { + return sub_(other); +} +inline Tensor& Tensor::operator-=(Scalar other) { + return sub_(other); +} +inline Tensor& Tensor::operator*=(const Tensor & other) { + return mul_(other); +} +inline Tensor& Tensor::operator*=(Scalar other) { + return mul_(other); +} +inline Tensor& Tensor::operator/=(const Tensor & other) { + return div_(other); +} +inline Tensor& Tensor::operator/=(Scalar other) { + return div_(other); +} +inline Tensor Tensor::operator[](Scalar index) const { + if (!index.isIntegral(false)) { + AT_INDEX_ERROR("Can only index tensors with integral scalars"); + } + return select(0, index.toLong()); +} +inline Tensor Tensor::operator[](Tensor index) const { + // These properties are checked in the Scalar constructor, but we already + // check them here to provide more useful diagnostics for the user. + if (!index.defined()) { + AT_INDEX_ERROR("Can only index with tensors that are defined"); + } + if (index.dim() != 0) { + AT_INDEX_ERROR( + "Can only index with tensors that are scalars (zero-dim)"); + } + // The Scalar(Tensor) constructor is explicit, so we need to call it. 
+ return this->operator[](index.item()); +} +inline Tensor Tensor::operator[](int64_t index) const { + return select(0, index); +} + +#define AT_FORALL_BINARY_OPS(_) \ +_(+,x.add(y), y.add(x)) \ +_(*,x.mul(y), y.mul(x)) \ +_(-,x.sub(y), ::at::empty_like(y, at::MemoryFormat::Preserve).fill_(x).sub_(y)) \ +_(/,x.div(y), ::at::empty_like(y, at::MemoryFormat::Preserve).fill_(x).div_(y)) \ +_(%,x.remainder(y), ::at::empty_like(y, at::MemoryFormat::Preserve).fill_(x).remainder_(y)) \ +_(<,x.lt(y), y.gt(x)) \ +_(<=,x.le(y), y.ge(x)) \ +_(>,x.gt(y),y.lt(x)) \ +_(>=,x.ge(y), y.le(x)) \ +_(==,x.eq(y), y.eq(x)) \ +_(!=,x.ne(y), y.ne(x)) + +#define DEFINE_OPERATOR(op,body,reverse_scalar_body) \ +static inline Tensor operator op(const Tensor & x, const Tensor & y) { \ + return body; \ +} \ +static inline Tensor operator op(const Tensor & x, Scalar y) { \ + return body; \ +} \ +static inline Tensor operator op(Scalar x, const Tensor & y) { \ + return reverse_scalar_body; \ +} + + +AT_FORALL_BINARY_OPS(DEFINE_OPERATOR) +#undef DEFINE_OPERATOR +#undef AT_FORALL_BINARY_OPS + +} diff --git a/thirdparty/libtorch/include/ATen/TensorOptions.h b/thirdparty/libtorch/include/ATen/TensorOptions.h new file mode 100644 index 0000000000..b3edba8efd --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorOptions.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/TensorUtils.h b/thirdparty/libtorch/include/ATen/TensorUtils.h new file mode 100644 index 0000000000..a210a13d1f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/TensorUtils.h @@ -0,0 +1,153 @@ +#pragma once + +#include +#include +#include + +// These functions are NOT in Utils.h, because this file has a dep on Tensor.h + +namespace at { + +// The following are utility functions for checking that arguments +// make sense. These are particularly useful for native functions, +// which do NO argument checking by default. + +struct CAFFE2_API TensorArg { + Tensor tensor; + const char* name; + int pos; // 1-indexed + TensorArg(Tensor tensor, const char* name, int pos) + : tensor(std::move(tensor)), name(name), pos(pos) {} + const Tensor* operator->() const { return &tensor; } + const Tensor& operator*() const { return tensor; } +}; + +struct CAFFE2_API TensorGeometryArg { + TensorGeometry tensor; + const char* name; + int pos; // 1-indexed + /* implicit */ TensorGeometryArg(TensorArg arg) + : tensor(TensorGeometry{arg.tensor}), name(arg.name), pos(arg.pos) {} + TensorGeometryArg(TensorGeometry tensor, const char* name, int pos) + : tensor(tensor), name(name), pos(pos) {} + const TensorGeometry* operator->() const { return &tensor; } + const TensorGeometry& operator*() const { return tensor; } +}; + +// A string describing which function did checks on its input +// arguments. +// TODO: Consider generalizing this into a call stack. +using CheckedFrom = const char*; + +// The undefined convention: singular operators assume their arguments +// are defined, but functions which take multiple tensors will +// implicitly filter out undefined tensors (to make it easier to perform +// tests which should apply if the tensor is defined, and should not +// otherwise.) +// +// NB: This means that the n-ary operators take lists of TensorArg, +// not TensorGeometryArg, because the Tensor to TensorGeometry +// conversion will blow up if you have undefined tensors. 
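A sketch of how the argument-checking helpers declared here are typically combined (the `my_op_check` function and its tensors are hypothetical, not part of this header):

#include <ATen/TensorUtils.h>

// Validate two inputs of a hypothetical operator before doing any work.
void my_op_check(const at::Tensor& input, const at::Tensor& weight) {
  at::TensorArg input_arg{input, "input", 1};    // positions are 1-indexed, used only in error messages
  at::TensorArg weight_arg{weight, "weight", 2};
  at::CheckedFrom c = "my_op";
  at::checkAllDefined(c, {input_arg, weight_arg});
  at::checkAllSameType(c, {input_arg, weight_arg});
  // TensorArg converts implicitly to TensorGeometryArg for geometry checks.
  at::checkDim(c, input_arg, 4);
  at::checkContiguous(c, input_arg);
}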
+
+CAFFE2_API std::ostream& operator<<(std::ostream& out, TensorGeometryArg t);
+CAFFE2_API void checkDim(
+    CheckedFrom c,
+    const TensorGeometryArg& t,
+    int64_t dim);
+// NB: this is an inclusive-exclusive range
+CAFFE2_API void checkDimRange(
+    CheckedFrom c,
+    const TensorGeometryArg& t,
+    int64_t dim_start,
+    int64_t dim_end);
+CAFFE2_API void checkSameDim(
+    CheckedFrom c,
+    const TensorGeometryArg& t1,
+    const TensorGeometryArg& t2);
+CAFFE2_API void checkContiguous(CheckedFrom c, const TensorGeometryArg& t);
+CAFFE2_API void checkAllContiguous(CheckedFrom c, at::ArrayRef<TensorArg> ts);
+CAFFE2_API void checkSize(
+    CheckedFrom c,
+    const TensorGeometryArg& t,
+    IntArrayRef sizes);
+CAFFE2_API void checkSize(
+    CheckedFrom c,
+    const TensorGeometryArg& t,
+    int64_t dim,
+    int64_t size);
+CAFFE2_API void checkNumel(
+    CheckedFrom c,
+    const TensorGeometryArg& t,
+    int64_t numel);
+CAFFE2_API void checkSameNumel(
+    CheckedFrom c,
+    const TensorGeometryArg& t1,
+    const TensorGeometryArg& t2);
+CAFFE2_API void checkAllSameNumel(CheckedFrom c, ArrayRef<TensorArg> tensors);
+CAFFE2_API void checkScalarType(
+    CheckedFrom c,
+    const TensorArg& t,
+    ScalarType s);
+CAFFE2_API void checkScalarTypes(
+    CheckedFrom c,
+    const TensorArg& t,
+    at::ArrayRef<ScalarType> l);
+CAFFE2_API void checkSameGPU(
+    CheckedFrom c,
+    const TensorArg& t1,
+    const TensorArg& t2);
+CAFFE2_API void checkAllSameGPU(CheckedFrom c, ArrayRef<TensorArg> tensors);
+CAFFE2_API void checkSameType(
+    CheckedFrom c,
+    const TensorArg& t1,
+    const TensorArg& t2);
+CAFFE2_API void checkAllSameType(CheckedFrom c, ArrayRef<TensorArg> tensors);
+CAFFE2_API void checkSameSize(
+    CheckedFrom c,
+    const TensorArg& t1,
+    const TensorArg& t2);
+CAFFE2_API void checkDefined(CheckedFrom c, const TensorArg& t);
+CAFFE2_API void checkAllDefined(CheckedFrom c, at::ArrayRef<TensorArg> t);
+
+// FixMe: does TensorArg slow things down?
+CAFFE2_API void checkBackend(
+    CheckedFrom c,
+    at::ArrayRef<Tensor> t,
+    at::Backend backend);
+
+CAFFE2_API void checkDeviceType(
+    CheckedFrom c,
+    at::ArrayRef<Tensor> tensors,
+    at::DeviceType device_type);
+
+CAFFE2_API void checkLayout(CheckedFrom c, const Tensor& t, Layout layout);
+
+CAFFE2_API void checkLayout(CheckedFrom c, at::ArrayRef<Tensor> tensors, at::Layout layout);
+
+// Methods for getting data_ptr if tensor is defined
+CAFFE2_API void* maybe_data_ptr(const Tensor& tensor);
+CAFFE2_API void* maybe_data_ptr(const TensorArg& tensor);
+
+// Return if the tensor geometry represented by `sizes` and `strides` is contiguous
+// Although we cache is_contiguous in tensor now, this is still useful because it
+// allows checking if a particular geometry is contiguous without explicitly
+// constructing a tensor, e.g., when you want to choose a kernel strategy based
+// on whether a subgeometry is contiguous.
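As a concrete illustration of that check (under the usual row-major convention, where a dense layout has stride[i] equal to the product of sizes[i+1:]):

    geometry_is_contiguous({2, 3}, {3, 1});  // true: dense row-major layout
    geometry_is_contiguous({2, 3}, {1, 2});  // false: column-major strides are not contiguous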
+CAFFE2_API bool geometry_is_contiguous(IntArrayRef sizes, IntArrayRef strides);
+
+// Correspond to THCUNN_check_dim_size/THNN_check_dim_size
+CAFFE2_API void check_dim_size(
+    const Tensor& tensor,
+    int64_t dim,
+    int64_t dim_size,
+    int64_t size);
+
+namespace detail {
+CAFFE2_API std::vector<int64_t> defaultStrides(IntArrayRef sizes);
+CAFFE2_API int64_t computeStorageSize(IntArrayRef sizes, IntArrayRef strides);
+CAFFE2_API c10::optional<std::vector<int64_t>> computeStride(
+    IntArrayRef oldshape,
+    IntArrayRef oldstride,
+    IntArrayRef newshape);
+} // namespace detail
+} // namespace at
diff --git a/thirdparty/libtorch/include/ATen/ThreadLocalDebugInfo.h b/thirdparty/libtorch/include/ATen/ThreadLocalDebugInfo.h
new file mode 100644
index 0000000000..f464e4a4c6
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/ThreadLocalDebugInfo.h
@@ -0,0 +1,46 @@
+#pragma once
+
+#include
+
+#include
+#include
+
+namespace at {
+
+// Thread local debug information is propagated across the forward
+// (including async fork tasks) and backward passes and is supposed
+// to be utilized by the user's code to pass extra information from
+// the higher layers (e.g. model id) down to the operator callbacks
+// (e.g. used for logging)
+
+class CAFFE2_API ThreadLocalDebugInfoBase {
+ public:
+  ThreadLocalDebugInfoBase() {}
+  virtual ~ThreadLocalDebugInfoBase() {}
+};
+
+CAFFE2_API std::shared_ptr<ThreadLocalDebugInfoBase>
+getThreadLocalDebugInfo() noexcept;
+
+// Sets thread local debug information, returns the previously set
+// debug information
+CAFFE2_API std::shared_ptr<ThreadLocalDebugInfoBase>
+setThreadLocalDebugInfo(
+    std::shared_ptr<ThreadLocalDebugInfoBase> info) noexcept;
+
+class CAFFE2_API DebugInfoGuard {
+ public:
+  explicit DebugInfoGuard(
+      std::shared_ptr<ThreadLocalDebugInfoBase> info) {
+    prev_info_ = setThreadLocalDebugInfo(std::move(info));
+  }
+
+  ~DebugInfoGuard() {
+    setThreadLocalDebugInfo(std::move(prev_info_));
+  }
+
+ private:
+  std::shared_ptr<ThreadLocalDebugInfoBase> prev_info_;
+};
+
+} // namespace at
diff --git a/thirdparty/libtorch/include/ATen/TypeDefault.h b/thirdparty/libtorch/include/ATen/TypeDefault.h
new file mode 100644
index 0000000000..905293401b
--- /dev/null
+++ b/thirdparty/libtorch/include/ATen/TypeDefault.h
@@ -0,0 +1,867 @@
+#pragma once
+
+// @generated by aten/src/ATen/gen.py
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+namespace c10 {
+struct Storage;
+}
+
+namespace at {
+
+class Tensor;
+using TensorList = ArrayRef<Tensor>;
+
+class Context;
+struct Generator;
+
+struct Quantizer;
+// This is temporary typedef to enable Quantizer in aten native function API
+// we'll remove them when we are actually exposing Quantizer class
+// to frontend
+using ConstQuantizerPtr = const c10::intrusive_ptr<Quantizer>&;
+
+namespace TypeDefault {
+  Tensor _cast_Byte(const Tensor & self, bool non_blocking);
+  Tensor _cast_Char(const Tensor & self, bool non_blocking);
+  Tensor _cast_Double(const Tensor & self, bool non_blocking);
+  Tensor _cast_Float(const Tensor & self, bool non_blocking);
+  Tensor _cast_Int(const Tensor & self, bool non_blocking);
+  Tensor _cast_Long(const Tensor & self, bool non_blocking);
+  Tensor _cast_Short(const Tensor & self, bool non_blocking);
+  Tensor _cast_Half(const Tensor & self, bool non_blocking);
+  void backward(const Tensor & self, const Tensor & gradient, bool keep_graph, bool create_graph);
+  void set_data(const Tensor & self, const Tensor & new_data);
+  Tensor data(const Tensor & self);
+  bool is_leaf(const Tensor & self);
+  int64_t output_nr(const Tensor & self);
+  int64_t _version(const Tensor & self);
+  Tensor & requires_grad_(Tensor & self, bool
_requires_grad); + #ifdef BUILD_NAMEDTENSOR + Tensor & rename_(Tensor & self, c10::optional names); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor rename(const Tensor & self, c10::optional names); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_to(const Tensor & self, DimnameList names); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_to(const Tensor & self, DimnameList order, int64_t ellipsis_idx); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_as(const Tensor & self, const Tensor & other); + #endif + #ifdef BUILD_NAMEDTENSOR + std::vector align_tensors(TensorList tensors); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor refine_names(const Tensor & self, DimnameList names); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor unflatten(const Tensor & self, Dimname dim, IntArrayRef sizes, DimnameList names); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor unflatten(const Tensor & self, int64_t dim, IntArrayRef sizes, DimnameList names); + #endif + int64_t _debug_has_internal_overlap(const Tensor & self); + std::tuple _sobol_engine_draw(const Tensor & quasi, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated, c10::optional dtype); + Tensor & _sobol_engine_ff_(Tensor & self, int64_t n, const Tensor & sobolstate, int64_t dimension, int64_t num_generated); + Tensor & _sobol_engine_scramble_(Tensor & self, const Tensor & ltm, int64_t dimension); + Tensor & _sobol_engine_initialize_state_(Tensor & self, int64_t dimension); + Tensor _reshape_from_tensor(const Tensor & self, const Tensor & shape); + Tensor _shape_as_tensor(const Tensor & self); + Tensor dropout(const Tensor & input, double p, bool train); + Tensor & dropout_(Tensor & self, double p, bool train); + Tensor feature_dropout(const Tensor & input, double p, bool train); + Tensor & feature_dropout_(Tensor & self, double p, bool train); + Tensor alpha_dropout(const Tensor & input, double p, bool train); + Tensor & alpha_dropout_(Tensor & self, double p, bool train); + Tensor feature_alpha_dropout(const Tensor & input, double p, bool train); + Tensor & feature_alpha_dropout_(Tensor & self, double p, bool train); + Tensor abs(const Tensor & self); + Tensor & abs_(Tensor & self); + Tensor & abs_out(Tensor & out, const Tensor & self); + Tensor angle(const Tensor & self); + Tensor real(const Tensor & self); + Tensor imag(const Tensor & self); + Tensor conj(const Tensor & self); + Tensor acos(const Tensor & self); + Tensor & acos_(Tensor & self); + Tensor & acos_out(Tensor & out, const Tensor & self); + Tensor avg_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, bool ceil_mode, bool count_include_pad); + Tensor adaptive_avg_pool1d(const Tensor & self, IntArrayRef output_size); + std::tuple adaptive_max_pool1d(const Tensor & self, IntArrayRef output_size); + Tensor add(const Tensor & self, Scalar other, Scalar alpha); + Tensor & add_(Tensor & self, Scalar other, Scalar alpha); + Tensor addr(const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & addr_(Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor & addr_out(Tensor & out, const Tensor & self, const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha); + Tensor affine_grid_generator(const Tensor & theta, IntArrayRef size, bool align_corners); + Tensor affine_grid_generator_backward(const Tensor & grad, IntArrayRef size, bool align_corners); + Tensor all(const Tensor & self, int64_t dim, bool keepdim); + Tensor & all_out(Tensor & 
out, const Tensor & self, int64_t dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor all(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & all_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim); + #endif + bool allclose(const Tensor & self, const Tensor & other, double rtol, double atol, bool equal_nan); + Tensor any(const Tensor & self, int64_t dim, bool keepdim); + Tensor & any_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor any(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & any_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim); + #endif + Tensor arange(Scalar end, const TensorOptions & options); + Tensor arange(Scalar start, Scalar end, const TensorOptions & options); + Tensor arange(Scalar start, Scalar end, Scalar step, const TensorOptions & options); + Tensor & arange_out(Tensor & out, Scalar end); + Tensor _dim_arange(const Tensor & like, int64_t dim); + Tensor argmax(const Tensor & self, c10::optional dim, bool keepdim); + Tensor argmin(const Tensor & self, c10::optional dim, bool keepdim); + Tensor & as_strided_(Tensor & self, IntArrayRef size, IntArrayRef stride, c10::optional storage_offset); + Tensor asin(const Tensor & self); + Tensor & asin_(Tensor & self); + Tensor & asin_out(Tensor & out, const Tensor & self); + Tensor atan(const Tensor & self); + Tensor & _baddbmm_mkl_(Tensor & self, const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha); + Tensor bartlett_window(int64_t window_length, const TensorOptions & options); + Tensor bartlett_window(int64_t window_length, bool periodic, const TensorOptions & options); + Tensor batch_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); + std::tuple _batch_norm_impl_index(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool training, double momentum, double eps, bool cudnn_enabled); + std::tuple _batch_norm_impl_index_backward(int64_t impl_index, const Tensor & input, const Tensor & grad_output, const Tensor & weight, const Tensor & running_mean, const Tensor & running_var, const Tensor & save_mean, const Tensor & save_var_transform, bool train, double eps, std::array output_mask, const Tensor & reservedSpace); + Tensor bernoulli(const Tensor & self, Generator * generator); + Tensor & bernoulli_out(Tensor & out, const Tensor & self, Generator * generator); + Tensor bernoulli(const Tensor & self, double p, Generator * generator); + Tensor bilinear(const Tensor & input1, const Tensor & input2, const Tensor & weight, const Tensor & bias); + Tensor binary_cross_entropy_with_logits(const Tensor & self, const Tensor & target, const Tensor & weight, const Tensor & pos_weight, int64_t reduction); + Tensor binary_cross_entropy_with_logits_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, const Tensor & weight, const Tensor & pos_weight, int64_t reduction); + Tensor bitwise_not(const Tensor & self); + Tensor & bitwise_not_(Tensor & self); + Tensor logical_not(const Tensor & self); + Tensor & logical_not_(Tensor & self); + Tensor logical_xor(const Tensor & self, const Tensor & other); + Tensor & logical_xor_(Tensor & self, const Tensor & other); + Tensor blackman_window(int64_t window_length, const 
TensorOptions & options); + Tensor blackman_window(int64_t window_length, bool periodic, const TensorOptions & options); + std::vector broadcast_tensors(TensorList tensors); + Tensor cat(TensorList tensors, int64_t dim); + Tensor & cat_out(Tensor & out, TensorList tensors, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + Tensor cat(TensorList tensors, Dimname dim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & cat_out(Tensor & out, TensorList tensors, Dimname dim); + #endif + Tensor ceil(const Tensor & self); + Tensor & ceil_(Tensor & self); + Tensor chain_matmul(TensorList matrices); + std::vector chunk(const Tensor & self, int64_t chunks, int64_t dim); + Tensor clamp(const Tensor & self, c10::optional min, c10::optional max); + Tensor clamp_max(const Tensor & self, Scalar max); + Tensor clamp_min(const Tensor & self, Scalar min); + bool cudnn_is_acceptable(const Tensor & self); + Tensor constant_pad_nd(const Tensor & self, IntArrayRef pad, Scalar value); + Tensor contiguous(const Tensor & self, MemoryFormat memory_format); + Tensor convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); + Tensor convolution_overrideable(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups); + std::tuple convolution_backward_overrideable(const Tensor & grad_output, const Tensor & input, const Tensor & weight, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, std::array output_mask); + Tensor _convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled); + Tensor _convolution_nogroup(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding); + std::tuple _convolution_double_backward(const Tensor & ggI, const Tensor & ggW, const Tensor & ggb, const Tensor & gO, const Tensor & weight, const Tensor & self, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool transposed, IntArrayRef output_padding, int64_t groups, bool benchmark, bool deterministic, bool cudnn_enabled, std::array output_mask); + Tensor conv1d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups); + Tensor conv2d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups); + Tensor conv3d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, int64_t groups); + Tensor conv_tbc(const Tensor & self, const Tensor & weight, const Tensor & bias, int64_t pad); + std::tuple conv_tbc_backward(const Tensor & self, const Tensor & input, const Tensor & weight, const Tensor & bias, int64_t pad); + Tensor conv_transpose1d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation); + Tensor conv_transpose2d(const Tensor & input, const 
Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation); + Tensor conv_transpose3d(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef output_padding, int64_t groups, IntArrayRef dilation); + Tensor & copy_(Tensor & self, const Tensor & src, bool non_blocking); + Tensor cos(const Tensor & self); + Tensor cosh(const Tensor & self); + Tensor cosine_embedding_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin, int64_t reduction); + Tensor cumsum(const Tensor & self, int64_t dim, c10::optional dtype); + Tensor & cumsum_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor cumsum(const Tensor & self, Dimname dim, c10::optional dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & cumsum_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype); + #endif + Tensor cumprod(const Tensor & self, int64_t dim, c10::optional dtype); + Tensor & cumprod_out(Tensor & out, const Tensor & self, int64_t dim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor cumprod(const Tensor & self, Dimname dim, c10::optional dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & cumprod_out(Tensor & out, const Tensor & self, Dimname dim, c10::optional dtype); + #endif + Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, IntArrayRef input_lengths, IntArrayRef target_lengths, int64_t blank, int64_t reduction, bool zero_infinity); + Tensor ctc_loss(const Tensor & log_probs, const Tensor & targets, const Tensor & input_lengths, const Tensor & target_lengths, int64_t blank, int64_t reduction, bool zero_infinity); + Tensor det(const Tensor & self); + Tensor diag_embed(const Tensor & self, int64_t offset, int64_t dim1, int64_t dim2); + Tensor diagflat(const Tensor & self, int64_t offset); + Tensor diagonal(const Tensor & self, int64_t offset, int64_t dim1, int64_t dim2); + Tensor & fill_diagonal_(Tensor & self, Scalar fill_value, bool wrap); + Tensor div(const Tensor & self, Scalar other); + Tensor & div_(Tensor & self, Scalar other); + Tensor & dot_out(Tensor & out, const Tensor & self, const Tensor & tensor); + Tensor einsum(std::string equation, TensorList tensors); + Tensor embedding(const Tensor & weight, const Tensor & indices, int64_t padding_idx, bool scale_grad_by_freq, bool sparse); + Tensor embedding_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq, bool sparse); + Tensor embedding_sparse_backward(const Tensor & grad, const Tensor & indices, int64_t num_weights, int64_t padding_idx, bool scale_grad_by_freq); + std::tuple embedding_bag(const Tensor & weight, const Tensor & indices, const Tensor & offsets, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); + Tensor _embedding_bag_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, const Tensor & maximum_indices, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, bool sparse, const Tensor & per_sample_weights); + Tensor _embedding_bag_sparse_backward(const Tensor & grad, const Tensor & indices, const Tensor & offsets, const Tensor & offset2bag, const Tensor & bag_size, int64_t num_weights, bool scale_grad_by_freq, int64_t mode, const Tensor & per_sample_weights); + #ifdef BUILD_NAMEDTENSOR + 
Tensor empty(IntArrayRef size, c10::optional names, const TensorOptions & options, c10::optional memory_format); + #endif + Tensor new_empty(const Tensor & self, IntArrayRef size, const TensorOptions & options); + Tensor new_full(const Tensor & self, IntArrayRef size, Scalar fill_value, const TensorOptions & options); + Tensor new_zeros(const Tensor & self, IntArrayRef size, const TensorOptions & options); + Tensor & empty_out(Tensor & out, IntArrayRef size, c10::optional memory_format); + Tensor empty_like(const Tensor & self, c10::optional memory_format); + Tensor empty_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format); + Tensor erf(const Tensor & self); + Tensor erfc(const Tensor & self); + Tensor exp(const Tensor & self); + Tensor expm1(const Tensor & self); + Tensor & expm1_(Tensor & self); + Tensor expand(const Tensor & self, IntArrayRef size, bool implicit); + Tensor expand_as(const Tensor & self, const Tensor & other); + Tensor eye(int64_t n, const TensorOptions & options); + Tensor eye(int64_t n, int64_t m, const TensorOptions & options); + Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim); + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(const Tensor & self, int64_t start_dim, int64_t end_dim, Dimname out_dim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(const Tensor & self, Dimname start_dim, Dimname end_dim, Dimname out_dim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(const Tensor & self, DimnameList dims, Dimname out_dim); + #endif + Tensor & fill_(Tensor & self, Scalar value); + Tensor & fill_(Tensor & self, const Tensor & value); + Tensor floor(const Tensor & self); + Tensor & floor_(Tensor & self); + Tensor frac(const Tensor & self); + Tensor & frac_(Tensor & self); + Tensor & frac_out(Tensor & out, const Tensor & self); + #ifdef BUILD_NAMEDTENSOR + Tensor full(IntArrayRef size, Scalar fill_value, c10::optional names, const TensorOptions & options); + #endif + Tensor full(IntArrayRef size, Scalar fill_value, const TensorOptions & options); + Tensor & full_out(Tensor & out, IntArrayRef size, Scalar fill_value); + Tensor full_like(const Tensor & self, Scalar fill_value, c10::optional memory_format); + Tensor full_like(const Tensor & self, Scalar fill_value, const TensorOptions & options, c10::optional memory_format); + Tensor grid_sampler(const Tensor & input, const Tensor & grid, int64_t interpolation_mode, int64_t padding_mode, bool align_corners); + Tensor hann_window(int64_t window_length, const TensorOptions & options); + Tensor hann_window(int64_t window_length, bool periodic, const TensorOptions & options); + Tensor hamming_window(int64_t window_length, const TensorOptions & options); + Tensor hamming_window(int64_t window_length, bool periodic, const TensorOptions & options); + Tensor hamming_window(int64_t window_length, bool periodic, double alpha, const TensorOptions & options); + Tensor hamming_window(int64_t window_length, bool periodic, double alpha, double beta, const TensorOptions & options); + Tensor hinge_embedding_loss(const Tensor & self, const Tensor & target, double margin, int64_t reduction); + Tensor group_norm(const Tensor & input, int64_t num_groups, const Tensor & weight, const Tensor & bias, double eps, bool cudnn_enabled); + Tensor fft(const Tensor & self, int64_t signal_ndim, bool normalized); + Tensor ifft(const Tensor & self, int64_t signal_ndim, bool normalized); + Tensor rfft(const Tensor & self, int64_t signal_ndim, bool normalized, bool onesided); + Tensor irfft(const 
Tensor & self, int64_t signal_ndim, bool normalized, bool onesided, IntArrayRef signal_sizes); + int64_t _cufft_get_plan_cache_size(int64_t device_index); + int64_t _cufft_get_plan_cache_max_size(int64_t device_index); + void _cufft_set_plan_cache_max_size(int64_t device_index, int64_t max_size); + void _cufft_clear_plan_cache(int64_t device_index); + Tensor index(const Tensor & self, TensorList indices); + Tensor & index_copy_(Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + Tensor index_copy(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + #ifdef BUILD_NAMEDTENSOR + Tensor & index_copy_(Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_copy(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); + #endif + Tensor & index_put_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate); + Tensor index_put(const Tensor & self, TensorList indices, const Tensor & values, bool accumulate); + Tensor & _index_put_impl_(Tensor & self, TensorList indices, const Tensor & values, bool accumulate, bool unsafe); + Tensor instance_norm(const Tensor & input, const Tensor & weight, const Tensor & bias, const Tensor & running_mean, const Tensor & running_var, bool use_input_stats, double momentum, double eps, bool cudnn_enabled); + Tensor inverse(const Tensor & self); + Tensor & inverse_out(Tensor & out, const Tensor & self); + Tensor isclose(const Tensor & self, const Tensor & other, double rtol, double atol, bool equal_nan); + Tensor isnan(const Tensor & self); + bool is_distributed(const Tensor & self); + bool is_floating_point(const Tensor & self); + bool is_complex(const Tensor & self); + bool is_nonzero(const Tensor & self); + bool is_same_size(const Tensor & self, const Tensor & other); + bool is_signed(const Tensor & self); + Tensor kl_div(const Tensor & self, const Tensor & target, int64_t reduction); + std::tuple kthvalue(const Tensor & self, int64_t k, int64_t dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple kthvalue(const Tensor & self, int64_t k, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple kthvalue_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t k, Dimname dim, bool keepdim); + #endif + Tensor layer_norm(const Tensor & input, IntArrayRef normalized_shape, const Tensor & weight, const Tensor & bias, double eps, bool cudnn_enable); + Tensor linear(const Tensor & input, const Tensor & weight, const Tensor & bias); + Tensor fbgemm_linear_int8_weight_fp32_activation(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); + Tensor fbgemm_linear_int8_weight(const Tensor & input, const Tensor & weight, const Tensor & packed, const Tensor & col_offsets, Scalar weight_scale, Scalar weight_zero_point, const Tensor & bias); + std::tuple fbgemm_linear_quantize_weight(const Tensor & input); + Tensor fbgemm_pack_gemm_matrix_fp16(const Tensor & input); + Tensor fbgemm_linear_fp16_weight_fp32_activation(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); + Tensor fbgemm_linear_fp16_weight(const Tensor & input, const Tensor & packed_weight, const Tensor & bias); + Tensor fbgemm_pack_quantized_matrix(const Tensor & input); + Tensor fbgemm_pack_quantized_matrix(const Tensor & input, int64_t K, int64_t N); + Tensor linspace(Scalar start, Scalar end, 
int64_t steps, const TensorOptions & options); + Tensor log(const Tensor & self); + Tensor & log_(Tensor & self); + Tensor log10(const Tensor & self); + Tensor & log10_(Tensor & self); + Tensor log1p(const Tensor & self); + Tensor log2(const Tensor & self); + Tensor & log2_(Tensor & self); + Tensor logdet(const Tensor & self); + Tensor logspace(Scalar start, Scalar end, int64_t steps, double base, const TensorOptions & options); + Tensor log_softmax(const Tensor & self, int64_t dim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor log_softmax(const Tensor & self, Dimname dim, c10::optional dtype); + #endif + Tensor logsumexp(const Tensor & self, IntArrayRef dim, bool keepdim); + Tensor & logsumexp_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor logsumexp(const Tensor & self, DimnameList dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & logsumexp_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim); + #endif + Tensor margin_ranking_loss(const Tensor & input1, const Tensor & input2, const Tensor & target, double margin, int64_t reduction); + Tensor matmul(const Tensor & self, const Tensor & other); + Tensor & matmul_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor matrix_rank(const Tensor & self, double tol, bool symmetric); + Tensor matrix_rank(const Tensor & self, bool symmetric); + Tensor matrix_power(const Tensor & self, int64_t n); + std::tuple max(const Tensor & self, int64_t dim, bool keepdim); + std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, int64_t dim, bool keepdim); + Tensor max_values(const Tensor & self, IntArrayRef dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple max(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple max_out(Tensor & max, Tensor & max_values, const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor max_values(const Tensor & self, DimnameList dim, bool keepdim); + #endif + std::tuple max_pool1d_with_indices(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor max_pool1d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor max_pool2d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + Tensor max_pool3d(const Tensor & self, IntArrayRef kernel_size, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation, bool ceil_mode); + #ifdef BUILD_NAMEDTENSOR + Tensor mean(const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & mean_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype); + #endif + std::tuple median(const Tensor & self, int64_t dim, bool keepdim); + std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple median(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple median_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim); + #endif + std::tuple min(const Tensor & self, int64_t dim, bool keepdim); + std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, int64_t dim, bool keepdim); + Tensor 
min_values(const Tensor & self, IntArrayRef dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple min(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple min_out(Tensor & min, Tensor & min_indices, const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor min_values(const Tensor & self, DimnameList dim, bool keepdim); + #endif + Tensor mkldnn_convolution(const Tensor & self, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups); + Tensor mkldnn_convolution_backward_input(IntArrayRef self_size, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); + std::tuple mkldnn_convolution_backward_weights(IntArrayRef weight_size, const Tensor & grad_output, const Tensor & self, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, bool bias_defined); + std::tuple mkldnn_convolution_backward(const Tensor & self, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, IntArrayRef stride, IntArrayRef dilation, int64_t groups, std::array output_mask); + Tensor _sparse_mm(const Tensor & sparse, const Tensor & dense); + std::tuple mode(const Tensor & self, int64_t dim, bool keepdim); + std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, int64_t dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple mode(const Tensor & self, Dimname dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple mode_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool keepdim); + #endif + Tensor mul(const Tensor & self, Scalar other); + Tensor & mul_(Tensor & self, Scalar other); + Tensor mvlgamma(const Tensor & self, int64_t p); + Tensor & mvlgamma_(Tensor & self, int64_t p); + Tensor narrow(const Tensor & self, int64_t dim, int64_t start, int64_t length); + bool _nnpack_available(); + Tensor _nnpack_spatial_convolution(const Tensor & input, const Tensor & weight, const Tensor & bias, IntArrayRef padding, IntArrayRef stride); + std::tuple _nnpack_spatial_convolution_backward(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding, std::array output_mask); + Tensor _nnpack_spatial_convolution_backward_input(const Tensor & input, const Tensor & grad_output, const Tensor & weight, IntArrayRef padding); + Tensor _nnpack_spatial_convolution_backward_weight(const Tensor & input, IntArrayRef weightsize, const Tensor & grad_output, IntArrayRef padding); + #ifdef BUILD_NAMEDTENSOR + Tensor ones(IntArrayRef size, c10::optional names, const TensorOptions & options); + #endif + Tensor ones(IntArrayRef size, const TensorOptions & options); + Tensor & ones_out(Tensor & out, IntArrayRef size); + Tensor ones_like(const Tensor & self, c10::optional memory_format); + Tensor ones_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format); + Tensor pairwise_distance(const Tensor & x1, const Tensor & x2, double p, double eps, bool keepdim); + Tensor cdist(const Tensor & x1, const Tensor & x2, double p, c10::optional compute_mode); + Tensor _cdist_backward(const Tensor & grad, const Tensor & x1, const Tensor & x2, double p, const Tensor & cdist); + Tensor pdist(const Tensor & self, double p); + Tensor _pdist_forward(const Tensor & self, double p); + Tensor _pdist_backward(const Tensor & grad, const Tensor & self, double p, const 
Tensor & pdist); + Tensor cosine_similarity(const Tensor & x1, const Tensor & x2, int64_t dim, double eps); + Tensor permute(const Tensor & self, IntArrayRef dims); + Tensor numpy_T(const Tensor & self); + Tensor pixel_shuffle(const Tensor & self, int64_t upscale_factor); + bool is_pinned(const Tensor & self); + Tensor pin_memory(const Tensor & self); + Tensor pinverse(const Tensor & self, double rcond); + Tensor poisson_nll_loss(const Tensor & input, const Tensor & target, bool log_input, bool full, double eps, int64_t reduction); + Tensor scalar_tensor(Scalar s, const TensorOptions & options); + #ifdef BUILD_NAMEDTENSOR + Tensor rand(IntArrayRef size, c10::optional names, const TensorOptions & options); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor rand(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options); + #endif + Tensor rand(IntArrayRef size, const TensorOptions & options); + Tensor rand(IntArrayRef size, Generator * generator, const TensorOptions & options); + Tensor & rand_out(Tensor & out, IntArrayRef size); + Tensor & rand_out(Tensor & out, IntArrayRef size, Generator * generator); + Tensor rand_like(const Tensor & self, c10::optional memory_format); + Tensor rand_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format); + Tensor randint(int64_t high, IntArrayRef size, const TensorOptions & options); + Tensor randint(int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options); + Tensor randint(int64_t low, int64_t high, IntArrayRef size, const TensorOptions & options); + Tensor randint(int64_t low, int64_t high, IntArrayRef size, Generator * generator, const TensorOptions & options); + Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size); + Tensor & randint_out(Tensor & out, int64_t high, IntArrayRef size, Generator * generator); + Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size); + Tensor & randint_out(Tensor & out, int64_t low, int64_t high, IntArrayRef size, Generator * generator); + Tensor randint_like(const Tensor & self, int64_t high, c10::optional memory_format); + Tensor randint_like(const Tensor & self, int64_t low, int64_t high, c10::optional memory_format); + Tensor randint_like(const Tensor & self, int64_t high, const TensorOptions & options, c10::optional memory_format); + Tensor randint_like(const Tensor & self, int64_t low, int64_t high, const TensorOptions & options, c10::optional memory_format); + Tensor randn(IntArrayRef size, const TensorOptions & options); + Tensor randn(IntArrayRef size, Generator * generator, const TensorOptions & options); + #ifdef BUILD_NAMEDTENSOR + Tensor randn(IntArrayRef size, c10::optional names, const TensorOptions & options); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor randn(IntArrayRef size, Generator * generator, c10::optional names, const TensorOptions & options); + #endif + Tensor & randn_out(Tensor & out, IntArrayRef size); + Tensor & randn_out(Tensor & out, IntArrayRef size, Generator * generator); + Tensor randn_like(const Tensor & self, c10::optional memory_format); + Tensor randn_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format); + Tensor randperm(int64_t n, const TensorOptions & options); + Tensor randperm(int64_t n, Generator * generator, const TensorOptions & options); + Tensor & randperm_out(Tensor & out, int64_t n); + Tensor range(Scalar start, Scalar end, Scalar step, const TensorOptions & options); + Tensor range(Scalar start, Scalar end, const 
TensorOptions & options); + Tensor reciprocal(const Tensor & self); + Tensor neg(const Tensor & self); + Tensor & neg_(Tensor & self); + Tensor repeat(const Tensor & self, IntArrayRef repeats); + Tensor repeat_interleave(const Tensor & self, const Tensor & repeats, c10::optional dim); + Tensor repeat_interleave(const Tensor & self, int64_t repeats, c10::optional dim); + Tensor reshape(const Tensor & self, IntArrayRef shape); + Tensor reshape_as(const Tensor & self, const Tensor & other); + Tensor round(const Tensor & self); + Tensor & round_(Tensor & self); + Tensor rrelu(const Tensor & self, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor & rrelu_(Tensor & self, Scalar lower, Scalar upper, bool training, Generator * generator); + Tensor rsqrt(const Tensor & self); + Tensor & rsqrt_(Tensor & self); + #ifdef BUILD_NAMEDTENSOR + Tensor select(const Tensor & self, Dimname dim, int64_t index); + #endif + Tensor select(const Tensor & self, int64_t dim, int64_t index); + Tensor selu(const Tensor & self); + Tensor & selu_(Tensor & self); + Tensor celu(const Tensor & self, Scalar alpha); + Tensor & celu_(Tensor & self, Scalar alpha); + Tensor & sigmoid_out(Tensor & out, const Tensor & self); + Tensor sin(const Tensor & self); + Tensor & sin_(Tensor & self); + Tensor sinh(const Tensor & self); + Tensor & sinh_(Tensor & self); + Tensor & sinh_out(Tensor & out, const Tensor & self); + Tensor detach(const Tensor & self); + Tensor & detach_(Tensor & self); + int64_t size(const Tensor & self, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + int64_t size(const Tensor & self, Dimname dim); + #endif + Tensor slice(const Tensor & self, int64_t dim, int64_t start, int64_t end, int64_t step); + std::tuple slogdet(const Tensor & self); + Tensor smm(const Tensor & self, const Tensor & mat2); + Tensor softmax(const Tensor & self, int64_t dim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor softmax(const Tensor & self, Dimname dim, c10::optional dtype); + #endif + std::vector split(const Tensor & self, int64_t split_size, int64_t dim); + std::vector split_with_sizes(const Tensor & self, IntArrayRef split_sizes, int64_t dim); + Tensor squeeze(const Tensor & self); + Tensor squeeze(const Tensor & self, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + Tensor squeeze(const Tensor & self, Dimname dim); + #endif + Tensor & squeeze_(Tensor & self); + Tensor & squeeze_(Tensor & self, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + Tensor & squeeze_(Tensor & self, Dimname dim); + #endif + Tensor sspaddmm(const Tensor & self, const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha); + Tensor stack(TensorList tensors, int64_t dim); + Tensor & stack_out(Tensor & out, TensorList tensors, int64_t dim); + Tensor stft(const Tensor & self, int64_t n_fft, c10::optional hop_length, c10::optional win_length, const Tensor & window, bool normalized, bool onesided); + int64_t stride(const Tensor & self, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + int64_t stride(const Tensor & self, Dimname dim); + #endif + Tensor sum(const Tensor & self, c10::optional dtype); + Tensor sum(const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor sum(const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype); + #endif + Tensor & sum_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor & sum_out(Tensor & out, const Tensor & self, DimnameList dim, bool keepdim, c10::optional dtype); + 
#endif + Tensor sum_to_size(const Tensor & self, IntArrayRef size); + Tensor sqrt(const Tensor & self); + Tensor & sqrt_(Tensor & self); + Tensor & sqrt_out(Tensor & out, const Tensor & self); + Tensor std(const Tensor & self, bool unbiased); + Tensor std(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + std::tuple std_mean(const Tensor & self, bool unbiased); + std::tuple std_mean(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple std_mean(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim); + #endif + Tensor & std_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor std(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & std_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased, bool keepdim); + #endif + Tensor prod(const Tensor & self, c10::optional dtype); + Tensor prod(const Tensor & self, int64_t dim, bool keepdim, c10::optional dtype); + Tensor & prod_out(Tensor & out, const Tensor & self, int64_t dim, bool keepdim, c10::optional dtype); + #ifdef BUILD_NAMEDTENSOR + Tensor prod(const Tensor & self, Dimname dim, bool keepdim, c10::optional dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & prod_out(Tensor & out, const Tensor & self, Dimname dim, bool keepdim, c10::optional dtype); + #endif + Tensor t(const Tensor & self); + Tensor & t_(Tensor & self); + Tensor tan(const Tensor & self); + Tensor tanh(const Tensor & self); + Tensor tensordot(const Tensor & self, const Tensor & other, IntArrayRef dims_self, IntArrayRef dims_other); + Tensor threshold(const Tensor & self, Scalar threshold, Scalar value); + Tensor & threshold_(Tensor & self, Scalar threshold, Scalar value); + Tensor & threshold_out(Tensor & out, const Tensor & self, Scalar threshold, Scalar value); + Tensor threshold_backward(const Tensor & grad_output, const Tensor & self, Scalar threshold); + Tensor transpose(const Tensor & self, int64_t dim0, int64_t dim1); + #ifdef BUILD_NAMEDTENSOR + Tensor transpose(const Tensor & self, Dimname dim0, Dimname dim1); + #endif + Tensor & transpose_(Tensor & self, int64_t dim0, int64_t dim1); + Tensor one_hot(const Tensor & self, int64_t num_classes); + Tensor rot90(const Tensor & self, int64_t k, IntArrayRef dims); + Tensor trapz(const Tensor & y, const Tensor & x, int64_t dim); + Tensor trapz(const Tensor & y, double dx, int64_t dim); + Tensor _trilinear(const Tensor & i1, const Tensor & i2, const Tensor & i3, IntArrayRef expand1, IntArrayRef expand2, IntArrayRef expand3, IntArrayRef sumdim, int64_t unroll_dim); + Tensor triplet_margin_loss(const Tensor & anchor, const Tensor & positive, const Tensor & negative, double margin, double p, double eps, bool swap, int64_t reduction); + Tensor trunc(const Tensor & self); + Tensor & trunc_(Tensor & self); + Tensor type_as(const Tensor & self, const Tensor & other); + bool _has_compatible_shallow_copy_type(const Tensor & self, const Tensor & from); + Tensor _unsafe_view(const Tensor & self, IntArrayRef size); + Tensor unsqueeze(const Tensor & self, int64_t dim); + Tensor & unsqueeze_(Tensor & self, int64_t dim); + Tensor var(const Tensor & self, bool unbiased); + Tensor var(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + Tensor & var_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor var(const Tensor & self, 
DimnameList dim, bool unbiased, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & var_out(Tensor & out, const Tensor & self, DimnameList dim, bool unbiased, bool keepdim); + #endif + std::tuple var_mean(const Tensor & self, bool unbiased); + std::tuple var_mean(const Tensor & self, IntArrayRef dim, bool unbiased, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + std::tuple var_mean(const Tensor & self, DimnameList dim, bool unbiased, bool keepdim); + #endif + Tensor view_as(const Tensor & self, const Tensor & other); + Tensor where(const Tensor & condition, const Tensor & self, const Tensor & other); + std::vector where(const Tensor & condition); + Tensor norm_except_dim(const Tensor & v, int64_t pow, int64_t dim); + Tensor _weight_norm(const Tensor & v, const Tensor & g, int64_t dim); + std::tuple _weight_norm_differentiable_backward(const Tensor & grad_w, const Tensor & saved_v, const Tensor & saved_g, const Tensor & saved_norms, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + Tensor zeros(IntArrayRef size, c10::optional names, const TensorOptions & options); + #endif + Tensor zeros(IntArrayRef size, const TensorOptions & options); + Tensor & zeros_out(Tensor & out, IntArrayRef size); + Tensor zeros_like(const Tensor & self, c10::optional memory_format); + Tensor zeros_like(const Tensor & self, const TensorOptions & options, c10::optional memory_format); + Tensor _sparse_sum(const Tensor & self); + Tensor _sparse_sum(const Tensor & self, ScalarType dtype); + Tensor _sparse_sum(const Tensor & self, IntArrayRef dim); + Tensor _sparse_sum(const Tensor & self, IntArrayRef dim, ScalarType dtype); + Tensor norm(const Tensor & self, c10::optional p, ScalarType dtype); + Tensor norm(const Tensor & self, Scalar p); + Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); + Tensor norm(const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim); + Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype); + Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, IntArrayRef dim, bool keepdim); + #ifdef BUILD_NAMEDTENSOR + Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor norm(const Tensor & self, c10::optional p, DimnameList dim, bool keepdim); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & norm_out(Tensor & out, const Tensor & self, c10::optional p, DimnameList dim, bool keepdim); + #endif + Tensor frobenius_norm(const Tensor & self); + Tensor frobenius_norm(const Tensor & self, IntArrayRef dim, bool keepdim); + Tensor & frobenius_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim); + Tensor nuclear_norm(const Tensor & self, bool keepdim); + Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, bool keepdim); + Tensor nuclear_norm(const Tensor & self, IntArrayRef dim, bool keepdim); + Tensor & nuclear_norm_out(Tensor & out, const Tensor & self, IntArrayRef dim, bool keepdim); + Tensor & resize_as_(Tensor & self, const Tensor & the_template, c10::optional memory_format); + Tensor sub(const Tensor & self, Scalar other, Scalar alpha); + Tensor & sub_(Tensor & self, Scalar other, Scalar alpha); + Tensor rsub(const Tensor & self, const Tensor & other, Scalar alpha); + Tensor rsub(const Tensor & 
self, Scalar other, Scalar alpha); + Tensor _sparse_addmm(const Tensor & self, const Tensor & sparse, const Tensor & dense, Scalar beta, Scalar alpha); + Tensor sparse_coo_tensor(IntArrayRef size, const TensorOptions & options); + Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, const TensorOptions & options); + Tensor sparse_coo_tensor(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options); + Tensor _sparse_coo_tensor_unsafe(const Tensor & indices, const Tensor & values, IntArrayRef size, const TensorOptions & options); + Tensor to_dense_backward(const Tensor & grad, const Tensor & input); + std::vector unbind(const Tensor & self, int64_t dim); + #ifdef BUILD_NAMEDTENSOR + std::vector unbind(const Tensor & self, Dimname dim); + #endif + Tensor to_mkldnn_backward(const Tensor & grad, const Tensor & input); + Tensor to(const Tensor & self, const TensorOptions & options, bool non_blocking, bool copy, c10::optional memory_format); + Tensor to(const Tensor & self, Device device, ScalarType dtype, bool non_blocking, bool copy, c10::optional memory_format); + Tensor to(const Tensor & self, ScalarType dtype, bool non_blocking, bool copy, c10::optional memory_format); + Tensor to(const Tensor & self, const Tensor & other, bool non_blocking, bool copy, c10::optional memory_format); + std::vector meshgrid(TensorList tensors); + Tensor cartesian_prod(TensorList tensors); + Tensor combinations(const Tensor & self, int64_t r, bool with_replacement); + Scalar item(const Tensor & self); + ScalarType result_type(const Tensor & tensor, const Tensor & other); + ScalarType result_type(const Tensor & tensor, Scalar other); + ScalarType result_type(Scalar scalar, const Tensor & tensor); + ScalarType result_type(Scalar scalar1, Scalar scalar2); + bool can_cast(ScalarType from, ScalarType to); + ScalarType promote_types(ScalarType type1, ScalarType type2); + std::tuple _thnn_differentiable_lstm_cell_backward(const Tensor & grad_hy, const Tensor & grad_cy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & input_bias, const Tensor & hidden_bias, const Tensor & cx, const Tensor & cy); + std::tuple _thnn_differentiable_gru_cell_backward(const Tensor & grad_hy, const Tensor & input_gates, const Tensor & hidden_gates, const Tensor & hx, const Tensor & input_bias, const Tensor & hidden_bias); + std::tuple lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); + std::tuple lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); + std::tuple gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); + std::tuple gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); + std::tuple rnn_tanh(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); + std::tuple rnn_tanh(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); + std::tuple rnn_relu(const Tensor & input, 
const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); + std::tuple rnn_relu(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); + std::tuple lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh); + Tensor gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh); + Tensor rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh); + Tensor rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh); + std::tuple quantized_lstm(const Tensor & input, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first, c10::optional dtype, bool use_dynamic); + std::tuple quantized_lstm(const Tensor & data, const Tensor & batch_sizes, TensorList hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, c10::optional dtype, bool use_dynamic); + std::tuple quantized_gru(const Tensor & input, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional, bool batch_first); + std::tuple quantized_gru(const Tensor & data, const Tensor & batch_sizes, const Tensor & hx, TensorList params, bool has_biases, int64_t num_layers, double dropout, bool train, bool bidirectional); + std::tuple quantized_lstm_cell(const Tensor & input, TensorList hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); + Tensor quantized_gru_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); + Tensor quantized_rnn_relu_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); + Tensor quantized_rnn_tanh_cell(const Tensor & input, const Tensor & hx, const Tensor & w_ih, const Tensor & w_hh, const Tensor & b_ih, const Tensor & b_hh, const Tensor & packed_ih, const Tensor & packed_hh, const Tensor & col_offsets_ih, const Tensor & col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh); + std::tuple _pack_padded_sequence(const Tensor & input, const Tensor & lengths, bool batch_first); + Tensor _pack_padded_sequence_backward(const Tensor & grad, IntArrayRef input_size, const Tensor & batch_sizes, bool batch_first); + std::tuple _pad_packed_sequence(const Tensor & data, const Tensor & batch_sizes, bool batch_first, Scalar padding_value, int64_t total_length); + 
Tensor masked_fill(const Tensor & self, const Tensor & mask, Scalar value); + Tensor masked_fill(const Tensor & self, const Tensor & mask, const Tensor & value); + Tensor masked_scatter(const Tensor & self, const Tensor & mask, const Tensor & source); + Tensor index_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & source); + #ifdef BUILD_NAMEDTENSOR + Tensor index_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & source); + #endif + Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); + Tensor index_fill(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & value); + #ifdef BUILD_NAMEDTENSOR + Tensor & index_fill_(Tensor & self, Dimname dim, const Tensor & index, Scalar value); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & index_fill_(Tensor & self, Dimname dim, const Tensor & index, const Tensor & value); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_fill(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & value); + #endif + Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + Tensor scatter(const Tensor & self, int64_t dim, const Tensor & index, Scalar value); + #ifdef BUILD_NAMEDTENSOR + Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor scatter(const Tensor & self, Dimname dim, const Tensor & index, Scalar value); + #endif + Tensor scatter_add(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & src); + #ifdef BUILD_NAMEDTENSOR + Tensor scatter_add(const Tensor & self, Dimname dim, const Tensor & index, const Tensor & src); + #endif + Tensor & lt_(Tensor & self, Scalar other); + Tensor & lt_(Tensor & self, const Tensor & other); + Tensor & gt_(Tensor & self, Scalar other); + Tensor & gt_(Tensor & self, const Tensor & other); + Tensor & le_(Tensor & self, Scalar other); + Tensor & le_(Tensor & self, const Tensor & other); + Tensor & ge_(Tensor & self, Scalar other); + Tensor & ge_(Tensor & self, const Tensor & other); + Tensor & eq_(Tensor & self, Scalar other); + Tensor & eq_(Tensor & self, const Tensor & other); + Tensor & ne_(Tensor & self, Scalar other); + Tensor & ne_(Tensor & self, const Tensor & other); + Tensor bitwise_xor(const Tensor & self, Scalar other); + Tensor bitwise_xor(const Tensor & self, const Tensor & other); + Tensor & bitwise_xor_(Tensor & self, Scalar other); + Tensor & bitwise_xor_(Tensor & self, const Tensor & other); + Tensor __xor__(const Tensor & self, Scalar other); + Tensor __xor__(const Tensor & self, const Tensor & other); + Tensor & __ixor__(Tensor & self, Scalar other); + Tensor & __ixor__(Tensor & self, const Tensor & other); + Tensor & atan2_(Tensor & self, const Tensor & other); + Tensor & digamma_(Tensor & self); + Tensor & polygamma_(Tensor & self, int64_t n); + Tensor & addcdiv_(Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + Tensor & cross_out(Tensor & out, const Tensor & self, const Tensor & other, c10::optional dim); + Tensor cross(const Tensor & self, const Tensor & other, c10::optional dim); + Tensor triu(const Tensor & self, int64_t diagonal); + Tensor tril(const Tensor & self, int64_t diagonal); + #ifdef BUILD_NAMEDTENSOR + Tensor & index_select_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & 
index); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_select(const Tensor & self, Dimname dim, const Tensor & index); + #endif + std::vector nonzero_numpy(const Tensor & self); + #ifdef BUILD_NAMEDTENSOR + Tensor & gather_out(Tensor & out, const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad); + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor gather(const Tensor & self, Dimname dim, const Tensor & index, bool sparse_grad); + #endif + Tensor _gather_sparse_backward(const Tensor & self, int64_t dim, const Tensor & index, const Tensor & grad); + Tensor & addcmul_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + Tensor addcmul(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + Tensor & addcmul_(Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + Tensor & addcdiv_out(Tensor & out, const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + Tensor addcdiv(const Tensor & self, const Tensor & tensor1, const Tensor & tensor2, Scalar value); + std::tuple triangular_solve_out(Tensor & X, Tensor & M, const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); + std::tuple triangular_solve(const Tensor & self, const Tensor & A, bool upper, bool transpose, bool unitriangular); + std::tuple symeig_out(Tensor & e, Tensor & V, const Tensor & self, bool eigenvectors, bool upper); + std::tuple symeig(const Tensor & self, bool eigenvectors, bool upper); + std::tuple svd_out(Tensor & U, Tensor & S, Tensor & V, const Tensor & self, bool some, bool compute_uv); + std::tuple svd(const Tensor & self, bool some, bool compute_uv); + Tensor & cholesky_out(Tensor & out, const Tensor & self, bool upper); + Tensor cholesky(const Tensor & self, bool upper); + Tensor & cholesky_solve_out(Tensor & out, const Tensor & self, const Tensor & input2, bool upper); + Tensor cholesky_solve(const Tensor & self, const Tensor & input2, bool upper); + std::tuple solve(const Tensor & self, const Tensor & A); + std::tuple solve_out(Tensor & solution, Tensor & lu, const Tensor & self, const Tensor & A); + std::tuple qr_out(Tensor & Q, Tensor & R, const Tensor & self, bool some); + std::tuple qr(const Tensor & self, bool some); + Tensor & lu_solve_out(Tensor & out, const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); + Tensor lu_solve(const Tensor & self, const Tensor & LU_data, const Tensor & LU_pivots); + Tensor & digamma_out(Tensor & out, const Tensor & self); + Tensor digamma(const Tensor & self); + Tensor & polygamma_out(Tensor & out, int64_t n, const Tensor & self); + Tensor polygamma(int64_t n, const Tensor & self); + Tensor sign(const Tensor & self); + Tensor & sign_(Tensor & self); + Tensor & atan2_out(Tensor & out, const Tensor & self, const Tensor & other); + Tensor atan2(const Tensor & self, const Tensor & other); + #ifdef BUILD_NAMEDTENSOR + std::tuple sort_out(Tensor & values, Tensor & indices, const Tensor & self, Dimname dim, bool descending); + #endif + #ifdef BUILD_NAMEDTENSOR + std::tuple sort(const Tensor & self, Dimname dim, bool descending); + #endif + Tensor argsort(const Tensor & self, int64_t dim, bool descending); + #ifdef BUILD_NAMEDTENSOR + Tensor argsort(const Tensor & self, Dimname dim, bool descending); + #endif + Tensor all(const Tensor & self); + Tensor any(const Tensor & self); + Tensor normal(double mean, double std, IntArrayRef size, Generator * generator, const TensorOptions & options); + Tensor 
& normal_out(Tensor & out, double mean, double std, IntArrayRef size, Generator * generator); + Tensor alias(const Tensor & self); + Tensor & mse_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor mse_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & l1_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor l1_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & multilabel_margin_loss_out(Tensor & out, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor multilabel_margin_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & nll_loss_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor nll_loss(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor & nll_loss2d_out(Tensor & out, const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor nll_loss2d(const Tensor & self, const Tensor & target, const Tensor & weight, int64_t reduction, int64_t ignore_index); + Tensor smooth_l1_loss(const Tensor & self, const Tensor & target, int64_t reduction); + Tensor smooth_l1_loss_backward(const Tensor & grad_output, const Tensor & self, const Tensor & target, int64_t reduction); + Tensor & log_sigmoid_out(Tensor & out, const Tensor & self); + Tensor log_sigmoid(const Tensor & self); + Tensor adaptive_avg_pool2d(const Tensor & self, IntArrayRef output_size); + Tensor _test_optional_float(const Tensor & self, c10::optional scale); + Tensor sigmoid_backward(const Tensor & grad_output, const Tensor & output); + Tensor & thnn_conv2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + Tensor thnn_conv2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + Tensor & thnn_conv_depthwise2d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + Tensor thnn_conv_depthwise2d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding, IntArrayRef dilation); + Tensor & slow_conv3d_out(Tensor & out, const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + Tensor slow_conv3d(const Tensor & self, const Tensor & weight, IntArrayRef kernel_size, const Tensor & bias, IntArrayRef stride, IntArrayRef padding); + Tensor isfinite(const Tensor & self); +} // namespace TypeDefault + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/Utils.h b/thirdparty/libtorch/include/ATen/Utils.h new file mode 100644 index 0000000000..d7cf4cdee3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Utils.h @@ -0,0 +1,164 @@ +#pragma once + +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#if defined(__clang__) +#define __ubsan_ignore_float_divide_by_zero__ 
__attribute__((no_sanitize("float-divide-by-zero"))) +#define __ubsan_ignore_vptr__ __attribute__((no_sanitize("vptr"))) +#else +#define __ubsan_ignore_float_divide_by_zero__ +#define __ubsan_ignore_vptr__ +#endif + +#define AT_DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName&) = delete; \ + void operator=(const TypeName&) = delete + +namespace at { + +CAFFE2_API int _crash_if_asan(int); + +static inline const Storage& checked_storage( + const Storage& expr, + const char* name, + int pos, + DeviceType device_type, + caffe2::TypeMeta dtype) { + if (expr.device_type() != device_type) { + AT_ERROR( + "Expected object of device type ", + device_type, + " but got device type ", + expr.data_ptr().device().type(), + " for argument #", + pos, + " '", + name, + "'"); + } + if (expr.dtype() != dtype) { + AT_ERROR( + "Expected object of data type ", + dtype, + " but got data type ", + expr.dtype().id(), + " for argument #", + pos, + " '", + name, + "'"); + } + return expr; +} + +// TODO: This unwrapping code is ONLY used for TH bindings; once TH goes +// away, we can delete this function +static inline TensorImpl* checked_dense_tensor_unwrap(const Tensor& expr, const char * name, int pos, const char * api, bool allowNull, DeviceType device_type, ScalarType scalar_type) { + if(allowNull && !expr.defined()) { + return nullptr; + } + if (expr.layout() != Layout::Strided) { + AT_ERROR("Expected dense tensor but got ", expr.layout(), + " for argument #", pos, " '", name, "' in call to ", api); + } + if (expr.device().type() != device_type) { + AT_ERROR("Expected object of device type ", device_type, " but got device type ", expr.device().type(), + " for argument #", pos, " '", name, "' in call to ", api); + } + if (expr.scalar_type() != scalar_type) { + AT_ERROR("Expected object of scalar type ", scalar_type, " but got scalar type ", expr.scalar_type(), + " for argument #", pos, " '", name, "' in call to ", api); + } + return expr.unsafeGetTensorImpl(); +} + +// Converts a TensorList (i.e. ArrayRef to vector of TensorImpl*) +static inline std::vector checked_tensor_list_unwrap(ArrayRef tensors, const char * name, int pos, Backend backend, ScalarType scalar_type) { + std::vector unwrapped; + unwrapped.reserve(tensors.size()); + for (unsigned int i = 0; i < tensors.size(); ++i) { + const auto& expr = tensors[i]; + if (tensorTypeIdToBackend(c10::impl::dispatchTypeId(expr.type_set())) != backend) { + AT_ERROR("Expected object of backend ", backend, " but got backend ", tensorTypeIdToBackend(c10::impl::dispatchTypeId(expr.type_set())), + " for sequence element ", i, " in sequence argument at position #", pos, " '", name, "'"); + } + if (expr.scalar_type() != scalar_type) { + AT_ERROR("Expected object of scalar type ", scalar_type, " but got scalar type ", expr.scalar_type(), + " for sequence element ", i , " in sequence argument at position #", pos, " '", name, "'"); + } + unwrapped.emplace_back(expr.unsafeGetTensorImpl()); + } + return unwrapped; +} + +template +std::array check_intlist(ArrayRef list, const char * name, int pos) { + if (list.empty()) { + // TODO: is this necessary? We used to treat nullptr-vs-not in IntList differently + // with strides as a way of faking optional. 
+ list = {}; + } + auto res = std::array(); + if (list.size() == 1 && N > 1) { + res.fill(list[0]); + return res; + } + if (list.size() != N) { + AT_ERROR("Expected a list of ", N, " ints but got ", list.size(), " for argument #", pos, " '", name, "'"); + } + std::copy_n(list.begin(), N, res.begin()); + return res; +} + +inline int64_t sum_intlist(ArrayRef list) { + return std::accumulate(list.begin(), list.end(), 0ll); +} + +inline int64_t prod_intlist(ArrayRef list) { + return std::accumulate(list.begin(), list.end(), 1ll, std::multiplies()); +} + +/** + * Utility function used in tensor implementations, which + * supplies the default generator to tensors, if an input generator + * is not supplied. The input Generator* is also static casted to + * the backend generator type (CPU/CUDAGenerator etc.) + */ +template +static inline T * get_generator_or_default(Generator * expr, Generator * defaultValue) { + if (!expr) { + expr = defaultValue; + } + if (T::device_type() == expr->device().type()) { + return static_cast(expr); + } + AT_ERROR("Expected a '", T::device_type(), "' device type for generator but found '", expr->device().type(), "'"); +} + +/** + * Utility function to static cast input Generator* to + * the backend generator type (CPU/CUDAGenerator etc.) + */ +template +static inline T * check_generator(Generator * expr) { + if (T::device_type() == expr->device().type()) { + return static_cast(expr); + } + AT_ERROR("Expected a '", T::device_type(), "' device type for generator but found '", expr->device().type(), "'"); +} + +} // at diff --git a/thirdparty/libtorch/include/ATen/Version.h b/thirdparty/libtorch/include/ATen/Version.h new file mode 100644 index 0000000000..18fd31d3ed --- /dev/null +++ b/thirdparty/libtorch/include/ATen/Version.h @@ -0,0 +1,14 @@ +#include + +namespace at { + +/// Returns a detailed string describing the configuration PyTorch. +CAFFE2_API std::string show_config(); + +CAFFE2_API std::string get_mkl_version(); + +CAFFE2_API std::string get_mkldnn_version(); + +CAFFE2_API std::string get_openmp_version(); + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/WrapDimUtils.h b/thirdparty/libtorch/include/ATen/WrapDimUtils.h new file mode 100644 index 0000000000..1cdbe150c1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/WrapDimUtils.h @@ -0,0 +1,82 @@ +#pragma once + +#include +#include +#include + +namespace at { + +static inline int64_t maybe_wrap_dim(int64_t dim, int64_t dim_post_expr, bool wrap_scalar=true) { + return c10::maybe_wrap_dim(dim, dim_post_expr, wrap_scalar); +} + +static inline int64_t maybe_wrap_dim(int64_t dim, TensorImpl *tensor) { + return maybe_wrap_dim(dim, tensor->dim()); +} + +static inline int64_t maybe_wrap_dim(int64_t dim, TensorList tensors) { + if (tensors.size() == 0) { + // can't wrap empty TensorList; rely on underlying implementation to throw error if necessary. 
+ return dim; + } + return maybe_wrap_dim(dim, tensors[0].dim()); +} + +static inline int64_t maybe_wrap_dim(int64_t dim, const std::vector> & tensor_sizes) { + if (tensor_sizes.size() == 0) { + // can't wrap empty list; rely on underlying implementation to throw error if necessary + return dim; + } + return maybe_wrap_dim(dim, tensor_sizes[0].size()); +} + +// wrap each of dims basing on dim_post_expr +static inline void maybe_wrap_dims(std::vector& dims, int64_t dim_post_expr) { + if (dim_post_expr <= 0) { + dim_post_expr = 1; // this will make range [-1, 0] + } + int64_t min = -dim_post_expr; + int64_t max = dim_post_expr - 1; + for (auto& dim : dims) { + if (dim < min || dim > max) { + AT_INDEX_ERROR( + "Dimension out of range (expected to be in range of [", + min, ", ", max, "], but got ", dim, ")"); + } + if (dim < 0) dim += dim_post_expr; + } +} + +// previously, size [0] tensors were the only possible empty tensors; thus, it wasn't possible +// to cat empty tensors unless all the other tensors were 1-dimensional, so we allowed these tensors +// to be "skipped" (both for wrap dimension behavior and dimension size checking). +// We maintain this behavior for backwards compatibility, but only for this specific size +// (i.e. other empty sizes are not skipped). +static inline int64_t legacy_cat_wrap_dim(int64_t dim, const std::vector>& tensor_sizes) { + for (auto& sizes : tensor_sizes) { + if (sizes == std::vector({0})) { + continue; + } + return maybe_wrap_dim(dim, sizes.size()); + } + return dim; +} + +static inline int64_t legacy_cat_wrap_dim(int64_t dim, TensorList tensors) { + for (auto& tensor : tensors) { + if (tensor.dim() == 1 && tensor.sizes()[0] == 0) { + continue; + } + return maybe_wrap_dim(dim, tensor.dim()); + } + return dim; +} + +// wrap negative dims in a vector +static inline void wrap_all_dims(std::vector& dims_to_wrap, int64_t tensor_total_dims) { + for (size_t i = 0; i < dims_to_wrap.size(); i++) { + dims_to_wrap[i] = maybe_wrap_dim(dims_to_wrap[i], tensor_total_dims); + } +} + +} diff --git a/thirdparty/libtorch/include/ATen/WrapDimUtilsMulti.h b/thirdparty/libtorch/include/ATen/WrapDimUtilsMulti.h new file mode 100644 index 0000000000..a2af1b0dcd --- /dev/null +++ b/thirdparty/libtorch/include/ATen/WrapDimUtilsMulti.h @@ -0,0 +1,26 @@ +#pragma once + +#include +#include +#include +#include + +namespace at { + +// This is in an extra file to work around strange interaction of +// bitset on Windows with operator overloading + +constexpr size_t dim_bitset_size = 64; + +static inline std::bitset dim_list_to_bitset(IntArrayRef dims, int64_t ndims) { + TORCH_CHECK(ndims <= (int64_t) dim_bitset_size, "only tensors with up to ", dim_bitset_size, " dims are supported"); + std::bitset seen; + for (size_t i = 0; i < dims.size(); i++) { + size_t dim = maybe_wrap_dim(dims[i], ndims); + TORCH_CHECK(!seen[dim], "dim ", dim, " appears multiple times in the list of dims"); + seen[dim] = true; + } + return seen; +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/ATenGeneral.h b/thirdparty/libtorch/include/ATen/core/ATenGeneral.h new file mode 100644 index 0000000000..9b787a2163 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/ATenGeneral.h @@ -0,0 +1,3 @@ +#pragma once + +#include diff --git a/thirdparty/libtorch/include/ATen/core/Array.h b/thirdparty/libtorch/include/ATen/core/Array.h new file mode 100644 index 0000000000..043d132cdf --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Array.h @@ -0,0 +1,38 @@ +#pragma once + +// A fixed-size array type 
usable from both host and +// device code. + +#include + +namespace at { namespace detail { + +template +struct alignas(16) Array { + T data[size]; + + C10_HOST_DEVICE T operator[](int i) const { + return data[i]; + } + C10_HOST_DEVICE T& operator[](int i) { + return data[i]; + } +#ifdef __HIP_PLATFORM_HCC__ + C10_HOST_DEVICE Array() = default; + C10_HOST_DEVICE Array(const Array&) = default; + C10_HOST_DEVICE Array& operator=(const Array&) = default; +#else + Array() = default; + Array(const Array&) = default; + Array& operator=(const Array&) = default; +#endif + + // Fill the array with x. + C10_HOST_DEVICE Array(T x) { + for (int i = 0; i < size; i++) { + data[i] = x; + } + } +}; + +}} diff --git a/thirdparty/libtorch/include/ATen/core/Backtrace.h b/thirdparty/libtorch/include/ATen/core/Backtrace.h new file mode 100644 index 0000000000..ac72896875 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Backtrace.h @@ -0,0 +1,2 @@ +#include +#include diff --git a/thirdparty/libtorch/include/ATen/core/DeprecatedTypeProperties.h b/thirdparty/libtorch/include/ATen/core/DeprecatedTypeProperties.h new file mode 100644 index 0000000000..719cd9a186 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/DeprecatedTypeProperties.h @@ -0,0 +1,131 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + + +namespace at { + +class Tensor; + +// This class specifies a Backend and a ScalarType. Currently, it primarily +// serves as a replacement return value for Tensor::type(). Previously, +// Tensor::type() returned Type&, but we are changing Type to not be +// dtype-specific. +class CAFFE2_API DeprecatedTypeProperties { + public: + DeprecatedTypeProperties(Backend backend, ScalarType scalar_type) + : backend_(backend), scalar_type_(scalar_type) {} + + Backend backend() const { + return backend_; + } + + Layout layout() const { + return layout_from_backend(backend_); + } + + bool is_sparse() const { + return layout_from_backend(backend()) == kSparse; + } + + DeviceType device_type() const { + return backendToDeviceType(backend_); + } + + bool is_cuda() const { + return backendToDeviceType(backend_) == kCUDA; + } + + ScalarType scalarType() const { + return scalar_type_; + } + + caffe2::TypeMeta typeMeta() const { + return scalarTypeToTypeMeta(scalar_type_); + } + + bool operator==(const DeprecatedTypeProperties& other) const { + return backend_ == other.backend() && scalar_type_ == other.scalarType(); + } + + bool operator!=(const DeprecatedTypeProperties& other) const { + return !(*this == other); + } + + std::string toString() const { + std::string base_str; + if (backend_ == Backend::Undefined || scalar_type_ == ScalarType::Undefined) { + base_str = "UndefinedType"; + } else { + base_str = std::string(at::toString(backend_)) + at::toString(scalar_type_) + "Type"; + } + return base_str; + } + + DeprecatedTypeProperties & toBackend(Backend b) const { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + b, scalar_type_); + } + + DeprecatedTypeProperties & toScalarType(ScalarType s) const { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + backend_, s); + } + + DeprecatedTypeProperties & cpu() const { + return toBackend(Backend::CPU); + } + + DeprecatedTypeProperties & cuda() const { + return toBackend(Backend::CUDA); + } + + DeprecatedTypeProperties & hip() const { + return toBackend(Backend::HIP); + } + + /// Constructs the `TensorOptions` from a type and a `device_index`. 
+ TensorOptions options(int16_t device_index = -1) const { + return TensorOptions().dtype(typeMeta()) + .device(device_type(), device_index) + .layout(layout()); + } + + /// Constructs the `TensorOptions` from a type and a Device. Asserts that + /// the device type matches the device type of the type. + TensorOptions options(c10::optional device_opt) const { + if (!device_opt.has_value()) { + return options(-1); + } else { + Device device = device_opt.value(); + AT_ASSERT(device.type() == device_type()); + return options(device.index()); + } + } + + operator TensorOptions() const { + return options(); + } + + int64_t id() const { + return static_cast(backend()) * + static_cast(ScalarType::NumOptions) + + static_cast(scalarType()); + } + + Tensor unsafeTensorFromTH(void * th_pointer, bool retain) const; + Storage unsafeStorageFromTH(void * th_pointer, bool retain) const; + Tensor copy(const Tensor & src, bool non_blocking=false, c10::optional to_device={}) const; + + private: + Backend backend_; + ScalarType scalar_type_; +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/DeprecatedTypePropertiesRegistry.h b/thirdparty/libtorch/include/ATen/core/DeprecatedTypePropertiesRegistry.h new file mode 100644 index 0000000000..d9b29a35b3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/DeprecatedTypePropertiesRegistry.h @@ -0,0 +1,31 @@ +#pragma once + +// In order to preserve bc, we make DeprecatedTypeProperties instances unique +// just like they are for Type. + +#include +#include + +namespace at { + +class DeprecatedTypeProperties; + +struct CAFFE2_API DeprecatedTypePropertiesDeleter { + void operator()(DeprecatedTypeProperties * ptr); +}; + +class CAFFE2_API DeprecatedTypePropertiesRegistry { + public: + DeprecatedTypePropertiesRegistry(); + + DeprecatedTypeProperties& getDeprecatedTypeProperties(Backend p, ScalarType s) const; + +private: + std::unique_ptr registry + [static_cast(Backend::NumOptions)] + [static_cast(ScalarType::NumOptions)]; +}; + +CAFFE2_API DeprecatedTypePropertiesRegistry& globalDeprecatedTypePropertiesRegistry(); + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Dict.h b/thirdparty/libtorch/include/ATen/core/Dict.h new file mode 100644 index 0000000000..304afbdfde --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Dict.h @@ -0,0 +1,373 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +namespace c10 { +struct IValue; +template class Dict; +struct Type; +using TypePtr = std::shared_ptr; + +namespace impl { +bool shallowEquals(const IValue& lhs, const IValue& rhs); + +using valid_dict_key_types = guts::typelist::typelist< + int64_t, + std::string, + double, + bool, + at::Tensor +>; +} + +namespace detail { + +struct DictKeyHash { + size_t operator()(const IValue& ivalue) const; +}; + +struct DictKeyEqualTo { + bool operator()(const IValue& lhs, const IValue& rhs) const { + return impl::shallowEquals(lhs, rhs); + } +}; + +struct DictImpl final : public c10::intrusive_ptr_target { + using dict_map_type = ska_ordered::order_preserving_flat_hash_map; + struct DictElementTypes final { + TypePtr keyType; + TypePtr valueType; + }; + + explicit DictImpl(dict_map_type dict_, DictElementTypes elementTypes_) + : dict(std::move(dict_)) + , elementTypes(std::move(elementTypes_)) {} + dict_map_type dict; + + DictElementTypes elementTypes; + + intrusive_ptr copy() const; +}; + +} + +namespace impl { +template class DictIterator; + +/** + * A reference to an entry in the Dict. 
+ * Use the `key()` and `value()` methods to read the element. + */ +template +class DictEntryRef final { +public: + explicit DictEntryRef(Iterator iterator) + : iterator_(std::move(iterator)) {} + + Key key() const { + return iterator_->first.template to(); + } + + Value value() const { + return iterator_->second.template to(); + } + + template + void setValue(Value_&& value) const { + static_assert(std::is_constructible::value, "Wrong type for the value argument of setValue()"); + iterator_->second = Value(std::forward(value)); + } + +private: + // allow copying and moving, but only our friends (i.e. the Dict class) can do + // it. Copying/moving this reference wrapper would be too ambiguous to allow it + // in the public API. + DictEntryRef(const DictEntryRef&) = default; + DictEntryRef& operator=(const DictEntryRef&) = default; + DictEntryRef(DictEntryRef&&) noexcept = default; + DictEntryRef& operator=(DictEntryRef&& rhs) & noexcept = default; + + Iterator iterator_; + friend class DictIterator; + friend class Dict; +}; + +// this wraps map_type::iterator to make sure user code can't rely +// on it being the type of the underlying map. +template +class DictIterator final : public std::iterator> { +public: + explicit DictIterator() = default; + ~DictIterator() = default; + + DictIterator(const DictIterator& rhs): entryRef_(rhs.entryRef_) {} + DictIterator(DictIterator&& rhs) noexcept: entryRef_(std::move(rhs.entryRef_)) {} + DictIterator& operator=(const DictIterator& rhs) { + entryRef_ = rhs.entryRef_; + return *this; + } + DictIterator& operator=(DictIterator&& rhs) noexcept { + entryRef_ = std::move(rhs.entryRef_); + return *this; + } + + DictIterator& operator++() { + ++entryRef_.iterator_; + return *this; + } + + DictIterator operator++(int) { + DictIterator copy(*this); + ++*this; + return copy; + } + + const DictEntryRef& operator*() const { + return entryRef_; + } + + const DictEntryRef* operator->() const { + return &entryRef_; + } + + friend typename std::iterator>::difference_type operator-(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.entryRef_.iterator_ - rhs.entryRef_.iterator_; + } + +private: + explicit DictIterator(Iterator iterator): entryRef_(std::move(iterator)) {} + + const Iterator& get_iterator_() const { + return entryRef_.iterator_; + } + + friend bool operator==(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() == rhs.get_iterator_(); + } + + friend bool operator!=(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() != rhs.get_iterator_(); + } + + friend bool operator<(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() < rhs.get_iterator_(); + } + + friend bool operator<=(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() <= rhs.get_iterator_(); + } + + friend bool operator>(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() > rhs.get_iterator_(); + } + + friend bool operator>=(const DictIterator& lhs, const DictIterator& rhs) { + return lhs.get_iterator_() >= rhs.get_iterator_(); + } + + DictEntryRef entryRef_; + + friend class DictIterator; + friend class Dict; +}; + +template Dict toTypedDict(Dict dict); +template Dict toGenericDict(Dict dict); +} + +/** + * An object of this class stores a map from Key to Value. + * + * This is a pointer type. 
After a copy, both Dicts + * will share the same storage: + * + * > Dict a; + * > Dict b = a; + * > b.insert(3, "three"); + * > ASSERT("three" == a.at(3)); + * + * We use this class in the PyTorch kernel API because that + * allows us to do optimizations and switch out the underlying + * map implementation without breaking backwards compatibility + * for the kernel API. + */ +template +class Dict final { +private: + static_assert((std::is_same::value && std::is_same::value) || guts::typelist::contains::value, "Invalid Key type for Dict. We only support int64_t, double, bool, and string."); + + // impl_ stores the underlying map as a ska_ordered::order_preserving_flat_hash_map. + // We intentionally don't offer conversion from/to + // order_preserving_flat_hash_map, return references to it or something like that, + // because such operations would get expensive if we switch out + // the actual map implementation. + // This is an intrusive_ptr because Dict is a pointer type. + // Invariant: This will never be a nullptr, there will always be a valid + // DictImpl. + c10::intrusive_ptr impl_; + + explicit Dict(c10::intrusive_ptr&& impl); + friend struct IValue; + template friend Dict impl::toTypedDict(Dict); + template friend Dict impl::toGenericDict(Dict); + +public: + using key_type = Key; + using mapped_type = Value; + using size_type = typename detail::DictImpl::dict_map_type::size_type; + using iterator = impl::DictIterator; + + /** + * Creates an empty dict. + */ + explicit Dict(); + + /** + * Create a generic dict with runtime type information. + * This only works for c10::impl::GenericDict and is not part of the public API + * but only supposed to be used internally by PyTorch. + */ + explicit Dict(TypePtr keyType, TypePtr valueType); + + ~Dict() = default; + + Dict(const Dict&) = default; + Dict& operator=(const Dict&) = default; + Dict(Dict&&) noexcept; + Dict& operator=(Dict&&) noexcept; + + /** + * Create a new Dict pointing to a deep copy of the same data. + * The Dict returned is a new dict with separate storage. + * Changes in it are not reflected in the original dict or vice versa. + */ + Dict copy() const; + + /** + * Returns an iterator to the first element of the container. + * If the container is empty, the returned iterator will be equal to end(). + */ + iterator begin() const; + + /** + * Returns an iterator to the element following the last element of the container. + * This element acts as a placeholder; attempting to access it results in undefined behavior. + */ + iterator end() const; + + /** + * Checks if the container has no elements. + */ + bool empty() const; + + /** + * Returns the number of elements in the container. + */ + size_type size() const; + + /** + * Erases all elements from the container. After this call, size() returns zero. + * Invalidates any references, pointers, or iterators referring to contained elements. May also invalidate past-the-end iterators. + */ + void clear() const; + + /** + * Inserts element(s) into the container, if the container doesn't already contain an element with an equivalent key. + * May invalidate any references, pointers, or iterators referring to contained elements. + * + * @return A pair consisting of an iterator to the inserted element (or to the element that prevented the insertion) and a bool denoting whether the insertion took place. + */ + template + std::pair insert(Key_&& key, Value_&& value) const; + + /** + * If an element with the given key already exists, it is overwritten with the given value. 
+ * Otherwise, a new element with the given key and value are inserted. + * May invalidate any references, pointers, or iterators referring to contained elements. + * + * @return The bool component is true if the insertion took place and false if the assignment took place. The iterator component is pointing at the element that was inserted or updated. + */ + template + std::pair insert_or_assign(Key_&& key, Value_&& value) const; + + /** + * Removes the element pointed to by iter. + * May invalidate any references, pointers, or iterators referring to contained elements. + * The iterator iter must be valid and dereferenceable. Thus the end() iterator (which is valid, but is not dereferenceable) cannot be used as a value for iter. + */ + void erase(iterator iter) const; + + /** + * Removes the element with the given key, if it exists. + * May invalidate any references, pointers, or iterators referring to contained elements. + * + * @return The number of elements removed. This is either '1' if an element with the key existed, or '0' if it didn't. + */ + C10_NODISCARD size_t erase(const Key& key) const; + + /** + * Returns the mapped value of the element with key equivalent to key. + * If no such element exists, an exception of type std::out_of_range is thrown. + */ + Value at(const Key& key) const; + + /** + * Finds an element with key equivalent to key. + * + * @return Iterator to an element with key equivalent to key. + * If no such element is found, past-the-end (see end()) iterator is returned. + */ + iterator find(const Key& key) const; + + /** + * Checks if there is an element with key equivalent to key in the container. + * + * @return true if there is such an element, otherwise false. + */ + bool contains(const Key& key) const; + + /** + * Increase the capacity so that at least count elements can be stored without + * having to reallocate or rehash. + */ + void reserve(size_type count) const; + + + // private API for now because the return type will change to TypePtr + // instead of optional once types are mandatory. + TypePtr keyType() const; + TypePtr valueType() const; + + // [unsafe set type] + // These functions mutate the tagged type of this dictionary in place. + // There is no checking that the members of the dictionary are instances + // of the new types, nor is there a check that other IValues which + // hold references to this dictionary have the right static type. + // This functionality is used only in the unpickler, where at + // creation type the real type of the dictionary is unknown, but + // then later recovered from the static type information of the + // unpickled object. + void unsafeSetKeyType(TypePtr t); + void unsafeSetValueType(TypePtr t); +}; + +namespace impl { +// GenericDict is how IValue stores dicts. It is, however, not part of the +// public API. Kernels should use Dicts with concrete Key, Value types instead +// (maybe except for some internal prim ops). 
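[Editor's note, not part of the patch: a minimal sketch of the reference semantics that the Dict documentation above describes, using the typed c10::Dict API vendored in this header. The include path comes from this diff; everything else mirrors the doc comments (shared storage on copy-construction, insert vs. insert_or_assign, copy() for a deep copy).]

```cpp
// Illustrative sketch only, assuming the vendored headers are on the include path.
#include <ATen/core/Dict.h>
#include <cassert>
#include <string>

void dict_reference_semantics() {
  c10::Dict<int64_t, std::string> a;
  c10::Dict<int64_t, std::string> b = a;         // b shares storage with a (Dict is a pointer type)
  b.insert(3, "three");                          // no-op if the key already exists
  assert(a.at(3) == "three");                    // the insert is visible through a as well
  b.insert_or_assign(3, "drei");                 // overwrites the existing mapping
  assert(a.at(3) == "drei");

  c10::Dict<int64_t, std::string> c = a.copy();  // deep copy: separate storage
  c.insert(4, "four");
  assert(!a.contains(4));                        // the original dict is unaffected
  assert(c.erase(4) == 1);                       // erase(key) returns the number of removed elements
}
```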
+using GenericDict = Dict; + +} +} + +namespace torch { + template using Dict = c10::Dict; +} + +#include diff --git a/thirdparty/libtorch/include/ATen/core/Dict_inl.h b/thirdparty/libtorch/include/ATen/core/Dict_inl.h new file mode 100644 index 0000000000..a0e57fb182 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Dict_inl.h @@ -0,0 +1,204 @@ +#pragma once + +#include +#include + +namespace c10 { + +template TypePtr getTypePtr(); +std::string toString(TypePtr typePtr); + +namespace impl { +inline bool shallowEquals(const IValue& lhs, const IValue& rhs) { + if (lhs.isNone()) { + return rhs.isNone(); + } else if (lhs.isInt()) { + return rhs.isInt() && lhs.toInt() == rhs.toInt(); + } else if (lhs.isString()) { + return rhs.isString() && lhs.toStringRef() == rhs.toStringRef(); + } else if (lhs.isDouble()) { + return rhs.isDouble() && lhs.toDouble() == rhs.toDouble(); + } else if (lhs.isBool()) { + return rhs.isBool() && lhs.toBool() == rhs.toBool(); + } else if (lhs.isIntList()) { + return rhs.isIntList() && lhs.toIntListRef() == rhs.toIntListRef(); + } else if (lhs.isTensor()) { + return lhs.toTensor().is_same(rhs.toTensor()); + } else { + AT_ERROR("shallowEquals(IValue, IValue) not implemented for type ", lhs.tagKind()); + } +} + +template +Dict toTypedDict(GenericDict dict) { + TORCH_INTERNAL_ASSERT(*getTypePtr() == *dict.impl_->elementTypes.keyType, "Tried to cast a Dict<", toString(dict.impl_->elementTypes.keyType), ", ", toString(dict.impl_->elementTypes.valueType) ,"> to a Dict<", toString(getTypePtr()), ", ", toString(getTypePtr()), ">. Key types mismatch."); + TORCH_INTERNAL_ASSERT(*getTypePtr() == *dict.impl_->elementTypes.valueType, "Tried to cast a Dict<", toString(dict.impl_->elementTypes.keyType), ", ", toString(dict.impl_->elementTypes.valueType) ,"> to a Dict<", toString(getTypePtr()), ", ", toString(getTypePtr()), ">. Value types mismatch."); + + return Dict(std::move(dict.impl_)); +} + +template +GenericDict toGenericDict(Dict dict) { + return GenericDict(std::move(dict.impl_)); +} +} + +namespace detail { + +inline size_t DictKeyHash::operator()(const IValue& ivalue) const { + if (ivalue.isInt()) { + return std::hash()(ivalue.toInt()); + } else if (ivalue.isString()) { + return std::hash()(ivalue.toStringRef()); + } else if (ivalue.isDouble()) { + return std::hash()(ivalue.toDouble()); + } else if (ivalue.isBool()) { + return std::hash()(ivalue.toBool()); + } else if (ivalue.isTensor()) { + return std::hash()(ivalue.toTensor().unsafeGetTensorImpl()); + } else { + throw std::runtime_error("Can't hash IValues with this tag"); + } +} + +inline intrusive_ptr DictImpl::copy() const { + return make_intrusive(dict, elementTypes); +} + +} + +template +Dict::Dict() + :Dict(make_intrusive( + detail::DictImpl::dict_map_type(), + detail::DictImpl::DictElementTypes{getTypePtr(), getTypePtr()})) { + static_assert(!std::is_same::value, "This constructor is not valid for Dict. Please use c10::impl::GenericDict(keyType, valueType) instead, or if you absolutely have to, use c10::impl::GenericDict(c10::impl::deprecatedUntypedDict())."); + static_assert(!std::is_same::value, "This constructor is not valid for Dict<_, IValue>. 
Please use c10::impl::GenericDict(keyType, valueType) instead, or if you absolutely have to, use c10::impl::GenericDict(c10::impl::deprecatedUntypedDict())."); +} + +template +Dict::Dict(TypePtr keyType, TypePtr valueType) +: Dict(make_intrusive( + detail::DictImpl::dict_map_type(), + detail::DictImpl::DictElementTypes {std::move(keyType), std::move(valueType)})) { + static_assert(std::is_same::value, "This constructor is only valid for c10::impl::GenericDict."); + static_assert(std::is_same::value, "This constructor is only valid for c10::impl::GenericDict."); +} + +template +Dict::Dict(Dict&& rhs) noexcept: impl_(std::move(rhs.impl_)) { + rhs.impl_ = make_intrusive(detail::DictImpl::dict_map_type(), impl_->elementTypes); +} + +template +Dict::Dict(c10::intrusive_ptr&& impl): impl_(std::move(impl)) {} + +template +Dict& Dict::operator=(Dict&& rhs) noexcept { + impl_ = std::move(rhs.impl_); + rhs.impl_ = make_intrusive(detail::DictImpl::dict_map_type(), impl_->elementTypes); + return *this; +} + +template +Dict Dict::copy() const { + return Dict(impl_->copy()); +} + +template +typename Dict::iterator Dict::begin() const { + return iterator{impl_->dict.begin()}; +} + +template +typename Dict::iterator Dict::end() const { + return iterator{impl_->dict.end()}; +} + +template +bool Dict::empty() const { + return impl_->dict.empty(); +} + +template +typename Dict::size_type Dict::size() const { + return impl_->dict.size(); +} + +template +void Dict::clear() const { + impl_->dict.clear(); +} + +template +template +std::pair::iterator, bool> Dict::insert(Key_&& key, Value_&& value) const { + static_assert(std::is_constructible::value, "Wrong type for the key argument of Dict::insert"); + static_assert(std::is_constructible::value, "Wrong type for the value argument of Dict::insert"); + auto inserted = impl_->dict.insert(std::pair{ + Key(std::forward(key)), + Value(std::forward(value))}); + return {iterator{inserted.first}, inserted.second}; +} + +template +template +std::pair::iterator, bool> Dict::insert_or_assign(Key_&& key, Value_&& value) const { + static_assert(std::is_constructible::value, "Wrong type for the key argument of Dict::insert_or_assign"); + static_assert(std::is_constructible::value, "Wrong type for the value argument of Dict::insert_or_assign"); + auto inserted = impl_->dict.insert_or_assign( + Key(std::forward(key)), + Value(std::forward(value))); + return {iterator{inserted.first}, inserted.second}; +} + +template +void Dict::erase(iterator iter) const { + impl_->dict.erase(iter.entryRef_.iterator_); +} + +template +C10_NODISCARD size_t Dict::erase(const Key& key) const { + return impl_->dict.erase(key); +} + +template +Value Dict::at(const Key& key) const { + return impl_->dict.at(key).template to(); +} + +template +typename Dict::iterator Dict::find(const Key& key) const { + return iterator{impl_->dict.find(key)}; +} + +template +bool Dict::contains(const Key& key) const { + return end() != find(key); +} + +template +void Dict::reserve(size_type count) const { + impl_->dict.reserve(count); +} + +template +TypePtr Dict::keyType() const { + return impl_->elementTypes.keyType; +} + +template +TypePtr Dict::valueType() const { + return impl_->elementTypes.valueType; +} +template +void Dict::unsafeSetKeyType(TypePtr t) { + impl_->elementTypes.keyType = std::move(t); +} + +template +void Dict::unsafeSetValueType(TypePtr t) { + impl_->elementTypes.valueType = std::move(t); +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/DimVector.h 
b/thirdparty/libtorch/include/ATen/core/DimVector.h new file mode 100644 index 0000000000..6e9e2c037a --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/DimVector.h @@ -0,0 +1,13 @@ +#pragma once + +#include +#include + +namespace at { + +constexpr size_t kDimVectorStaticSize = 5; + +/// A container for sizes or strides +using DimVector = SmallVector; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Dimname.h b/thirdparty/libtorch/include/ATen/core/Dimname.h new file mode 100644 index 0000000000..499e7f52a1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Dimname.h @@ -0,0 +1,51 @@ +#pragma once +#include + +#ifdef BUILD_NAMEDTENSOR +#include +#include +#include +#include + +namespace at { + +enum class NameType: uint8_t { BASIC, WILDCARD }; + +struct CAFFE2_API Dimname { + static Dimname fromSymbol(Symbol name); + static Dimname wildcard(); + static bool isValidName(const std::string& name); + + NameType type() const { return type_; } + Symbol symbol() const { return name_; } + + bool isBasic() const { return type_ == NameType::BASIC; } + bool isWildcard() const { return type_ == NameType::WILDCARD; } + + bool matches(Dimname other) const; + optional unify(Dimname other) const; + + private: + Dimname(Symbol name) + : name_(name), type_(NameType::BASIC) {} + Dimname(Symbol name, NameType type) + : name_(name), type_(type) {} + + Symbol name_; + NameType type_; +}; + +using DimnameList = c10::ArrayRef; + +CAFFE2_API std::ostream& operator<<(std::ostream& out, const Dimname& dimname); + +inline bool operator==(const Dimname& lhs, const Dimname& rhs) { + return lhs.symbol() == rhs.symbol(); +} + +inline bool operator!=(const Dimname& lhs, const Dimname& rhs) { + return !(lhs == rhs); +} + +} // namespace at +#endif diff --git a/thirdparty/libtorch/include/ATen/core/DistributionsHelper.h b/thirdparty/libtorch/include/ATen/core/DistributionsHelper.h new file mode 100644 index 0000000000..03252d9200 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/DistributionsHelper.h @@ -0,0 +1,264 @@ +#pragma once + +// define constants like M_PI and C keywords for MSVC +#ifdef _MSC_VER +#define _USE_MATH_DEFINES +#include +#endif + +#include +#include +#include +#include +#include + +/** + * Distributions kernel adapted from THRandom.cpp + * The kernels try to follow std::random distributions signature + * For instance: in ATen + * auto gen = at::detail::createCPUGenerator(); + * at::uniform_real_distribution uniform(0, 1); + * auto sample = uniform(gen.get()); + * + * vs std::random + * + * std::mt19937 gen; + * std::uniform_real_distribution uniform(0, 1); + * auto sample = uniform(gen); + */ + + +namespace at { + +// Using VectorType in Box-muller derived distributions to avoid +// code duplication +template +struct VectorType { }; + +#if defined(__CUDACC__) || defined(__HIPCC__) +template <> struct VectorType { using type = at::detail::Array; }; +#endif +template <> struct VectorType { using type = at::detail::Array; }; +template <> struct VectorType { using type = at::detail::Array; }; +template <> struct VectorType { using type = at::detail::Array; }; + +template +using vect_type = typename VectorType::type; + +// Using DistAccumType in accumulate types for distributions. +// Note: Ideally we'd be using ATen/AccumulateType.h but looks +// like the there is some inconsistency in how accumulate types +// are mapped currently, e.g. for the cpu side, float is mapped +// to double. 
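[Editor's note, not part of the vendored header: a sketch of the usage pattern described in this header's top comment, with the template arguments spelled out; the distribution types it uses are defined just below. The CPUGenerator include path and the exact return type of at::detail::createCPUGenerator() are assumptions taken from that comment, and the lock follows Note [Acquire lock when using random generators] further down in this patch.]

```cpp
// Illustrative sketch only; header paths are assumptions based on this patch.
#include <ATen/CPUGenerator.h>
#include <ATen/core/DistributionsHelper.h>
#include <mutex>

void sampling_sketch() {
  auto gen = at::detail::createCPUGenerator();          // per the usage note in this header
  std::lock_guard<std::mutex> lock(gen->mutex_);        // generators are not thread-safe
  at::uniform_real_distribution<double> uniform(0, 1);  // samples in [0, 1)
  double u = uniform(gen.get());
  // normal_distribution uses Box-Muller internally:
  //   r = sqrt(-2 ln(1 - u2)), theta = 2*pi*u1, samples r*cos(theta) and caches r*sin(theta)
  at::normal_distribution<double> normal(/*mean=*/0.0, /*stdv=*/1.0);
  double n = normal(gen.get());
  (void)u; (void)n;
}
```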
+template +struct DistAccumType { }; + +#if defined(__CUDACC__) || defined(__HIPCC__) +template <> struct DistAccumType { using type = float; }; +#endif +template <> struct DistAccumType { using type = float; }; +template <> struct DistAccumType { using type = float; }; +template <> struct DistAccumType { using type = double; }; + +template +using dist_acctype = typename DistAccumType::type; + +// Constants for uniform distribution +// doubles have 52 bits of mantissa (fractional part) +constexpr uint64_t DOUBLE_MASK = (1ULL << 53) - 1; +constexpr double DOUBLE_DIVISOR = 1.0 / (1ULL << 53); + +// floats have 23 bits of mantissa (fractional part) +constexpr uint32_t FLOAT_MASK = (1 << 24) - 1; +constexpr float FLOAT_DIVISOR = 1.0f / (1 << 24); + +/** + * Samples a uniform distribution in the range [0,1) of type T + */ +template +struct uniform_real_distribution { + + inline uniform_real_distribution(T a_in, T b_in) { + TORCH_CHECK(a_in <= b_in); + TORCH_CHECK(b_in-a_in <= std::numeric_limits::max()); + a = a_in; + b = b_in; + } + + inline dist_acctype operator()(at::CPUGenerator* generator){ + dist_acctype x; + if(std::is_same::value) { + x = (generator->random64() & DOUBLE_MASK) * DOUBLE_DIVISOR; + } else { + x = (generator->random() & FLOAT_MASK) * FLOAT_DIVISOR; + } + return (x * (b - a) + a); + } + + private: + T a; + T b; +}; + +/** + * Samples a normal distribution using the Box-Muller method + * Takes mean and standard deviation as inputs + * Note that Box-muller method returns two samples at a time. + * Hence, we cache the "next" sample in the CPUGenerator class. + */ +template +struct normal_distribution { + + inline normal_distribution(T mean_in, T stdv_in) { + TORCH_CHECK(stdv_in > 0); + mean = mean_in; + stdv = stdv_in; + } + + inline dist_acctype operator()(at::CPUGenerator* generator){ + dist_acctype ret; + // return cached values if available + if (std::is_same::value) { + if (generator->next_double_normal_sample()) { + ret = *(generator->next_double_normal_sample()) * stdv + mean; + // reset c10::optional to null + generator->set_next_double_normal_sample(c10::optional()); + return ret; + } + } else { + if (generator->next_float_normal_sample()) { + ret = *(generator->next_float_normal_sample()) * stdv + mean; + // reset c10::optional to null + generator->set_next_float_normal_sample(c10::optional()); + return ret; + } + } + // otherwise generate new normal values + uniform_real_distribution uniform(0.0, 1.0); + const dist_acctype u1 = uniform(generator); + const dist_acctype u2 = uniform(generator); + const dist_acctype r = ::sqrt(static_cast(-2.0) * ::log(static_cast(1.0)-u2)); + const dist_acctype theta = static_cast(2.0) * static_cast(M_PI) * u1; + if (std::is_same::value) { + dist_acctype cache = r * ::sin(theta); + generator->set_next_double_normal_sample(c10::optional(cache)); + } else { + dist_acctype cache = r * ::sin(theta); + generator->set_next_float_normal_sample(c10::optional(cache)); + } + ret = r * ::cos(theta) * stdv + mean; + return ret; + } + + private: + T mean; + T stdv; +}; + +/** + * Samples a bernoulli distribution given a probability input + */ +template +struct bernoulli_distribution { + + inline bernoulli_distribution(T p_in) { + TORCH_CHECK(p_in >= 0 && p_in <= 1); + p = p_in; + } + + inline int operator()(at::CPUGenerator* generator) { + uniform_real_distribution uniform(0.0, 1.0); + return uniform(generator) < p; + } + + private: + T p; +}; + +/** + * Samples a geometric distribution given a probability input + */ +template +struct 
geometric_distribution { + + inline geometric_distribution(T p_in) { + TORCH_CHECK(p_in > 0 && p_in < 1); + p = p_in; + } + + inline int operator()(at::CPUGenerator* generator) { + uniform_real_distribution uniform(0.0, 1.0); + dist_acctype sample = uniform(generator); + return static_cast(::log(static_cast(1.0)-sample) / ::log(p)) + 1; + } + + private: + T p; +}; + +/** + * Samples an exponential distribution given a lambda input + */ +template +struct exponential_distribution { + + inline exponential_distribution(T lambda_in) { + lambda = lambda_in; + } + + inline T operator()(at::CPUGenerator* generator) { + uniform_real_distribution uniform(0.0, 1.0); + dist_acctype sample = uniform(generator); + return static_cast(-1.0) / lambda * ::log(static_cast(1.0)-sample); + } + + private: + T lambda; +}; + +/** + * Samples a cauchy distribution given median and sigma as inputs + */ +template +struct cauchy_distribution { + + inline cauchy_distribution(T median_in, T sigma_in) { + median = median_in; + sigma = sigma_in; + } + + inline T operator()(at::CPUGenerator* generator) { + uniform_real_distribution uniform(0.0, 1.0); + return median + sigma * ::tan(static_cast(M_PI) * (uniform(generator)-static_cast(0.5))); + } + + private: + T median; + T sigma; +}; + +/** + * Samples a lognormal distribution + * Takes mean and standard deviation as inputs + * Outputs two samples at a time + */ +template +struct lognormal_distribution { + + inline lognormal_distribution(T mean_in, T stdv_in) { + TORCH_CHECK(stdv_in > 0); + mean = mean_in; + stdv = stdv_in; + } + + inline T operator()(at::CPUGenerator* generator){ + normal_distribution normal(mean, stdv); + return ::exp(normal(generator)); + } + + private: + T mean; + T stdv; +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/EnableNamedTensor.h b/thirdparty/libtorch/include/ATen/core/EnableNamedTensor.h new file mode 100644 index 0000000000..f304ca3e88 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/EnableNamedTensor.h @@ -0,0 +1,11 @@ +#pragma once + +#include + +// We are working on removing the BUILD_NAMEDTENSOR flag from the codebase. +// +// PyTorch's codegen also uses a similar flag. You can find it in +// - aten/src/ATen/env.py +#ifndef BUILD_NAMEDTENSOR +#define BUILD_NAMEDTENSOR +#endif diff --git a/thirdparty/libtorch/include/ATen/core/Formatting.h b/thirdparty/libtorch/include/ATen/core/Formatting.h new file mode 100644 index 0000000000..63c5e12e96 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Formatting.h @@ -0,0 +1,29 @@ +#pragma once + +#include +#include +#include + + +namespace c10 { +CAFFE2_API std::ostream& operator<<(std::ostream& out, Backend b); +} +namespace at { + +CAFFE2_API std::ostream& operator<<(std::ostream& out, const DeprecatedTypeProperties& t); +CAFFE2_API std::ostream& print( + std::ostream& stream, + const Tensor& tensor, + int64_t linesize); +static inline std::ostream& operator<<(std::ostream & out, const Tensor & t) { + return print(out,t,80); +} +static inline void print(const Tensor & t, int64_t linesize=80) { + print(std::cout,t,linesize); +} + +static inline std::ostream& operator<<(std::ostream & out, Scalar s) { + return out << (s.isFloatingPoint() ? 
s.toDouble() : s.toLong()); +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/Generator.h b/thirdparty/libtorch/include/ATen/core/Generator.h new file mode 100644 index 0000000000..161824a11f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Generator.h @@ -0,0 +1,89 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +/** + * Note [Generator] + * ~~~~~~~~~~~~~~~~ + * A Pseudo Random Number Generator (PRNG) is an engine that uses an algorithm to + * generate a seemingly random sequence of numbers, that may be later be used in creating + * a random distribution. Such an engine almost always maintains a state and requires a + * seed to start off the creation of random numbers. Often times, users have + * found it beneficial to be able to explicitly create, retain, and destroy + * PRNG states and also be able to have control over the seed value. + * + * A Generator in ATen gives users the ability to read, write and modify a PRNG engine. + * For instance, it does so by letting users seed a PRNG engine, fork the state of the + * engine, etc. + * + * By default, there is one generator per device, and a device's generator is + * lazily created. A user can use the torch.Generator() api to create their own generator. + * Currently torch.Generator() can only create a CPUGenerator. + */ + +/** + * Note [Acquire lock when using random generators] + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Generator and its derived classes are NOT thread-safe. Please note that most of the + * places where we have inserted locking for generators are historically based, and we + * haven't actually checked that everything is truly thread safe (and it probably isn't). + * Please use the public mutex_ when using any methods from these classes, except for the + * read-only methods. You can learn about the usage by looking into the unittests + * (aten/src/ATen/cpu_generator_test.cpp) and other places where we have used lock_guard. + * + * TODO: Look into changing the threading semantics of Generators in ATen (e.g., making + * them non-thread safe and instead making the generator state splittable, to accommodate + * forks into other threads). 
+ */ + +namespace at { + +// The default seed is selected to be a large number +// with good distribution of 0s and 1s in bit representation +constexpr uint64_t default_rng_seed_val = 67280421310721; + +struct CAFFE2_API Generator { + // Constructors + Generator(Device device_in); + + // Delete all copy and move assignment in favor of clone() + // method + Generator(const Generator& other) = delete; + Generator(Generator&& other) = delete; + Generator& operator=(const Generator& other) = delete; + + virtual ~Generator() = default; + std::shared_ptr clone() const; + + // Common methods for all generators + virtual void set_current_seed(uint64_t seed) = 0; + virtual uint64_t current_seed() const = 0; + virtual uint64_t seed() = 0; + Device device() const; + + // See Note [Acquire lock when using random generators] + std::mutex mutex_; + + private: + Device device_; + virtual Generator* clone_impl() const = 0; +}; + +namespace detail { + +CAFFE2_API uint64_t getNonDeterministicRandom(bool is_cuda = false); + +} // namespace detail + +} // namespace at + diff --git a/thirdparty/libtorch/include/ATen/core/LegacyDeviceTypeInit.h b/thirdparty/libtorch/include/ATen/core/LegacyDeviceTypeInit.h new file mode 100644 index 0000000000..dd3a5529a4 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/LegacyDeviceTypeInit.h @@ -0,0 +1,37 @@ +#pragma once + +// The legacy mechanism for initializing device types; this is used by +// LegacyTypeDispatch. + +#include +#include +#include +#include + +namespace at { + +struct CAFFE2_API LegacyDeviceTypeInitInterface { + virtual ~LegacyDeviceTypeInitInterface() {} + virtual void initCPU() const { + AT_ERROR("cannot use CPU without ATen library"); + } + virtual void initCUDA() const { + AT_ERROR("cannot use CUDA without ATen CUDA library"); + } + virtual void initHIP() const { + AT_ERROR("cannot use HIP without ATen HIP library"); + } +}; + +struct CAFFE2_API LegacyDeviceTypeInitArgs {}; + +C10_DECLARE_REGISTRY( + LegacyDeviceTypeInitRegistry, + LegacyDeviceTypeInitInterface, + LegacyDeviceTypeInitArgs); +#define REGISTER_LEGACY_TYPE_INIT(clsname) \ + C10_REGISTER_CLASS(LegacyDeviceTypeInitRegistry, clsname, clsname) + +CAFFE2_API const LegacyDeviceTypeInitInterface& getLegacyDeviceTypeInit(); + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/LegacyTypeDispatch.h b/thirdparty/libtorch/include/ATen/core/LegacyTypeDispatch.h new file mode 100644 index 0000000000..e6ed3ecd51 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/LegacyTypeDispatch.h @@ -0,0 +1,92 @@ +#pragma once + +// The legacy mechanism for dispatching operators in ATen is a Type +// object, which is essentially a giant virtual dispatch table +// for every operation we support dynamically dispatching over. +// +// This has been deprecated in favor of ATenDispatch, and in the future, +// c10 dispatcher. +// TODO: Clean up what remains here + +#include +#include +#include +#include +#include +#include +#include + +namespace at { + +class CAFFE2_API LegacyTypeDispatch { + public: + void initForTensorTypeSet(TensorTypeSet ts) { + // TODO: Avoid use of legacyExtractTypeId here. The key + // problem is that you may get a TensorTypeSet with + // VariableTensorId set; should you initialize the "underlying" + // type in that case? Hard to say. 
+ auto b = tensorTypeIdToBackend(legacyExtractTypeId(ts)); + auto p = backendToDeviceType(b); + static std::once_flag cpu_once; + static std::once_flag cuda_once; + if (p == DeviceType::CPU) { + std::call_once(cpu_once, [] { + getLegacyDeviceTypeInit().initCPU(); + }); + } else if (p == DeviceType::CUDA) { + std::call_once(cuda_once, [] { + getLegacyDeviceTypeInit().initCUDA(); + }); + } else if (p == DeviceType::HIP) { + std::call_once(cuda_once, [] { + getLegacyDeviceTypeInit().initHIP(); + }); + } + } +}; + +CAFFE2_API LegacyTypeDispatch& globalLegacyTypeDispatch(); + +// A RAII, thread local (!) guard that will disable dispatch to variable +// handler. +// +// NOTE [ Treating Variables as non-Variables in type dispatch ] +// +// What exactly does AutoNonVariableType do? The short answer is, it causes +// dispatches on ATen functions to go to the non-variable implementation, +// bypassing autograd handling (and also profiling and tracing). +// +// To understand why this guard exists, it's helpful to understand the history +// behind how Variable was implemented. Previously, Variables were implemented +// as a wrapper on Tensors; so the act of processing a Variable involved +// unwrapping the underlying Tensor, and then calling the underlying base +// operation on /that/ operation +// +// However, after the Variable/Tensor merge, there is no concept of unwrapping +// a tensor anymore. If you just call the operation on the same variable +// again inside your VariableType handler, you'll dispatch back to +// VariableType, which is not what we want. +// +// The solution to the above problem is to add `at::NonVariableTypeMode`, which +// when enabled will cause `legacyTensorType()` and `getType()` to always return +// non-Variable type, even if the tensor being called on is a variable. +// +// TODO: Since `torch::NoGradGuard` serves almost the same purpose in libtorch, +// we should merge these two thread-local guards. However, NoGradGuard does +// something subtly different: it turns off gradient recording, but DOES NOT +// skip VariableType implementation (as we still might need to profile or +// trace). To unify the two, we would first have to move profiling and tracing +// out of VariableType. + +struct CAFFE2_API AutoNonVariableTypeMode { + // NB: The enabled parameter must ALWAYS be black, as Henry Ford used to say. 
+ // TODO: Eliminate this parameter entirely + AutoNonVariableTypeMode(bool enabled = true) : + guard_(TensorTypeId::VariableTensorId) { + + TORCH_INTERNAL_ASSERT(enabled); + } + c10::impl::ExcludeTensorTypeIdGuard guard_; +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/List.h b/thirdparty/libtorch/include/ATen/core/List.h new file mode 100644 index 0000000000..3126da8fb7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/List.h @@ -0,0 +1,473 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +namespace at { +class Tensor; +} +namespace c10 { +struct IValue; +template class List; +struct Type; +using TypePtr = std::shared_ptr; + +namespace detail { + +template +struct ListImpl final : public c10::intrusive_ptr_target { + using list_type = std::vector; + + explicit ListImpl(list_type list_, TypePtr elementType_) + : list(std::move(list_)) + , elementType(std::move(elementType_)) {} + + list_type list; + + TypePtr elementType; + + intrusive_ptr copy() const { + return make_intrusive(list, elementType); + } +}; +} + +namespace impl { + +template class ListIterator; + +template class ListElementReference; + +template +void swap(ListElementReference&& lhs, ListElementReference&& rhs); + +template +class ListElementReference final { +public: + operator T() const; + + ListElementReference& operator=(T&& new_value) &&; + + ListElementReference& operator=(const T& new_value) &&; + + // assigning another ref to this assigns the underlying value + ListElementReference& operator=(ListElementReference&& rhs) &&; + + friend void swap(ListElementReference&& lhs, ListElementReference&& rhs); + +private: + ListElementReference(Iterator iter) + : iterator_(iter) {} + + ListElementReference(const ListElementReference&) = delete; + ListElementReference& operator=(const ListElementReference&) = delete; + + // allow moving, but only our friends (i.e. the List class) can move us + ListElementReference(ListElementReference&&) noexcept = default; + ListElementReference& operator=(ListElementReference&& rhs) & noexcept { + iterator_ = std::move(rhs.iterator_); + return *this; + } + + friend class List; + friend class ListIterator; + + Iterator iterator_; +}; + +// this wraps vector::iterator to make sure user code can't rely +// on it being the type of the underlying vector. 
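+// A minimal usage sketch (illustrative only; `lst` and the element type
+// int64_t are placeholders): because dereferencing yields a
+// ListElementReference proxy rather than a plain T&, reads are usually done
+// by value and writes go through the proxy or List::set, e.g.
+//
+//   for (int64_t v : lst) { /* read-only use of v */ }
+//   lst[0] = 42;        // assigns through the proxy
+//   lst.set(1, 7);      // equivalent explicit form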
+template +class ListIterator final : public std::iterator { +public: + explicit ListIterator() = default; + ~ListIterator() = default; + + ListIterator(const ListIterator&) = default; + ListIterator(ListIterator&&) noexcept = default; + ListIterator& operator=(const ListIterator&) = default; + ListIterator& operator=(ListIterator&&) = default; + + ListIterator& operator++() { + ++iterator_; + return *this; + } + + ListIterator operator++(int) { + ListIterator copy(*this); + ++*this; + return copy; + } + + ListIterator& operator--() { + --iterator_; + return *this; + } + + ListIterator operator--(int) { + ListIterator copy(*this); + --*this; + return copy; + } + + ListIterator& operator+=(typename List::size_type offset) { + iterator_ += offset; + return *this; + } + + ListIterator& operator-=(typename List::size_type offset) { + iterator_ -= offset; + return *this; + } + + ListIterator operator+(typename List::size_type offset) const { + return ListIterator{iterator_ + offset}; + } + + ListIterator operator-(typename List::size_type offset) const { + return ListIterator{iterator_ - offset}; + } + + friend typename std::iterator::difference_type operator-(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ - rhs.iterator_; + } + + ListElementReference operator*() const { + return {iterator_}; + } + +private: + explicit ListIterator(Iterator iterator): iterator_(std::move(iterator)) {} + + Iterator iterator_; + + friend bool operator==(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ == rhs.iterator_; + } + + friend bool operator!=(const ListIterator& lhs, const ListIterator& rhs) { + return !(lhs == rhs); + } + + friend bool operator<(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ < rhs.iterator_; + } + + friend bool operator<=(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ <= rhs.iterator_; + } + + friend bool operator>(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ > rhs.iterator_; + } + + friend bool operator>=(const ListIterator& lhs, const ListIterator& rhs) { + return lhs.iterator_ >= rhs.iterator_; + } + + friend class ListIterator::list_type::iterator, StorageT>; + friend class List; +}; + +template List toTypedList(List list); +template List toGenericList(List list); +const IValue* ptr_to_first_element(const List& list); +template List toList(std::vector list); +template const std::vector& toVector(const List& list); +} +template bool list_is_equal(const List& lhs, const List& rhs); + +/** + * An object of this class stores a list of values of type T. + * + * This is a pointer type. After a copy, both Lists + * will share the same storage: + * + * > List a; + * > List b = a; + * > b.push_back("three"); + * > ASSERT("three" == a.get(0)); + * + * We use this class in the PyTorch kernel API instead of + * std::vector, because that allows us to do optimizations + * and switch out the underlying list implementation without + * breaking backwards compatibility for the kernel API. 
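+ *
+ * A minimal sketch of typical element access (illustrative; the element type
+ * int64_t and the variable name are placeholders):
+ *
+ * > List<int64_t> sizes({2, 3, 4});
+ * > sizes.push_back(5);
+ * > int64_t first = sizes.get(0);   // first == 2
+ * > sizes[1] = 7;                   // assigns through a proxy reference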
+ */ +template +class List final { +private: + // List of types that don't use IValue based lists + using types_with_direct_list_implementation = guts::typelist::typelist< + int64_t, + double, + bool, + at::Tensor + >; + + using StorageT = guts::conditional_t< + guts::typelist::contains::value, + T, // The types listed in types_with_direct_list_implementation store the list as std::vector + IValue // All other types store the list as std::vector + >; + + // This is an intrusive_ptr because List is a pointer type. + // Invariant: This will never be a nullptr, there will always be a valid + // ListImpl. + c10::intrusive_ptr> impl_; + + using internal_reference_type = impl::ListElementReference::StorageT>::list_type::iterator, typename List::StorageT>; + +public: + using value_type = T; + using size_type = typename detail::ListImpl::list_type::size_type; + using iterator = impl::ListIterator::list_type::iterator, StorageT>; + using reverse_iterator = impl::ListIterator::list_type::reverse_iterator, StorageT>; + using internal_value_type_test_only = StorageT; + + /** + * Constructs an empty list. + */ + explicit List(); + + /** + * Constructs a list with some initial values. + * Example: + * List a({2, 3, 4}); + */ + explicit List(std::initializer_list initial_values); + explicit List(ArrayRef initial_values); + + /** + * Create a generic list with runtime type information. + * This only works for c10::impl::GenericList and is not part of the public API + * but only supposed to be used internally by PyTorch. + */ + explicit List(TypePtr elementType); + + List(const List&) = default; + List& operator=(const List&) = default; + List(List&&) noexcept; + List& operator=(List&&) noexcept; + + /** + * Create a new List pointing to a deep copy of the same data. + * The List returned is a new list with separate storage. + * Changes in it are not reflected in the original list or vice versa. + */ + List copy() const; + + /** + * Returns the element at specified location pos, with bounds checking. + * If pos is not within the range of the container, an exception of type std::out_of_range is thrown. + */ + value_type get(size_type pos) const; + + /** + * Moves out the element at the specified location pos and returns it, with bounds checking. + * If pos is not within the range of the container, an exception of type std::out_of_range is thrown. + * The list contains an invalid element at position pos afterwards. Any operations + * on it before re-setting it are invalid. + */ + value_type extract(size_type pos) const; + + /** + * Returns a reference to the element at specified location pos, with bounds checking. + * If pos is not within the range of the container, an exception of type std::out_of_range is thrown. + * + * You cannot store the reference, but you can read it and assign new values to it: + * + * List list = ...; + * list[2] = 5; + * int64_t v = list[1]; + */ + internal_reference_type operator[](size_type pos) const; + + /** + * Assigns a new value to the element at location pos. + */ + void set(size_type pos, const value_type& value) const; + + /** + * Assigns a new value to the element at location pos. + */ + void set(size_type pos, value_type&& value) const; + + /** + * Returns an iterator to the first element of the container. + * If the container is empty, the returned iterator will be equal to end(). + */ + iterator begin() const; + + /** + * Returns an iterator to the element following the last element of the container. 
+ * This element acts as a placeholder; attempting to access it results in undefined behavior. + */ + iterator end() const; + + /** + * Checks if the container has no elements. + */ + bool empty() const; + + /** + * Returns the number of elements in the container + */ + size_type size() const; + + /** + * Increase the capacity of the vector to a value that's greater or equal to new_cap. + */ + void reserve(size_type new_cap) const; + + /** + * Erases all elements from the container. After this call, size() returns zero. + * Invalidates any references, pointers, or iterators referring to contained elements. Any past-the-end iterators are also invalidated. + */ + void clear() const; + + /** + * Inserts value before pos. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + iterator insert(iterator pos, const T& value) const; + + /** + * Inserts value before pos. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + iterator insert(iterator pos, T&& value) const; + + /** + * Inserts a new element into the container directly before pos. + * The new element is constructed with the given arguments. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + template + iterator emplace(iterator pos, Args&&... value) const; + + /** + * Appends the given element value to the end of the container. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void push_back(const T& value) const; + + /** + * Appends the given element value to the end of the container. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void push_back(T&& value) const; + + /** + * Appends the given list to the end of the container. Uses at most one memory allocation. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void append(List lst) const; + + /** + * Appends the given element value to the end of the container. + * The new element is constructed with the given arguments. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + template + void emplace_back(Args&&... args) const; + + /** + * Removes the element at pos. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + iterator erase(iterator pos) const; + + /** + * Removes the elements in the range [first, last). + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + iterator erase(iterator first, iterator last) const; + + /** + * Removes the last element of the container. + * Calling pop_back on an empty container is undefined. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void pop_back() const; + + /** + * Resizes the container to contain count elements. 
+ * If the current size is less than count, additional default-inserted elements are appended. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void resize(size_type count) const; + + /** + * Resizes the container to contain count elements. + * If the current size is less than count, additional copies of value are appended. + * May invalidate any references, pointers, or iterators referring to contained elements. Any past-the-end iterators may also be invalidated. + */ + void resize(size_type count, const T& value) const; + + /** + * Compares two lists for equality. Two lists are equal if they have the + * same number of elements and for each list position the elements at + * that position are equal. + */ + friend bool list_is_equal(const List& lhs, const List& rhs); + + /** + * Returns the number of Lists currently pointing to this same list. + * If this is the only instance pointing to this list, returns 1. + */ + // TODO Test use_count + size_t use_count() const; + + TypePtr elementType() const; + + // See [unsafe set type] for why this exists. + void unsafeSetElementType(TypePtr t); + +private: + explicit List(c10::intrusive_ptr>&& elements); + friend struct IValue; + template friend List impl::toTypedList(List); + template friend List impl::toGenericList(List); + friend const IValue* impl::ptr_to_first_element(const List& list); + template friend List impl::toList(std::vector list); + template friend const std::vector& impl::toVector(const List& list); +}; + +namespace impl { +// GenericList is how IValue stores lists. It is, however, not part of the +// public API. Kernels should use Lists with concrete types instead +// (maybe except for some internal prim ops). +using GenericList = List; + +inline const IValue* ptr_to_first_element(const GenericList& list) { + return &list.impl_->list[0]; +} + +template +const std::vector& toVector(const List& list) { + static_assert(std::is_same::value || std::is_same::StorageT>::value, "toVector only works for lists that store their elements as std::vector. You tried to call it for a list that stores its elements as std::vector."); + + return list.impl_->list; +} + +template +List toList(std::vector list) { + static_assert(std::is_same::value || std::is_same::StorageT>::value, "toList only works for lists that store their elements as std::vector. You tried to call it for a list that stores its elements as std::vector."); + List result; + result.impl_->list = std::move(list); + return result; +} + +} +} + +namespace torch { + template using List = c10::List; +} + +#include diff --git a/thirdparty/libtorch/include/ATen/core/List_inl.h b/thirdparty/libtorch/include/ATen/core/List_inl.h new file mode 100644 index 0000000000..d954d4d5cc --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/List_inl.h @@ -0,0 +1,292 @@ +#pragma once + +#include +#include + +namespace c10 { + +template TypePtr getTypePtr(); +std::string toString(TypePtr typePtr); + +template +List::List(c10::intrusive_ptr>&& elements) +: impl_(std::move(elements)) {} + +template +List::List() +: List(make_intrusive::StorageT>>( + typename detail::ListImpl::StorageT>::list_type(), + getTypePtr())) { + static_assert(!std::is_same::value, "This constructor is not valid for List. 
Please use c10::impl::GenericList(elementType) instead."); +} + +template +List::List(ArrayRef values) +: List(make_intrusive::StorageT>>( + typename detail::ListImpl::StorageT>::list_type(), + getTypePtr())) { + static_assert(!std::is_same::value, "This constructor is not valid for List. Please use c10::impl::GenericList(elementType)."); + impl_->list.reserve(values.size()); + for (const T& element : values) { + impl_->list.push_back(element); + } +} + +template +List::List(std::initializer_list initial_values) +: List(ArrayRef(initial_values)) { + static_assert(!std::is_same::value, "This constructor is not valid for List. Please use c10::impl::GenericList(elementType)."); +} + +template +List::List(TypePtr elementType) +: List(make_intrusive>( + typename detail::ListImpl::list_type(), + std::move(elementType))) { + static_assert(std::is_same::value, "This constructor is only valid for c10::impl::GenericList."); +} + +namespace impl { +template +List toTypedList(impl::GenericList list) { + static_assert(std::is_same::StorageT>::value, "Can only call toTypedList with lists that store their elements as IValues."); + TORCH_INTERNAL_ASSERT(*getTypePtr() == *list.impl_->elementType, "Tried to cast a List<", toString(list.impl_->elementType), "> to a List<", toString(getTypePtr()), ">. Types mismatch."); + return List(std::move(list.impl_)); +} + +template +impl::GenericList toGenericList(List list) { + static_assert(std::is_same::StorageT>::value, "Can only call toGenericList with lists that store their elements as IValues."); + return GenericList(std::move(list.impl_)); +} +} + +template +List::List(List&& rhs) noexcept: impl_(std::move(rhs.impl_)) { + rhs.impl_ = make_intrusive>(std::vector{}, impl_->elementType); +} + +template +List& List::operator=(List&& rhs) noexcept { + impl_ = std::move(rhs.impl_); + rhs.impl_ = make_intrusive>(std::vector{}, impl_->elementType); + return *this; +} + +template +List List::copy() const { + return List(impl_->copy()); +} + +namespace detail { + template + T list_element_to(T element) { + return element; + } + template + T list_element_to(const IValue& element) { + return element.template to(); + } + template + T list_element_to(IValue&& element) { + return std::move(element).template to(); + } + template + StorageT list_element_from(const T& element) { + return element; + } + template + StorageT list_element_from(T&& element) { + return std::move(element); + } +} + +namespace impl { + +template +ListElementReference::operator T() const { + return detail::list_element_to(*iterator_); +} + +template +ListElementReference& ListElementReference::operator=(T&& new_value) && { + *iterator_ = detail::list_element_from(std::move(new_value)); + return *this; +} + +template +ListElementReference& ListElementReference::operator=(const T& new_value) && { + *iterator_ = detail::list_element_from(std::move(new_value)); + return *this; +} + +template +ListElementReference& ListElementReference::operator=(ListElementReference&& rhs) && { + *iterator_ = *rhs.iterator_; + return *this; +} + +template +void swap(ListElementReference&& lhs, ListElementReference&& rhs) { + std::swap(*lhs.iterator_, *rhs.iterator_); +} +} + +template +void List::set(size_type pos, const value_type& value) const { + impl_->list.at(pos) = detail::list_element_from(value); +} + +template +void List::set(size_type pos, value_type&& value) const { + impl_->list.at(pos) = detail::list_element_from(std::move(value)); +} + +template +typename List::value_type List::get(size_type pos) const { + 
return detail::list_element_to(impl_->list.at(pos)); +} + +template +typename List::internal_reference_type List::operator[](size_type pos) const { + static_cast(impl_->list.at(pos)); // Throw the exception if it is out of range. + return {impl_->list.begin() + pos}; +} + +template +typename List::value_type List::extract(size_type pos) const { + auto& elem = impl_->list.at(pos); + auto result = detail::list_element_to(std::move(elem)); + if (std::is_same::value) { + // Reset the list element to a T() instead of None to keep it correctly typed + elem = detail::list_element_from(T{}); + } + return result; +} + +template +typename List::iterator List::begin() const { + return iterator(impl_->list.begin()); +} + +template +typename List::iterator List::end() const { + return iterator(impl_->list.end()); +} + +template +bool List::empty() const { + return impl_->list.empty(); +} + +template +typename List::size_type List::size() const { + return impl_->list.size(); +} + +template +void List::reserve(size_type new_cap) const { + impl_->list.reserve(new_cap); +} + +template +void List::clear() const { + impl_->list.clear(); +} + +template +typename List::iterator List::insert(iterator pos, const T& value) const { + return iterator { impl_->list.insert(pos.iterator_, detail::list_element_from(value)) }; +} + +template +typename List::iterator List::insert(iterator pos, T&& value) const { + return iterator { impl_->list.insert(pos.iterator_, detail::list_element_from(std::move(value))) }; +} + +template +template +typename List::iterator List::emplace(iterator pos, Args&&... value) const { + // TODO Use list_element_from? + return iterator { impl_->list.emplace(pos.iterator_, std::forward(value)...) }; +} + +template +void List::push_back(const T& value) const { + impl_->list.push_back(detail::list_element_from(value)); +} + +template +void List::push_back(T&& value) const { + impl_->list.push_back(detail::list_element_from(std::move(value))); +} + +template +void List::append(List b) const { + if (b.use_count() == 1) { + impl_->list.insert(impl_->list.end(), make_move_iterator(b.impl_->list.begin()), make_move_iterator(b.impl_->list.end())); + } else { + impl_->list.insert(impl_->list.end(), b.impl_->list.begin(), b.impl_->list.end()); + } +} + +template +template +void List::emplace_back(Args&&... args) const { + // TODO Use list_element_from? 
+ impl_->list.emplace_back(std::forward(args)...); +} + +template +typename List::iterator List::erase(iterator pos) const { + return iterator { impl_->list.erase(pos.iterator_) }; +} + +template +typename List::iterator List::erase(iterator first, iterator last) const { + return iterator { impl_->list.erase(first.iterator_, last.iterator_) }; +} + +template +void List::pop_back() const { + impl_->list.pop_back(); +} + +template +void List::resize(size_type count) const { + impl_->list.resize(count, T{}); +} + +template +void List::resize(size_type count, const T& value) const { + impl_->list.resize(count, value); +} + +template +bool list_is_equal(const List& lhs, const List& rhs) { + if (lhs.size() != rhs.size()) { + return false; + } + for (size_t i = 0; i < lhs.size(); ++i) { + if (lhs.get(i) != rhs.get(i)) { + return false; + } + } + return true; +} + +template +size_t List::use_count() const { + return impl_.use_count(); +} + +template +TypePtr List::elementType() const { + return impl_->elementType; +} + +template +void List::unsafeSetElementType(TypePtr t) { + impl_->elementType = std::move(t); +} +} diff --git a/thirdparty/libtorch/include/ATen/core/MT19937RNGEngine.h b/thirdparty/libtorch/include/ATen/core/MT19937RNGEngine.h new file mode 100644 index 0000000000..ac3b685731 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/MT19937RNGEngine.h @@ -0,0 +1,192 @@ +#pragma once + +// define constants like M_PI and C keywords for MSVC +#ifdef _MSC_VER +#define _USE_MATH_DEFINES +#include +#endif + +#include +#include +#include + +namespace at { + +constexpr int MERSENNE_STATE_N = 624; +constexpr int MERSENNE_STATE_M = 397; +constexpr uint32_t MATRIX_A = 0x9908b0df; +constexpr uint32_t UMASK = 0x80000000; +constexpr uint32_t LMASK = 0x7fffffff; + +/** + * Note [Mt19937 Engine implementation] + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Originally implemented in: + * http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/MT2002/CODES/MTARCOK/mt19937ar-cok.c + * and modified with C++ constructs. Moreover the state array of the engine + * has been modified to hold 32 bit uints instead of 64 bits. + * + * Note that we reimplemented mt19937 instead of using std::mt19937 because, + * at::mt19937 turns out to be faster in the pytorch codebase. PyTorch builds with -O2 + * by default and following are the benchmark numbers (benchmark code can be found at + * https://github.com/syed-ahmed/benchmark-rngs): + * + * with -O2 + * Time to get 100000000 philox randoms with at::uniform_real_distribution = 0.462759s + * Time to get 100000000 at::mt19937 randoms with at::uniform_real_distribution = 0.39628s + * Time to get 100000000 std::mt19937 randoms with std::uniform_real_distribution = 0.352087s + * Time to get 100000000 std::mt19937 randoms with at::uniform_real_distribution = 0.419454s + * + * std::mt19937 is faster when used in conjuction with std::uniform_real_distribution, + * however we can't use std::uniform_real_distribution because of this bug: + * http://open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#2524. Plus, even if we used + * std::uniform_real_distribution and filtered out the 1's, it is a different algorithm + * than what's in pytorch currently and that messes up the tests in tests_distributions.py. + * The other option, using std::mt19937 with at::uniform_real_distribution is a tad bit slower + * than at::mt19937 with at::uniform_real_distribution and hence, we went with the latter. + * + * Copyright notice: + * A C-program for MT19937, with initialization improved 2002/2/10. 
+ * Coded by Takuji Nishimura and Makoto Matsumoto. + * This is a faster version by taking Shawn Cokus's optimization, + * Matthe Bellew's simplification, Isaku Wada's real version. + * + * Before using, initialize the state by using init_genrand(seed) + * or init_by_array(init_key, key_length). + * + * Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura, + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * 3. The names of its contributors may not be used to endorse or promote + * products derived from this software without specific prior written + * permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + * + * + * Any feedback is very welcome. + * http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html + * email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space) + */ + +/** + * mt19937_data_pod is used to get POD data in and out + * of mt19937_engine. Used in torch.get_rng_state and + * torch.set_rng_state functions. 
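+ *
+ * A minimal round-trip sketch (illustrative; the seed and variable names are
+ * arbitrary):
+ *
+ *   at::mt19937 gen(42);                          // seed the engine
+ *   at::mt19937_data_pod snapshot = gen.data();   // capture the POD state
+ *   uint32_t a = gen();                           // draw one value
+ *   gen.set_data(snapshot);                       // restore the state
+ *   // gen() now yields the value `a` again, replaying the same stream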
+ */ +struct mt19937_data_pod { + uint64_t seed_; + int left_; + bool seeded_; + uint32_t next_; + std::array state_; +}; + +class mt19937_engine { +public: + + inline explicit mt19937_engine(uint64_t seed = 5489) { + init_with_uint32(seed); + } + + inline mt19937_data_pod data() const { + return data_; + } + + inline void set_data(mt19937_data_pod data) { + data_ = data; + } + + inline uint64_t seed() const { + return data_.seed_; + } + + inline bool is_valid() { + if ((data_.seeded_ == true) + && (data_.left_ > 0 && data_.left_ <= MERSENNE_STATE_N) + && (data_.next_ <= MERSENNE_STATE_N)) { + return true; + } + return false; + } + + inline uint32_t operator()() { + uint32_t y; + + if (--(data_.left_) == 0) { + next_state(); + } + y = *(data_.state_.data() + data_.next_++); + y ^= (y >> 11); + y ^= (y << 7) & 0x9d2c5680; + y ^= (y << 15) & 0xefc60000; + y ^= (y >> 18); + + return y; + } + +private: + mt19937_data_pod data_; + + inline void init_with_uint32(uint64_t seed) { + data_.seed_ = seed; + data_.seeded_ = true; + data_.state_[0] = seed & 0xffffffff; + for(int j = 1; j < MERSENNE_STATE_N; j++) { + data_.state_[j] = (1812433253 * (data_.state_[j-1] ^ (data_.state_[j-1] >> 30)) + j); + data_.state_[j] &= 0xffffffff; + } + data_.left_ = 1; + data_.next_ = 0; + } + + inline uint32_t mix_bits(uint32_t u, uint32_t v) { + return (u & UMASK) | (v & LMASK); + } + + inline uint32_t twist(uint32_t u, uint32_t v) { + return (mix_bits(u,v) >> 1) ^ (v & 1 ? MATRIX_A : 0); + } + + inline void next_state() { + uint32_t* p = data_.state_.data(); + data_.left_ = MERSENNE_STATE_N; + data_.next_ = 0; + + for(int j = MERSENNE_STATE_N - MERSENNE_STATE_M + 1; --j; p++) { + *p = p[MERSENNE_STATE_M] ^ twist(p[0], p[1]); + } + + for(int j = MERSENNE_STATE_M; --j; p++) { + *p = p[MERSENNE_STATE_M - MERSENNE_STATE_N] ^ twist(p[0], p[1]); + } + + *p = p[MERSENNE_STATE_M - MERSENNE_STATE_N] ^ twist(p[0], data_.state_[0]); + } + +}; + +typedef mt19937_engine mt19937; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Macros.h b/thirdparty/libtorch/include/ATen/core/Macros.h new file mode 100644 index 0000000000..f8643d1ace --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Macros.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/core/NamedTensor.h b/thirdparty/libtorch/include/ATen/core/NamedTensor.h new file mode 100644 index 0000000000..61a8b3ce98 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/NamedTensor.h @@ -0,0 +1,119 @@ +#pragma once + +#include +#include +#include +#include + +#ifdef BUILD_NAMEDTENSOR +namespace at { + +// XXX: This file exists because TensorImpl is in c10, but Dimname is in ATen. +// Due to the c10/ATen library split, TensorImpl cannot depend on Dimname, +// so we have a couple of workarounds. +// +// In the long term, we'll move Dimname to c10 and everything in this file +// can be refactored out. The main blocker for that is that "c10::Symbol" +// actually exists outside of c10 and needs to be moved in. + +// TensorImpl has a unique_ptr field. +// XXX: Ideally we would just put optional> into TensorImpl. 
+struct CAFFE2_API NamedTensorMeta : public c10::NamedTensorMetaInterface { + explicit NamedTensorMeta(int64_t num_names) + : names_(std::vector(num_names, Dimname::wildcard())) {} + + explicit NamedTensorMeta(DimnameList names) + : names_(names.vec()) {} + explicit NamedTensorMeta(std::vector&& names) + : names_(std::move(names)) {} + + std::unique_ptr clone() const override { + return c10::guts::make_unique(names_); + } + + bool has_names() const; + DimnameList names() const { return names_; } + + // Used for an assertion in TensorImpl.h + int64_t slow_dim() const override { + return names_.size(); + } + + void set_names(DimnameList new_names) { + TORCH_INTERNAL_ASSERT(new_names.size() == names_.size()); + std::copy(new_names.begin(), new_names.end(), names_.begin()); + } + + void set_names(std::vector&& new_names) { + TORCH_INTERNAL_ASSERT(new_names.size() == names_.size()); + names_ = std::move(new_names); + } + + private: + std::vector names_; +}; + +// When NamesMode is disabled, then all operations ignore tensors' names fields. +// Concretely speaking, all tensors are treated as having nullopt names. +struct CAFFE2_API NamesMode { + static bool is_enabled(); + static void set_enabled(bool enabled); +}; + + +// A RAII, thread local (!) guard that enables or disables names upon +// construction, and sets it back to the original value upon destruction. +struct CAFFE2_API NoNamesGuard { + NoNamesGuard() : prev_mode(NamesMode::is_enabled()) { + NamesMode::set_enabled(false); + } + ~NoNamesGuard() { + NamesMode::set_enabled(prev_mode); + } + private: + bool prev_mode; +}; + +void check_names_valid_for(const Tensor& tensor, DimnameList names); +void check_names_valid_for(int64_t tensor_dim, DimnameList names); + +// Sets the names of `tensor` to be `names`. +CAFFE2_API Tensor& internal_set_names_inplace(Tensor& tensor, optional names); +CAFFE2_API Tensor& internal_set_names_inplace(Tensor& tensor, std::vector&& names, bool validate_names); + +constexpr size_t kMaxNamedTensorDim = 64; + +DimnameList default_names(size_t len); + +namespace impl { + +// Some helper functions on TensorImpl. Useful for working with names in TH. +// XXX: Ideally these would exist as methods on TensorImpl +CAFFE2_API void internal_set_names_inplace(TensorImpl* impl, optional names, bool validate_names); +CAFFE2_API void internal_set_names_inplace(TensorImpl* impl, std::vector&& names, bool validate_names); + +void check_names_valid_for(TensorImpl* impl, DimnameList names); + +// Returns true if the tensor's names exist and are not all 'None'. +// Returns false if the tensor's names don't exist (were not allocated), +// or if all names are 'None'. +// We treat not-allocated-names the same as allocated names that are all 'None'. +CAFFE2_API bool has_names(const TensorImpl* impl); + +// Returns the names of the tensor's dimensions. +// Unnamed tensors are treated as having 'None' in all dimension; this method +// would return a DimnameList of all 'None's for an unnamed tensor. +CAFFE2_API DimnameList get_names(const TensorImpl* impl); + +// This is more of an implementation detail; one should use impl::get_names / +// Tensor::names() whenever possible because it provides a cleaner API. +// Returns the names of the tensor if they have been allocated; returns nullopt +// instead if the haven't been. The names of a tensor are not allocated if a +// tensor is constructed with names=None. 
+CAFFE2_API optional get_opt_names(const TensorImpl* impl); + + +} // namespace impl + +} // namespace at +#endif diff --git a/thirdparty/libtorch/include/ATen/core/OpsAlreadyMovedToC10.h b/thirdparty/libtorch/include/ATen/core/OpsAlreadyMovedToC10.h new file mode 100644 index 0000000000..ab68c0d4fd --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/OpsAlreadyMovedToC10.h @@ -0,0 +1,14 @@ +#pragma once + +#include + +namespace c10 { +struct OperatorName; +} + +namespace at { + +// list of ATen ops that come from native_functions.yaml +CAFFE2_API bool is_aten_op(const c10::OperatorName& opName); + +} diff --git a/thirdparty/libtorch/include/ATen/core/PhiloxRNGEngine.h b/thirdparty/libtorch/include/ATen/core/PhiloxRNGEngine.h new file mode 100644 index 0000000000..1e597e8fc1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/PhiloxRNGEngine.h @@ -0,0 +1,203 @@ +#pragma once + +// define constants like M_PI and C keywords for MSVC +#ifdef _MSC_VER +#define _USE_MATH_DEFINES +#include +#endif + +#include + +#ifdef __CUDACC__ +#include +#endif + +#include +#include +#include +#include +#include + +namespace at { + +// typedefs for holding vector data +namespace detail { + +typedef at::detail::Array UINT4; +typedef at::detail::Array UINT2; +typedef at::detail::Array DOUBLE2; +typedef at::detail::Array FLOAT2; + +} // namespace detail + +/** + * Note [Philox Engine implementation] + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Originally implemented in PyTorch's fusion compiler + * Refer to: http://www.thesalmons.org/john/random123/papers/random123sc11.pdf + * for details regarding the engine. + * + * Note that currently this implementation of the philox engine is not used + * anywhere except for tests in cpu_generator_test.cpp. However, this engine + * will replace curandStatePhilox4_32_10_t in the future. + * + * The philox engine takes a seed value, a subsequeunce + * for starting the generation and an offset for the subsequence. + * Think of this engine as an algorithm producing a huge array. We are + * parallelizing this array by partitioning the huge array and assigning + * a thread index to each partition. In other words, each seed value + * (there are 2^64 possible seed values) gives a sub array of size + * 2^128 (each element in that array is a 128 bit number). Reasoning + * behind the array being of size 2^128 is, there are 2^64 possible + * thread index value and there is an array of size 2^64 for each of + * those thread index. Hence 2^64 * 2^64 = 2^128 for each seed value. + * + * In short, this generator can produce 2^64 (seed values) * 2^128 (number + * of elements in an array given by a seed value) = 2^192 values. + * + * Arguments: + * seed: Seed values could be any number from 0 to 2^64-1. + * subsequence: Subsequence is just the cuda thread indexing with: + * - blockIdx.x * blockDim.x + threadIdx.x + * offset: The offset variable in PhiloxEngine decides how many 128-bit + * random numbers to skip (i.e. how many groups of 4, 32-bit numbers to skip) + * and hence really decides the total number of randoms that can be achieved + * for the given subsequence. 
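+ *
+ * A minimal construction sketch (illustrative; the seed, subsequence and
+ * offset values are arbitrary):
+ *
+ *   // seed = 1234, subsequence = 0, offset = 4
+ *   at::philox_engine engine(1234, 0, 4);
+ *   uint32_t r = engine();   // the offset above skipped the first 4 groups
+ *                            // of four 32-bit outputs in this subsequence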
+ */ + +class philox_engine { +public: + + C10_HOST_DEVICE inline explicit philox_engine(uint64_t seed = 67280421310721, + uint64_t subsequence = 0, + uint64_t offset = 0) { + key[0] = static_cast(seed); + key[1] = static_cast(seed >> 32); + counter = detail::UINT4(0); + counter[2] = static_cast(subsequence); + counter[3] = static_cast(subsequence >> 32); + STATE = 0; + incr_n(offset); + } + + /** + * Produces a unique 32-bit pseudo random number on every invocation + */ + C10_HOST_DEVICE inline uint32_t operator()() { + if(STATE == 0) { + detail::UINT4 counter_ = counter; + detail::UINT2 key_ = key; + + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + counter_ = single_round(counter_, key_); + key_[0] += (kPhilox10A); key_[1] += (kPhilox10B); + + output = single_round(counter_, key_); + incr(); + } + uint32_t ret = output[STATE]; + STATE = (STATE + 1) & 3; + return ret; + } + + /** + * Function that Skips N 128 bit numbers in a subsequence + */ + C10_HOST_DEVICE inline void incr_n(uint64_t n) { + uint32_t nlo = static_cast(n); + uint32_t nhi = static_cast(n >> 32); + counter[0] += nlo; + // if overflow in x has occured, carry over to nhi + if (counter[0] < nlo) { + nhi++; + // if overflow in nhi has occured during carry over, + // propagate that overflow to y and exit to increment z + // otherwise return + counter[1] += nhi; + if(nhi != 0) { + if (nhi <= counter[1]) { + return; + } + } + } else { + // if overflow in y has occured during addition, + // exit to increment z + // otherwise return + counter[1] += nhi; + if (nhi <= counter[1]) { + return; + } + } + if (++counter[2]) + return; + ++counter[3]; + } + + /** + * Function that Skips one 128 bit number in a subsequence + */ + C10_HOST_DEVICE inline void incr() { + if (++counter[0]) + return; + if (++counter[1]) + return; + if (++counter[2]) { + return; + } + ++counter[3]; + } + +private: + detail::UINT4 counter; + detail::UINT4 output; + detail::UINT2 key; + uint32_t STATE; + + C10_HOST_DEVICE inline uint32_t mulhilo32(uint32_t a, uint32_t b, + uint32_t *result_high) { + #ifdef __CUDA_ARCH__ + *result_high = __umulhi(a, b); + return a*b; + #else + const uint64_t product = static_cast(a) * b; + *result_high = static_cast(product >> 32); + return static_cast(product); + #endif + } + + C10_HOST_DEVICE inline detail::UINT4 single_round(detail::UINT4 ctr, detail::UINT2 in_key) { + uint32_t hi0; + uint32_t hi1; + uint32_t lo0 = mulhilo32(kPhiloxSA, ctr[0], &hi0); + uint32_t lo1 = mulhilo32(kPhiloxSB, ctr[2], &hi1); + detail::UINT4 ret; + ret[0] = hi1 ^ ctr[1] ^ in_key[0]; + ret[1] = lo1; + ret[2] = hi0 ^ ctr[3] ^ in_key[1]; + ret[3] = lo0; + return ret; + } + static const uint32_t kPhilox10A = 0x9E3779B9; + static const uint32_t kPhilox10B = 0xBB67AE85; + static const uint32_t kPhiloxSA = 0xD2511F53; + static const uint32_t kPhiloxSB = 
0xCD9E8D57; +}; + +typedef philox_engine Philox4_32_10; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Range.h b/thirdparty/libtorch/include/ATen/core/Range.h new file mode 100644 index 0000000000..2bf6b2b73a --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Range.h @@ -0,0 +1,25 @@ +#pragma once + +#include +#include + +namespace at { + +struct Range { + Range(int64_t begin, int64_t end) + : begin(begin) + , end(end) {} + + int64_t size() const { return end - begin; } + + Range operator/(int64_t divisor) { + return Range(begin / divisor, end / divisor); + } + + int64_t begin; + int64_t end; +}; + +std::ostream& operator<<(std::ostream& out, const Range& range); + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Reduction.h b/thirdparty/libtorch/include/ATen/core/Reduction.h new file mode 100644 index 0000000000..23c6ea3cab --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Reduction.h @@ -0,0 +1,16 @@ +#pragma once + +namespace at { +namespace Reduction { + +// NB: Keep this in sync with Reduction class in torch/nn/_reduction.py +// These constants control the reduction behavior of loss functions. +// Ideally, this would be a scoped enum, but jit doesn't support that +enum Reduction { + None, // Do not reduce + Mean, // (Possibly weighted) mean of losses + Sum, // Sum losses + END +}; +} // namespace Reduction +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/Scalar.h b/thirdparty/libtorch/include/ATen/core/Scalar.h new file mode 100644 index 0000000000..a14b48f012 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Scalar.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/core/ScalarType.h b/thirdparty/libtorch/include/ATen/core/ScalarType.h new file mode 100644 index 0000000000..eb30ee86f7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/ScalarType.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/core/Tensor.h b/thirdparty/libtorch/include/ATen/core/Tensor.h new file mode 100644 index 0000000000..2dc92d43d4 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Tensor.h @@ -0,0 +1,12 @@ +#pragma once + +/* + * We split Tensor.h into TensorBody.h and TensorMethods.h because we want + * all TensorMethods to be inlined, but they depend on the Dispatcher, + * which in turn depends on many other things, which then depend back on Tensor. + * + * We can break this dependency chain by having the dispatcher only depend on + * TensorBody.h and not TensorMethods.h. + */ +#include +#include diff --git a/thirdparty/libtorch/include/ATen/core/TensorAccessor.h b/thirdparty/libtorch/include/ATen/core/TensorAccessor.h new file mode 100644 index 0000000000..95f37fcb09 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/TensorAccessor.h @@ -0,0 +1,232 @@ +#pragma once + +#include +#include +#include +#include + +namespace at { + +// The PtrTraits argument to the TensorAccessor/GenericPackedTensorAccessor +// is used to enable the __restrict__ keyword/modifier for the data +// passed to cuda. +template +struct DefaultPtrTraits { + typedef T* PtrType; +}; + +#if defined(__CUDACC__) || defined(__HIPCC__) +template +struct RestrictPtrTraits { + typedef T* __restrict__ PtrType; +}; +#endif + +// TensorAccessorBase and TensorAccessor are used for both CPU and CUDA tensors. +// For CUDA tensors it is used in device code (only). This means that we restrict ourselves +// to functions and types available there (e.g. IntArrayRef isn't). 
+ +// The PtrTraits argument is only relevant to cuda to support `__restrict__` pointers. +template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> +class TensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + + C10_HOST_DEVICE TensorAccessorBase( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : data_(data_), sizes_(sizes_), strides_(strides_) {} + C10_HOST IntArrayRef sizes() const { + return IntArrayRef(sizes_,N); + } + C10_HOST IntArrayRef strides() const { + return IntArrayRef(strides_,N); + } + C10_HOST_DEVICE index_t stride(index_t i) const { + return strides_[i]; + } + C10_HOST_DEVICE index_t size(index_t i) const { + return sizes_[i]; + } + C10_HOST_DEVICE PtrType data() { + return data_; + } + C10_HOST_DEVICE const PtrType data() const { + return data_; + } +protected: + PtrType data_; + const index_t* sizes_; + const index_t* strides_; +}; + +// The `TensorAccessor` is typically instantiated for CPU `Tensor`s using +// `Tensor.accessor()`. +// For CUDA `Tensor`s, `GenericPackedTensorAccessor` is used on the host and only +// indexing on the device uses `TensorAccessor`s. +template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> +class TensorAccessor : public TensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + + C10_HOST_DEVICE TensorAccessor( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : TensorAccessorBase(data_,sizes_,strides_) {} + + C10_HOST_DEVICE TensorAccessor operator[](index_t i) { + return TensorAccessor(this->data_ + this->strides_[0]*i,this->sizes_+1,this->strides_+1); + } + + C10_HOST_DEVICE const TensorAccessor operator[](index_t i) const { + return TensorAccessor(this->data_ + this->strides_[0]*i,this->sizes_+1,this->strides_+1); + } +}; + +template class PtrTraits, typename index_t> +class TensorAccessor : public TensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + + C10_HOST_DEVICE TensorAccessor( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : TensorAccessorBase(data_,sizes_,strides_) {} + C10_HOST_DEVICE T & operator[](index_t i) { + return this->data_[this->strides_[0]*i]; + } + C10_HOST_DEVICE const T & operator[](index_t i) const { + return this->data_[this->strides_[0]*i]; + } +}; + + +// GenericPackedTensorAccessorBase and GenericPackedTensorAccessor are used on for CUDA `Tensor`s on the host +// and as +// In contrast to `TensorAccessor`s, they copy the strides and sizes on instantiation (on the host) +// in order to transfer them on the device when calling kernels. +// On the device, indexing of multidimensional tensors gives to `TensorAccessor`s. +// Use RestrictPtrTraits as PtrTraits if you want the tensor's data pointer to be marked as __restrict__. +// Instantiation from data, sizes, strides is only needed on the host and std::copy isn't available +// on the device, so those functions are host only. 
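+// A minimal sketch of that host/device split (illustrative; the kernel name,
+// the tensor `t` and the launch configuration are placeholders):
+//
+//   __global__ void scale(at::PackedTensorAccessor32<float, 2> a, float s) {
+//     a[blockIdx.x][threadIdx.x] *= s;
+//   }
+//   // on the host:
+//   // scale<<<grid, block>>>(t.packed_accessor32<float, 2>(), 2.0f);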
+template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> +class GenericPackedTensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + C10_HOST GenericPackedTensorAccessorBase( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : data_(data_) { + std::copy(sizes_, sizes_ + N, std::begin(this->sizes_)); + std::copy(strides_, strides_ + N, std::begin(this->strides_)); + } + + // if index_t is not int64_t, we want to have an int64_t constructor + template ::value>::type> + C10_HOST GenericPackedTensorAccessorBase( + PtrType data_, + const source_index_t* sizes_, + const source_index_t* strides_) + : data_(data_) { + for (int i = 0; i < N; i++) { + this->sizes_[i] = sizes_[i]; + this->strides_[i] = strides_[i]; + } + } + + C10_HOST_DEVICE index_t stride(index_t i) const { + return strides_[i]; + } + C10_HOST_DEVICE index_t size(index_t i) const { + return sizes_[i]; + } + C10_HOST_DEVICE PtrType data() { + return data_; + } + C10_HOST_DEVICE const PtrType data() const { + return data_; + } +protected: + PtrType data_; + index_t sizes_[N]; + index_t strides_[N]; +}; + +template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> +class GenericPackedTensorAccessor : public GenericPackedTensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + + C10_HOST GenericPackedTensorAccessor( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : GenericPackedTensorAccessorBase(data_, sizes_, strides_) {} + + // if index_t is not int64_t, we want to have an int64_t constructor + template ::value>::type> + C10_HOST GenericPackedTensorAccessor( + PtrType data_, + const source_index_t* sizes_, + const source_index_t* strides_) + : GenericPackedTensorAccessorBase(data_, sizes_, strides_) {} + + C10_DEVICE TensorAccessor operator[](index_t i) { + index_t* new_sizes = this->sizes_ + 1; + index_t* new_strides = this->strides_ + 1; + return TensorAccessor(this->data_ + this->strides_[0]*i, new_sizes, new_strides); + } + + C10_DEVICE const TensorAccessor operator[](index_t i) const { + const index_t* new_sizes = this->sizes_ + 1; + const index_t* new_strides = this->strides_ + 1; + return TensorAccessor(this->data_ + this->strides_[0]*i, new_sizes, new_strides); + } +}; + +template class PtrTraits, typename index_t> +class GenericPackedTensorAccessor : public GenericPackedTensorAccessorBase { +public: + typedef typename PtrTraits::PtrType PtrType; + C10_HOST GenericPackedTensorAccessor( + PtrType data_, + const index_t* sizes_, + const index_t* strides_) + : GenericPackedTensorAccessorBase(data_, sizes_, strides_) {} + + // if index_t is not int64_t, we want to have an int64_t constructor + template ::value>::type> + C10_HOST GenericPackedTensorAccessor( + PtrType data_, + const source_index_t* sizes_, + const source_index_t* strides_) + : GenericPackedTensorAccessorBase(data_, sizes_, strides_) {} + + C10_DEVICE T & operator[](index_t i) { + return this->data_[this->strides_[0] * i]; + } + C10_DEVICE const T& operator[](index_t i) const { + return this->data_[this->strides_[0]*i]; + } +}; + + +// Can't put this directly into the macro function args because of commas +#define AT_X GenericPackedTensorAccessor + +// Old name for `GenericPackedTensorAccessor` +template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> +C10_DEFINE_DEPRECATED_USING(PackedTensorAccessor, AT_X) + +#undef AT_X + +template class PtrTraits = DefaultPtrTraits> +using PackedTensorAccessor32 = GenericPackedTensorAccessor; + 
+template class PtrTraits = DefaultPtrTraits> +using PackedTensorAccessor64 = GenericPackedTensorAccessor; +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/TensorBody.h b/thirdparty/libtorch/include/ATen/core/TensorBody.h new file mode 100644 index 0000000000..256a3c1288 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/TensorBody.h @@ -0,0 +1,1122 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace caffe2 { +class Tensor; +} +namespace c10{ +struct TensorOptions; +} +namespace at { +struct Generator; +struct Type; +class DeprecatedTypeProperties; +class Tensor; +} // namespace at + +namespace torch { namespace autograd { + +struct Node; + +}} // namespace torch::autograd + +namespace at { + +class Tensor; +using TensorList = ArrayRef; + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using QuantizerPtr = c10::intrusive_ptr; +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +namespace impl { +inline bool variable_excluded_from_dispatch() { + return c10::impl::tls_local_tensor_type_set().excluded_.has(TensorTypeId::VariableTensorId); +} +} + +// Tensor is a "generic" object holding a pointer to the underlying TensorImpl object, which +// has an embedded reference count. In this way, Tensor is similar to boost::intrusive_ptr. +// +// For example: +// +// void func(Tensor a) { +// Tensor b = a; +// ... +// } +// +// In this example, when we say Tensor b = a, we are creating a new object that points to the +// same underlying TensorImpl, and bumps its reference count. When b goes out of scope, the +// destructor decrements the reference count by calling release() on the TensorImpl it points to. +// The existing constructors, operator overloads, etc. take care to implement the correct semantics. +// +// Note that Tensor can also be NULL, i.e. it is not associated with any underlying TensorImpl, and +// special care must be taken to handle this. +class CAFFE2_API Tensor { + public: + Tensor(){}; + // This constructor should not be used by end users and is an implementation + // detail invoked by autogenerated code. + explicit Tensor( + c10::intrusive_ptr tensor_impl) + : impl_(std::move(tensor_impl)) { + if (impl_.get() == nullptr) { + throw std::runtime_error("TensorImpl with nullptr is not supported"); + } + } + Tensor(const Tensor&) = default; + Tensor(Tensor&&) = default; + + + public: + // Creates a new wrapper from TensorImpl. Intentionally a free method because + // it should be used with care. Checks necessary invariants + static Tensor wrap_tensor_impl( + c10::intrusive_ptr tensor_impl) { + Tensor r(std::move(tensor_impl)); + r.enforce_invariants(); + return r; + } + + int64_t dim() const { + return impl_->dim(); + } + int64_t storage_offset() const { + return impl_->storage_offset(); + } + + TensorImpl * unsafeGetTensorImpl() const { + return impl_.get(); + } + TensorImpl * unsafeReleaseTensorImpl() { + return impl_.release(); + } + const c10::intrusive_ptr& getIntrusivePtr() const { + return impl_; + } + + bool defined() const { + return impl_; + } + + void reset() { + impl_.reset(); + } + + // The following overloads are very intruiging. Consider the following + // program: + // + // x[1] = 3; + // + // We would expect that the first entry of x is written to 3. 
But how can we + // actually achieve this? x[1] evaluates to a tensor... + // + // The answer is, using a ref-qualifier. x[1] is an rvalue, which cannot be + // (profitably) assigned to in the traditional sense, so we overload + // assignment to mean, "Actually, copy 3 into the tensor data." This is done + // with an rvalue-reference ref-qualified overload (the methods with && at the + // end of their type.) + // + // There's one more fly in the ointment: We also want + // + // Tensor x = y; + // + // to work, and we want it NOT to copy. So we need a traditional operator= + // overload. But we MUST specify a mutable lvalue ref-qualifier, to + // disambiguate the traditional overload from the rvalue-reference + // ref-qualified overload. Otherwise, it will be ambiguous, because + // a non ref-qualified method is eligible for all situations. + + // Unfortunately, we have to write these constructors out manually + // to work around an MSVC bug: + // error C2580: 'at::Tensor &at::Tensor::operator =(const at::Tensor &) &': + // multiple versions of a defaulted special member functions are not allowed + // Tensor& operator=(const Tensor&) & = default; + // Tensor& operator=(Tensor&&) & = default; + Tensor& operator=(const Tensor& x) & { + impl_ = x.impl_; + return *this; + } + Tensor& operator=(Tensor&& x) & { + impl_ = std::move(x.impl_); + return *this; + } + + Tensor& operator=(Scalar v) &&; + Tensor& operator=(const Tensor&) &&; + Tensor& operator=(Tensor&&) &&; + + bool is_same(const Tensor& other) const noexcept { + return impl_ == other.impl_; + } + size_t use_count() const noexcept { + return impl_.use_count(); + } + size_t weak_use_count() const noexcept { + return impl_.weak_use_count(); + } + + std::string toString() const; + + IntArrayRef sizes() const { + return impl_->sizes(); + } + IntArrayRef strides() const { + return impl_->strides(); + } +#ifdef BUILD_NAMEDTENSOR + // See impl::get_opt_names in ATen/NamedTensor.h for docs. + optional opt_names() const { + return impl::get_opt_names(unsafeGetTensorImpl()); + } + // See impl::get_names in ATen/NamedTensor.h for docs. + DimnameList names() const { + return impl::get_names(unsafeGetTensorImpl()); + } +#endif + int64_t ndimension() const { + return dim(); + } + + bool is_contiguous(at::MemoryFormat memory_format=at::MemoryFormat::Contiguous) const { + return impl_->is_contiguous(memory_format); + } + + bool is_non_overlapping_and_dense() const { + return impl_->is_non_overlapping_and_dense(); + } + + at::MemoryFormat suggest_memory_format() const { + if (!is_mkldnn() && !is_sparse() && !impl_->is_contiguous() && impl_->is_strides_like_channels_last()) { + return at::MemoryFormat::ChannelsLast; + } + return at::MemoryFormat::Contiguous; + } + + // Total bytes consumed by the "view" of elements of the array. Does not + // include size of metadata. The number reported here does not necessarily + // correspond to the true physical memory consumed by a tensor; instead, + // it reports the memory the tensor would take *if* it were contiguous. + // Defined to be numel() * itemsize() + size_t nbytes() const { + return impl_->numel() * impl_->itemsize(); + } + + int64_t numel() const { + return impl_->numel(); + } + + // Length of one array element in bytes. This is the traditional + // Numpy naming. + size_t itemsize() const { + return impl_->itemsize(); + } + + // Same as itemsize(). This is the PyTorch naming. 
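+ // (Illustrative example: for a float tensor of shape {2, 3}, numel() == 6,
+ // itemsize() == element_size() == 4, and nbytes() == 24.)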
+ size_t element_size() const { + return impl_->itemsize(); + } + + DeprecatedTypeProperties & type() const { + return globalDeprecatedTypePropertiesRegistry().getDeprecatedTypeProperties( + tensorTypeIdToBackend(legacyExtractTypeId(type_set())), + scalar_type()); + } + TensorTypeSet type_set() const { + return impl_->type_set(); + } + ScalarType scalar_type() const { + return typeMetaToScalarType(impl_->dtype()); + } + bool has_storage() const { + return defined() && impl_->has_storage(); + } + const Storage& storage() const { + return impl_->storage(); + } + bool is_alias_of(const at::Tensor& other) const{ + return impl_->storage().is_alias_of(other.storage()); + } + Tensor toType(ScalarType t) const; + Tensor toBackend(Backend b) const; + + C10_DEPRECATED_MESSAGE("Tensor.is_variable() is deprecated; everything is a variable now. (If you want to assert that variable has been appropriately handled already, use at::impl::variable_excluded_from_dispatch())") + bool is_variable() const noexcept { + return !at::impl::variable_excluded_from_dispatch(); + } + + /// Returns a `Tensor`'s layout. Defined in Type.h + Layout layout() const noexcept; + + /// Returns a `Tensor`'s dtype (`TypeMeta`). Defined in TensorMethods.h + caffe2::TypeMeta dtype() const noexcept; + + /// Returns a `Tensor`'s device. + Device device() const; + + /// Returns a `Tensor`'s device index. + int64_t get_device() const; + + /// Returns if a `Tensor` has CUDA backend. + bool is_cuda() const; + + /// Returns if a `Tensor` has HIP backend. + bool is_hip() const; + + /// Returns if a `Tensor` has sparse backend. + bool is_sparse() const; + + /// Returns if a `Tensor` is mkldnn tensor. + bool is_mkldnn() const; + + /// Returns if a `Tensor` has quantized backend. + bool is_quantized() const; + + /// If a tensor is a quantized tensor, returns its quantizer + /// TODO: it's not in native_functions.yaml yet as it's not exposed to python + QuantizerPtr quantizer() const; + +#ifdef BUILD_NAMEDTENSOR + /// Returns if a `Tensor` has any dimension names + bool has_names() const; + + /// Returns a `Tensor`'s dimension names data structure + const NamedTensorMeta* get_named_tensor_meta() const; + NamedTensorMeta* get_named_tensor_meta(); +#endif + + /// Returns the `TensorOptions` corresponding to this `Tensor`. Defined in + /// TensorOptions.h. + TensorOptions options() const; + + void* data_ptr() const { + return this->unsafeGetTensorImpl()->data(); + } + + template + T * data_ptr() const; + + template + C10_DEPRECATED_MESSAGE("Tensor.data() is deprecated. Please use Tensor.data_ptr() instead.") + T * data() const { + return data_ptr(); + } + + template + T item() const; + + // Purposely not defined here to avoid inlining + void print() const; + + // Return a `TensorAccessor` for CPU `Tensor`s. You have to specify scalar type and + // dimension. + template + TensorAccessor accessor() const& { + static_assert(N > 0, "accessor is used for indexing tensor, for scalars use *data_ptr()"); + TORCH_CHECK(dim() == N, "expected ", N, " dims but tensor has ", dim()); + return TensorAccessor(data_ptr(),sizes().data(),strides().data()); + } + template + TensorAccessor accessor() && = delete; + + // Return a `GenericPackedTensorAccessor` for CUDA `Tensor`s. You have to specify scalar type and + // dimension. You can optionally specify RestrictPtrTraits as a template parameter to + // cast the data pointer to a __restrict__ pointer. 
+ // In order to use this, your CUDA kernel has to take a corresponding GenericPackedTensorAccessor + // as an argument. + template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> + GenericPackedTensorAccessor generic_packed_accessor() const& { + static_assert(N > 0, "accessor is used for indexing tensor, for scalars use *data_ptr()"); + TORCH_CHECK(dim() == N, "expected ", N, " dims but tensor has ", dim()); + return GenericPackedTensorAccessor(static_cast::PtrType>(data_ptr()),sizes().data(),strides().data()); + } + template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> + GenericPackedTensorAccessor generic_packed_accessor() && = delete; + + template class PtrTraits = DefaultPtrTraits> + PackedTensorAccessor32 packed_accessor32() const& { + return generic_packed_accessor(); + } + template class PtrTraits = DefaultPtrTraits> + PackedTensorAccessor32 packed_accessor32() && = delete; + + template class PtrTraits = DefaultPtrTraits> + PackedTensorAccessor64 packed_accessor64() const& { + return generic_packed_accessor(); + } + template class PtrTraits = DefaultPtrTraits> + PackedTensorAccessor64 packed_accessor64() && = delete; + + template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> + C10_DEPRECATED_MESSAGE("packed_accessor is deprecated, use packed_accessor32 or packed_accessor64 instead") + GenericPackedTensorAccessor packed_accessor() const & { + return generic_packed_accessor(); + } + template class PtrTraits = DefaultPtrTraits, typename index_t = int64_t> + C10_DEPRECATED_MESSAGE("packed_accessor is deprecated, use packed_accessor32 or packed_accessor64 instead") + GenericPackedTensorAccessor packed_accessor() && = delete; + + Tensor operator-() const; + Tensor& operator+=(const Tensor & other); + Tensor& operator+=(Scalar other); + Tensor& operator-=(const Tensor & other); + Tensor& operator-=(Scalar other); + Tensor& operator*=(const Tensor & other); + Tensor& operator*=(Scalar other); + Tensor& operator/=(const Tensor & other); + Tensor& operator/=(Scalar other); + Tensor operator[](Scalar index) const; + Tensor operator[](Tensor index) const; + Tensor operator[](int64_t index) const; + + Tensor cpu() const; + Tensor cuda() const; + Tensor hip() const; + + // ~~~~~ Autograd API ~~~~~ + + Tensor& set_requires_grad(bool requires_grad) { + impl_->set_requires_grad(requires_grad); + return *this; + } + bool requires_grad() const { + return impl_->requires_grad(); + } + + Tensor& grad() { + return impl_->grad(); + } + const Tensor& grad() const { + return impl_->grad(); + } + + // STOP. Thinking of adding a method here, which only makes use + // of other ATen methods? Define it in native_functions.yaml. 
+ + //example + //Tensor * add(Tensor & b); + void backward(const Tensor & gradient={}, bool keep_graph=false, bool create_graph=false) const; + void set_data(const Tensor & new_data) const; + Tensor data() const; + bool is_leaf() const; + int64_t output_nr() const; + int64_t _version() const; + Tensor & requires_grad_(bool _requires_grad=true) const; + #ifdef BUILD_NAMEDTENSOR + Tensor & rename_(c10::optional names) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor rename(c10::optional names) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_to(DimnameList names) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_to(DimnameList order, int64_t ellipsis_idx) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor align_as(const Tensor & other) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor refine_names(DimnameList names) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor unflatten(Dimname dim, IntArrayRef sizes, DimnameList names) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor unflatten(int64_t dim, IntArrayRef sizes, DimnameList names) const; + #endif + Tensor abs() const; + Tensor & abs_() const; + Tensor angle() const; + Tensor real() const; + Tensor imag() const; + Tensor conj() const; + Tensor acos() const; + Tensor & acos_() const; + Tensor add(const Tensor & other, Scalar alpha=1) const; + Tensor & add_(const Tensor & other, Scalar alpha=1) const; + Tensor add(Scalar other, Scalar alpha=1) const; + Tensor & add_(Scalar other, Scalar alpha=1) const; + Tensor addmv(const Tensor & mat, const Tensor & vec, Scalar beta=1, Scalar alpha=1) const; + Tensor & addmv_(const Tensor & mat, const Tensor & vec, Scalar beta=1, Scalar alpha=1) const; + Tensor addr(const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1) const; + Tensor & addr_(const Tensor & vec1, const Tensor & vec2, Scalar beta=1, Scalar alpha=1) const; + Tensor all(int64_t dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor all(Dimname dim, bool keepdim=false) const; + #endif + bool allclose(const Tensor & other, double rtol=1e-05, double atol=1e-08, bool equal_nan=false) const; + Tensor any(int64_t dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor any(Dimname dim, bool keepdim=false) const; + #endif + Tensor argmax(c10::optional dim=c10::nullopt, bool keepdim=false) const; + Tensor argmin(c10::optional dim=c10::nullopt, bool keepdim=false) const; + Tensor as_strided(IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt) const; + Tensor & as_strided_(IntArrayRef size, IntArrayRef stride, c10::optional storage_offset=c10::nullopt) const; + Tensor asin() const; + Tensor & asin_() const; + Tensor atan() const; + Tensor & atan_() const; + Tensor baddbmm(const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1) const; + Tensor & baddbmm_(const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1) const; + Tensor bernoulli(Generator * generator=nullptr) const; + Tensor & bernoulli_(const Tensor & p, Generator * generator=nullptr) const; + Tensor & bernoulli_(double p=0.5, Generator * generator=nullptr) const; + Tensor bernoulli(double p, Generator * generator=nullptr) const; + Tensor bincount(const Tensor & weights={}, int64_t minlength=0) const; + Tensor bitwise_not() const; + Tensor & bitwise_not_() const; + Tensor logical_not() const; + Tensor & logical_not_() const; + Tensor logical_xor(const Tensor & other) const; + Tensor & logical_xor_(const Tensor & other) const; + Tensor bmm(const Tensor & 
mat2) const; + Tensor ceil() const; + Tensor & ceil_() const; + std::vector chunk(int64_t chunks, int64_t dim=0) const; + Tensor clamp(c10::optional min=c10::nullopt, c10::optional max=c10::nullopt) const; + Tensor & clamp_(c10::optional min=c10::nullopt, c10::optional max=c10::nullopt) const; + Tensor clamp_max(Scalar max) const; + Tensor & clamp_max_(Scalar max) const; + Tensor clamp_min(Scalar min) const; + Tensor & clamp_min_(Scalar min) const; + Tensor contiguous(MemoryFormat memory_format=MemoryFormat::Contiguous) const; + Tensor & copy_(const Tensor & src, bool non_blocking=false) const; + Tensor cos() const; + Tensor & cos_() const; + Tensor cosh() const; + Tensor & cosh_() const; + Tensor cumsum(int64_t dim, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor cumsum(Dimname dim, c10::optional dtype=c10::nullopt) const; + #endif + Tensor cumprod(int64_t dim, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor cumprod(Dimname dim, c10::optional dtype=c10::nullopt) const; + #endif + Tensor det() const; + Tensor diag_embed(int64_t offset=0, int64_t dim1=-2, int64_t dim2=-1) const; + Tensor diagflat(int64_t offset=0) const; + Tensor diagonal(int64_t offset=0, int64_t dim1=0, int64_t dim2=1) const; + Tensor & fill_diagonal_(Scalar fill_value, bool wrap=false) const; + Tensor div(const Tensor & other) const; + Tensor & div_(const Tensor & other) const; + Tensor div(Scalar other) const; + Tensor & div_(Scalar other) const; + Tensor dot(const Tensor & tensor) const; + Tensor new_empty(IntArrayRef size, const TensorOptions & options={}) const; + Tensor new_full(IntArrayRef size, Scalar fill_value, const TensorOptions & options={}) const; + Tensor new_zeros(IntArrayRef size, const TensorOptions & options={}) const; + Tensor & resize_(IntArrayRef size, c10::optional memory_format=c10::nullopt) const; + Tensor erf() const; + Tensor & erf_() const; + Tensor erfc() const; + Tensor & erfc_() const; + Tensor exp() const; + Tensor & exp_() const; + Tensor expm1() const; + Tensor & expm1_() const; + Tensor expand(IntArrayRef size, bool implicit=false) const; + Tensor expand_as(const Tensor & other) const; + Tensor flatten(int64_t start_dim=0, int64_t end_dim=-1) const; + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(int64_t start_dim, int64_t end_dim, Dimname out_dim) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(Dimname start_dim, Dimname end_dim, Dimname out_dim) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor flatten(DimnameList dims, Dimname out_dim) const; + #endif + Tensor & fill_(Scalar value) const; + Tensor & fill_(const Tensor & value) const; + Tensor floor() const; + Tensor & floor_() const; + Tensor frac() const; + Tensor & frac_() const; + Tensor ger(const Tensor & vec2) const; + Tensor fft(int64_t signal_ndim, bool normalized=false) const; + Tensor ifft(int64_t signal_ndim, bool normalized=false) const; + Tensor rfft(int64_t signal_ndim, bool normalized=false, bool onesided=true) const; + Tensor irfft(int64_t signal_ndim, bool normalized=false, bool onesided=true, IntArrayRef signal_sizes={}) const; + Tensor index(TensorList indices) const; + Tensor & index_copy_(int64_t dim, const Tensor & index, const Tensor & source) const; + Tensor index_copy(int64_t dim, const Tensor & index, const Tensor & source) const; + #ifdef BUILD_NAMEDTENSOR + Tensor & index_copy_(Dimname dim, const Tensor & index, const Tensor & source) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_copy(Dimname dim, const Tensor & index, const 
Tensor & source) const; + #endif + Tensor & index_put_(TensorList indices, const Tensor & values, bool accumulate=false) const; + Tensor index_put(TensorList indices, const Tensor & values, bool accumulate=false) const; + Tensor inverse() const; + Tensor isclose(const Tensor & other, double rtol=1e-05, double atol=1e-08, bool equal_nan=false) const; + bool is_distributed() const; + bool is_floating_point() const; + bool is_complex() const; + bool is_nonzero() const; + bool is_same_size(const Tensor & other) const; + bool is_signed() const; + std::tuple kthvalue(int64_t k, int64_t dim=-1, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple kthvalue(int64_t k, Dimname dim, bool keepdim=false) const; + #endif + Tensor log() const; + Tensor & log_() const; + Tensor log10() const; + Tensor & log10_() const; + Tensor log1p() const; + Tensor & log1p_() const; + Tensor log2() const; + Tensor & log2_() const; + Tensor logdet() const; + Tensor log_softmax(int64_t dim, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor log_softmax(Dimname dim, c10::optional dtype=c10::nullopt) const; + #endif + Tensor logsumexp(IntArrayRef dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor logsumexp(DimnameList dim, bool keepdim=false) const; + #endif + Tensor matmul(const Tensor & other) const; + Tensor matrix_power(int64_t n) const; + std::tuple max(int64_t dim, bool keepdim=false) const; + Tensor max_values(IntArrayRef dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple max(Dimname dim, bool keepdim=false) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor max_values(DimnameList dim, bool keepdim=false) const; + #endif + Tensor mean(c10::optional dtype=c10::nullopt) const; + Tensor mean(IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor mean(DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #endif + std::tuple median(int64_t dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple median(Dimname dim, bool keepdim=false) const; + #endif + std::tuple min(int64_t dim, bool keepdim=false) const; + Tensor min_values(IntArrayRef dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple min(Dimname dim, bool keepdim=false) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor min_values(DimnameList dim, bool keepdim=false) const; + #endif + Tensor mm(const Tensor & mat2) const; + std::tuple mode(int64_t dim=-1, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple mode(Dimname dim, bool keepdim=false) const; + #endif + Tensor mul(const Tensor & other) const; + Tensor & mul_(const Tensor & other) const; + Tensor mul(Scalar other) const; + Tensor & mul_(Scalar other) const; + Tensor mv(const Tensor & vec) const; + Tensor mvlgamma(int64_t p) const; + Tensor & mvlgamma_(int64_t p) const; + Tensor narrow_copy(int64_t dim, int64_t start, int64_t length) const; + Tensor narrow(int64_t dim, int64_t start, int64_t length) const; + Tensor permute(IntArrayRef dims) const; + Tensor numpy_T() const; + bool is_pinned() const; + Tensor pin_memory() const; + Tensor pinverse(double rcond=1e-15) const; + Tensor reciprocal() const; + Tensor & reciprocal_() const; + Tensor neg() const; + Tensor & neg_() const; + Tensor repeat(IntArrayRef repeats) const; + Tensor repeat_interleave(const Tensor & repeats, c10::optional dim=c10::nullopt) const; + Tensor repeat_interleave(int64_t repeats, c10::optional dim=c10::nullopt) const; + Tensor 
reshape(IntArrayRef shape) const; + Tensor reshape_as(const Tensor & other) const; + Tensor round() const; + Tensor & round_() const; + Tensor relu() const; + Tensor & relu_() const; + Tensor prelu(const Tensor & weight) const; + std::tuple prelu_backward(const Tensor & grad_output, const Tensor & weight) const; + Tensor hardshrink(Scalar lambd=0.5) const; + Tensor hardshrink_backward(const Tensor & grad_out, Scalar lambd) const; + Tensor rsqrt() const; + Tensor & rsqrt_() const; + #ifdef BUILD_NAMEDTENSOR + Tensor select(Dimname dim, int64_t index) const; + #endif + Tensor select(int64_t dim, int64_t index) const; + Tensor sigmoid() const; + Tensor & sigmoid_() const; + Tensor sin() const; + Tensor & sin_() const; + Tensor sinh() const; + Tensor & sinh_() const; + Tensor detach() const; + Tensor & detach_() const; + int64_t size(int64_t dim) const; + #ifdef BUILD_NAMEDTENSOR + int64_t size(Dimname dim) const; + #endif + Tensor slice(int64_t dim=0, int64_t start=0, int64_t end=9223372036854775807, int64_t step=1) const; + std::tuple slogdet() const; + Tensor smm(const Tensor & mat2) const; + Tensor softmax(int64_t dim, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor softmax(Dimname dim, c10::optional dtype=c10::nullopt) const; + #endif + std::vector split(int64_t split_size, int64_t dim=0) const; + std::vector split_with_sizes(IntArrayRef split_sizes, int64_t dim=0) const; + Tensor squeeze() const; + Tensor squeeze(int64_t dim) const; + #ifdef BUILD_NAMEDTENSOR + Tensor squeeze(Dimname dim) const; + #endif + Tensor & squeeze_() const; + Tensor & squeeze_(int64_t dim) const; + #ifdef BUILD_NAMEDTENSOR + Tensor & squeeze_(Dimname dim) const; + #endif + Tensor sspaddmm(const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1) const; + Tensor stft(int64_t n_fft, c10::optional hop_length=c10::nullopt, c10::optional win_length=c10::nullopt, const Tensor & window={}, bool normalized=false, bool onesided=true) const; + int64_t stride(int64_t dim) const; + #ifdef BUILD_NAMEDTENSOR + int64_t stride(Dimname dim) const; + #endif + Tensor sum(c10::optional dtype=c10::nullopt) const; + Tensor sum(IntArrayRef dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor sum(DimnameList dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #endif + Tensor sum_to_size(IntArrayRef size) const; + Tensor sqrt() const; + Tensor & sqrt_() const; + Tensor std(bool unbiased=true) const; + Tensor std(IntArrayRef dim, bool unbiased=true, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor std(DimnameList dim, bool unbiased=true, bool keepdim=false) const; + #endif + Tensor prod(c10::optional dtype=c10::nullopt) const; + Tensor prod(int64_t dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #ifdef BUILD_NAMEDTENSOR + Tensor prod(Dimname dim, bool keepdim=false, c10::optional dtype=c10::nullopt) const; + #endif + Tensor t() const; + Tensor & t_() const; + Tensor tan() const; + Tensor & tan_() const; + Tensor tanh() const; + Tensor & tanh_() const; + Tensor transpose(int64_t dim0, int64_t dim1) const; + #ifdef BUILD_NAMEDTENSOR + Tensor transpose(Dimname dim0, Dimname dim1) const; + #endif + Tensor & transpose_(int64_t dim0, int64_t dim1) const; + Tensor flip(IntArrayRef dims) const; + Tensor roll(IntArrayRef shifts, IntArrayRef dims={}) const; + Tensor rot90(int64_t k=1, IntArrayRef dims={0,1}) const; + Tensor trunc() const; + Tensor & trunc_() const; + Tensor type_as(const Tensor & other) 
const; + Tensor unsqueeze(int64_t dim) const; + Tensor & unsqueeze_(int64_t dim) const; + Tensor var(bool unbiased=true) const; + Tensor var(IntArrayRef dim, bool unbiased=true, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor var(DimnameList dim, bool unbiased=true, bool keepdim=false) const; + #endif + Tensor view_as(const Tensor & other) const; + Tensor where(const Tensor & condition, const Tensor & other) const; + Tensor norm(c10::optional p, ScalarType dtype) const; + Tensor norm(Scalar p=2) const; + Tensor norm(c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype) const; + Tensor norm(c10::optional p, IntArrayRef dim, bool keepdim=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor norm(c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor norm(c10::optional p, DimnameList dim, bool keepdim=false) const; + #endif + Tensor clone(c10::optional memory_format=c10::nullopt) const; + Tensor & resize_as_(const Tensor & the_template, c10::optional memory_format=c10::nullopt) const; + Tensor pow(Scalar exponent) const; + Tensor & zero_() const; + Tensor sub(const Tensor & other, Scalar alpha=1) const; + Tensor & sub_(const Tensor & other, Scalar alpha=1) const; + Tensor sub(Scalar other, Scalar alpha=1) const; + Tensor & sub_(Scalar other, Scalar alpha=1) const; + Tensor addmm(const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1) const; + Tensor & addmm_(const Tensor & mat1, const Tensor & mat2, Scalar beta=1, Scalar alpha=1) const; + Tensor & sparse_resize_(IntArrayRef size, int64_t sparse_dim, int64_t dense_dim) const; + Tensor & sparse_resize_and_clear_(IntArrayRef size, int64_t sparse_dim, int64_t dense_dim) const; + Tensor sparse_mask(const Tensor & mask) const; + Tensor to_dense() const; + int64_t sparse_dim() const; + int64_t _dimI() const; + int64_t dense_dim() const; + int64_t _dimV() const; + int64_t _nnz() const; + Tensor coalesce() const; + bool is_coalesced() const; + Tensor _indices() const; + Tensor _values() const; + Tensor & _coalesced_(bool coalesced) const; + Tensor indices() const; + Tensor values() const; + std::vector unbind(int64_t dim=0) const; + #ifdef BUILD_NAMEDTENSOR + std::vector unbind(Dimname dim) const; + #endif + Tensor to_sparse(int64_t sparse_dim) const; + Tensor to_sparse() const; + Tensor to_mkldnn() const; + Tensor dequantize() const; + double q_scale() const; + int64_t q_zero_point() const; + Tensor q_per_channel_scales() const; + Tensor q_per_channel_zero_points() const; + int64_t q_per_channel_axis() const; + Tensor int_repr() const; + QScheme qscheme() const; + Tensor to(const TensorOptions & options, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt) const; + Tensor to(Device device, ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt) const; + Tensor to(ScalarType dtype, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt) const; + Tensor to(const Tensor & other, bool non_blocking=false, bool copy=false, c10::optional memory_format=c10::nullopt) const; + Scalar item() const; + Tensor & set_(Storage source) const; + Tensor & set_(Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride={}) const; + Tensor & set_(const Tensor & source) const; + Tensor & set_() const; + Tensor & set_quantizer_(ConstQuantizerPtr quantizer) const; + bool is_set_to(const Tensor & tensor) const; + Tensor & masked_fill_(const Tensor & 
mask, Scalar value) const; + Tensor masked_fill(const Tensor & mask, Scalar value) const; + Tensor & masked_fill_(const Tensor & mask, const Tensor & value) const; + Tensor masked_fill(const Tensor & mask, const Tensor & value) const; + Tensor & masked_scatter_(const Tensor & mask, const Tensor & source) const; + Tensor masked_scatter(const Tensor & mask, const Tensor & source) const; + Tensor view(IntArrayRef size) const; + Tensor & put_(const Tensor & index, const Tensor & source, bool accumulate=false) const; + Tensor & index_add_(int64_t dim, const Tensor & index, const Tensor & source) const; + Tensor index_add(int64_t dim, const Tensor & index, const Tensor & source) const; + #ifdef BUILD_NAMEDTENSOR + Tensor index_add(Dimname dim, const Tensor & index, const Tensor & source) const; + #endif + Tensor & index_fill_(int64_t dim, const Tensor & index, Scalar value) const; + Tensor index_fill(int64_t dim, const Tensor & index, Scalar value) const; + Tensor & index_fill_(int64_t dim, const Tensor & index, const Tensor & value) const; + Tensor index_fill(int64_t dim, const Tensor & index, const Tensor & value) const; + #ifdef BUILD_NAMEDTENSOR + Tensor & index_fill_(Dimname dim, const Tensor & index, Scalar value) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor & index_fill_(Dimname dim, const Tensor & index, const Tensor & value) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_fill(Dimname dim, const Tensor & index, Scalar value) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor index_fill(Dimname dim, const Tensor & index, const Tensor & value) const; + #endif + Tensor & scatter_(int64_t dim, const Tensor & index, const Tensor & src) const; + Tensor scatter(int64_t dim, const Tensor & index, const Tensor & src) const; + Tensor & scatter_(int64_t dim, const Tensor & index, Scalar value) const; + Tensor scatter(int64_t dim, const Tensor & index, Scalar value) const; + #ifdef BUILD_NAMEDTENSOR + Tensor scatter(Dimname dim, const Tensor & index, const Tensor & src) const; + #endif + #ifdef BUILD_NAMEDTENSOR + Tensor scatter(Dimname dim, const Tensor & index, Scalar value) const; + #endif + Tensor & scatter_add_(int64_t dim, const Tensor & index, const Tensor & src) const; + Tensor scatter_add(int64_t dim, const Tensor & index, const Tensor & src) const; + #ifdef BUILD_NAMEDTENSOR + Tensor scatter_add(Dimname dim, const Tensor & index, const Tensor & src) const; + #endif + Tensor & lt_(Scalar other) const; + Tensor & lt_(const Tensor & other) const; + Tensor & gt_(Scalar other) const; + Tensor & gt_(const Tensor & other) const; + Tensor & le_(Scalar other) const; + Tensor & le_(const Tensor & other) const; + Tensor & ge_(Scalar other) const; + Tensor & ge_(const Tensor & other) const; + Tensor & eq_(Scalar other) const; + Tensor & eq_(const Tensor & other) const; + Tensor & ne_(Scalar other) const; + Tensor & ne_(const Tensor & other) const; + Tensor __and__(Scalar other) const; + Tensor __and__(const Tensor & other) const; + Tensor & __iand__(Scalar other) const; + Tensor & __iand__(const Tensor & other) const; + Tensor __or__(Scalar other) const; + Tensor __or__(const Tensor & other) const; + Tensor & __ior__(Scalar other) const; + Tensor & __ior__(const Tensor & other) const; + Tensor bitwise_xor(Scalar other) const; + Tensor bitwise_xor(const Tensor & other) const; + Tensor & bitwise_xor_(Scalar other) const; + Tensor & bitwise_xor_(const Tensor & other) const; + Tensor __xor__(Scalar other) const; + Tensor __xor__(const Tensor & other) const; + Tensor & __ixor__(Scalar 
other) const; + Tensor & __ixor__(const Tensor & other) const; + Tensor __lshift__(Scalar other) const; + Tensor __lshift__(const Tensor & other) const; + Tensor & __ilshift__(Scalar other) const; + Tensor & __ilshift__(const Tensor & other) const; + Tensor __rshift__(Scalar other) const; + Tensor __rshift__(const Tensor & other) const; + Tensor & __irshift__(Scalar other) const; + Tensor & __irshift__(const Tensor & other) const; + Tensor & lgamma_() const; + Tensor & atan2_(const Tensor & other) const; + Tensor & tril_(int64_t diagonal=0) const; + Tensor & triu_(int64_t diagonal=0) const; + Tensor & digamma_() const; + Tensor & polygamma_(int64_t n) const; + Tensor & renorm_(Scalar p, int64_t dim, Scalar maxnorm) const; + Tensor & pow_(Scalar exponent) const; + Tensor & pow_(const Tensor & exponent) const; + Tensor & lerp_(const Tensor & end, Scalar weight) const; + Tensor & lerp_(const Tensor & end, const Tensor & weight) const; + Tensor & fmod_(Scalar other) const; + Tensor & fmod_(const Tensor & other) const; + Tensor & remainder_(Scalar other) const; + Tensor & remainder_(const Tensor & other) const; + Tensor & addbmm_(const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1) const; + Tensor addbmm(const Tensor & batch1, const Tensor & batch2, Scalar beta=1, Scalar alpha=1) const; + Tensor & addcdiv_(const Tensor & tensor1, const Tensor & tensor2, Scalar value=1) const; + Tensor & random_(int64_t from, int64_t to, Generator * generator=nullptr) const; + Tensor & random_(int64_t to, Generator * generator=nullptr) const; + Tensor & random_(Generator * generator=nullptr) const; + Tensor & uniform_(double from=0, double to=1, Generator * generator=nullptr) const; + Tensor & normal_(double mean=0, double std=1, Generator * generator=nullptr) const; + Tensor & cauchy_(double median=0, double sigma=1, Generator * generator=nullptr) const; + Tensor & log_normal_(double mean=1, double std=2, Generator * generator=nullptr) const; + Tensor & exponential_(double lambd=1, Generator * generator=nullptr) const; + Tensor & geometric_(double p, Generator * generator=nullptr) const; + Tensor diag(int64_t diagonal=0) const; + Tensor cross(const Tensor & other, c10::optional dim=c10::nullopt) const; + Tensor triu(int64_t diagonal=0) const; + Tensor tril(int64_t diagonal=0) const; + Tensor trace() const; + Tensor ne(Scalar other) const; + Tensor ne(const Tensor & other) const; + Tensor eq(Scalar other) const; + Tensor eq(const Tensor & other) const; + Tensor ge(Scalar other) const; + Tensor ge(const Tensor & other) const; + Tensor le(Scalar other) const; + Tensor le(const Tensor & other) const; + Tensor gt(Scalar other) const; + Tensor gt(const Tensor & other) const; + Tensor lt(Scalar other) const; + Tensor lt(const Tensor & other) const; + Tensor take(const Tensor & index) const; + Tensor index_select(int64_t dim, const Tensor & index) const; + #ifdef BUILD_NAMEDTENSOR + Tensor index_select(Dimname dim, const Tensor & index) const; + #endif + Tensor masked_select(const Tensor & mask) const; + Tensor nonzero() const; + std::vector nonzero_numpy() const; + Tensor gather(int64_t dim, const Tensor & index, bool sparse_grad=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor gather(Dimname dim, const Tensor & index, bool sparse_grad=false) const; + #endif + Tensor addcmul(const Tensor & tensor1, const Tensor & tensor2, Scalar value=1) const; + Tensor & addcmul_(const Tensor & tensor1, const Tensor & tensor2, Scalar value=1) const; + Tensor addcdiv(const Tensor & tensor1, const Tensor & 
tensor2, Scalar value=1) const; + std::tuple lstsq(const Tensor & A) const; + std::tuple triangular_solve(const Tensor & A, bool upper=true, bool transpose=false, bool unitriangular=false) const; + std::tuple symeig(bool eigenvectors=false, bool upper=true) const; + std::tuple eig(bool eigenvectors=false) const; + std::tuple svd(bool some=true, bool compute_uv=true) const; + Tensor cholesky(bool upper=false) const; + Tensor cholesky_solve(const Tensor & input2, bool upper=false) const; + std::tuple solve(const Tensor & A) const; + Tensor cholesky_inverse(bool upper=false) const; + std::tuple qr(bool some=true) const; + std::tuple geqrf() const; + Tensor orgqr(const Tensor & input2) const; + Tensor ormqr(const Tensor & input2, const Tensor & input3, bool left=true, bool transpose=false) const; + Tensor lu_solve(const Tensor & LU_data, const Tensor & LU_pivots) const; + Tensor multinomial(int64_t num_samples, bool replacement=false, Generator * generator=nullptr) const; + Tensor lgamma() const; + Tensor digamma() const; + Tensor polygamma(int64_t n) const; + Tensor erfinv() const; + Tensor & erfinv_() const; + Tensor sign() const; + Tensor & sign_() const; + Tensor dist(const Tensor & other, Scalar p=2) const; + Tensor atan2(const Tensor & other) const; + Tensor lerp(const Tensor & end, Scalar weight) const; + Tensor lerp(const Tensor & end, const Tensor & weight) const; + Tensor histc(int64_t bins=100, Scalar min=0, Scalar max=0) const; + Tensor fmod(Scalar other) const; + Tensor fmod(const Tensor & other) const; + Tensor remainder(Scalar other) const; + Tensor remainder(const Tensor & other) const; + Tensor min(const Tensor & other) const; + Tensor min() const; + Tensor max(const Tensor & other) const; + Tensor max() const; + Tensor median() const; + std::tuple sort(int64_t dim=-1, bool descending=false) const; + #ifdef BUILD_NAMEDTENSOR + std::tuple sort(Dimname dim, bool descending=false) const; + #endif + Tensor argsort(int64_t dim=-1, bool descending=false) const; + #ifdef BUILD_NAMEDTENSOR + Tensor argsort(Dimname dim, bool descending=false) const; + #endif + std::tuple topk(int64_t k, int64_t dim=-1, bool largest=true, bool sorted=true) const; + Tensor all() const; + Tensor any() const; + Tensor renorm(Scalar p, int64_t dim, Scalar maxnorm) const; + Tensor unfold(int64_t dimension, int64_t size, int64_t step) const; + bool equal(const Tensor & other) const; + Tensor pow(const Tensor & exponent) const; + Tensor alias() const; + + // We changed .dtype() to return a TypeMeta in #12766. Ideally, we want the + // at::kDouble and its friends to be TypeMeta's, but that hasn't happened yet. + // Before that change, we make this method to maintain BC for C++ usage like + // `x.to(y.dtype)`. + // TODO: remove following two after at::kDouble and its friends are TypeMeta's. + inline Tensor to(caffe2::TypeMeta type_meta, bool non_blocking=false, bool copy=false) const { + return this->to(/*scalar_type=*/typeMetaToScalarType(type_meta), non_blocking, copy); + } + inline Tensor to(Device device, caffe2::TypeMeta type_meta, bool non_blocking=false, bool copy=false) const { + return this->to(device, /*scalar_type=*/typeMetaToScalarType(type_meta), non_blocking, copy); + } + + template + auto m(F func, Args&&... 
params) const -> decltype(func(*this, std::forward(params)...)) { + return func(*this, std::forward(params)...); + } + + /// NOTE: This is similar to the legacy `.data()` function on `Variable`, and is intended + /// to be used from functions that need to access the `Variable`'s equivalent `Tensor` + /// (i.e. `Tensor` that shares the same storage and tensor metadata with the `Variable`). + /// + /// One notable difference with the legacy `.data()` function is that changes to the + /// returned `Tensor`'s tensor metadata (e.g. sizes / strides / storage / storage_offset) + /// will not update the original `Variable`, due to the fact that this function + /// shallow-copies the `Variable`'s underlying TensorImpl. + at::Tensor tensor_data() const; + + /// NOTE: `var.variable_data()` in C++ has the same semantics as `tensor.data` + /// in Python, which create a new `Variable` that shares the same storage and + /// tensor metadata with the original `Variable`, but with a completely new + /// autograd history. + /// + /// NOTE: If we change the tensor metadata (e.g. sizes / strides / + /// storage / storage_offset) of a variable created from `var.variable_data()`, those + /// changes will not update the original variable `var`. In `.variable_data()`, we set + /// `allow_tensor_metadata_change_` to false to make such changes explicitly illegal, + /// in order to prevent users from changing metadata of `var.variable_data()` + /// and expecting the original variable `var` to also be updated. + at::Tensor variable_data() const; + + // Gradient Node and Edges + //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /// Gets the gradient function of the `Variable`. If this is a leaf variable, + /// the pointer returned will be null. + /// + /// For View Variables: + /// Gets the up-to-date grad_fn. If the shared data or base was modified, we + /// re-create the grad_fn to express the up-to-date view relationship between + /// this and the base Variable. + const std::shared_ptr& grad_fn() const; + + // Hooks + //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + template + using hook_return_void_t = c10::guts::enable_if_t::type>::value, unsigned>; + template + using hook_return_var_t = c10::guts::enable_if_t::type, Tensor>::value, unsigned>; + + // Returns the index of the hook in the list which can be used to remove hook + // Register a hook with no return value + template + hook_return_void_t register_hook(T&& hook) const; + // Register a hook with variable return value + template + hook_return_var_t register_hook(T&& hook) const; + +private: + unsigned _register_hook(std::function hook) const; + +public: + + // Remove hook at given position + void remove_hook(unsigned pos) const; + + // View Variables + //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + /// Returns true if this `Variable` is a view of another `Variable`. + bool is_view() const; + + /// Returns the `Variable` that this `Variable` is a view of. If this + /// `Variable` is not a view, throw a `std::runtime_error`. 
+ const Tensor& base() const; + + // Miscellaneous + //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + const std::string& name() const; + +protected: + friend class ::caffe2::Tensor; + + void enforce_invariants(); + c10::intrusive_ptr impl_; +}; + +namespace detail { +// Helper creator for Tensor class which doesn't requires the users to pass +// in an intrusive_ptr instead it just converts the argument passed to +// requested intrusive_ptr type. +template +Tensor make_tensor(Args&&... args) { + return Tensor(c10::make_intrusive(std::forward(args)...)); +} + +} // namespace detail + +static inline TensorTypeId legacyExtractTypeId(const Tensor& t) { + return legacyExtractTypeId(t.type_set()); +} + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/core/TensorMethods.h b/thirdparty/libtorch/include/ATen/core/TensorMethods.h new file mode 100644 index 0000000000..0ac8ffe365 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/TensorMethods.h @@ -0,0 +1,6354 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef USE_STATIC_DISPATCH +#include +#include +#include +#include +#endif + +namespace at { + +struct Quantizer; +// This is temporary typedef to enable Quantizer in aten native function API +// we'll remove them when we are actually exposing Quantizer class +// to frontend +using ConstQuantizerPtr = const c10::intrusive_ptr&; + +inline Tensor Tensor::cpu() const { + return to(options().device(DeviceType::CPU), /*non_blocking*/ false, /*copy*/ false); +} + +// TODO: The Python version also accepts arguments +inline Tensor Tensor::cuda() const { + return to(options().device(DeviceType::CUDA), /*non_blocking*/ false, /*copy*/ false); +} + +inline Tensor Tensor::hip() const { + return to(options().device(DeviceType::HIP), /*non_blocking*/ false, /*copy*/ false); +} + +inline Tensor Tensor::toType(ScalarType t) const { + return to(options().dtype(t), /*non_blocking*/ false, /*copy*/ false); +} + +// TODO: Deprecate me +inline Tensor Tensor::toBackend(Backend b) const { + return to(options().device(backendToDeviceType(b)).layout(layout_from_backend(b)), /*non_blocking*/ false, /*copy*/ false); +} + +inline TensorOptions Tensor::options() const { + return TensorOptions().dtype(dtype()) + .device(device()) + .layout(layout()); +} + +// all static inline to allow for inlining of the non-dynamic part of dispatch +inline void Tensor::backward(const Tensor & gradient, bool keep_graph, bool create_graph) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + TypeDefault::backward(const_cast(*this), gradient, keep_graph, create_graph); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), gradient, keep_graph, create_graph); +#endif +} +inline void Tensor::set_data(const Tensor & new_data) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + TypeDefault::set_data(const_cast(*this), new_data); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_data", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), new_data); +#endif +} +inline Tensor Tensor::data() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::data(const_cast(*this)); +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::data", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_leaf() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_leaf(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_leaf", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::output_nr() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::output_nr(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::output_nr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::_version() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::_version(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_version", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::requires_grad_(bool _requires_grad) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::requires_grad_(const_cast(*this), _requires_grad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::requires_grad_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), _requires_grad); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor & Tensor::rename_(c10::optional names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rename_(const_cast(*this), names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rename_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), names); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::rename(c10::optional names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rename(const_cast(*this), names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rename", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), names); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::align_to(DimnameList names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::align_to(const_cast(*this), names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::align_to", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), names); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::align_to(DimnameList order, int64_t ellipsis_idx) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::align_to(const_cast(*this), order, ellipsis_idx); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::align_to", "ellipsis_idx"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), order, 
ellipsis_idx); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::align_as(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::align_as(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::align_as", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::refine_names(DimnameList names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::refine_names(const_cast(*this), names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::refine_names", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), names); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::unflatten(Dimname dim, IntArrayRef sizes, DimnameList names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unflatten(const_cast(*this), dim, sizes, names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unflatten", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, sizes, names); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::unflatten(int64_t dim, IntArrayRef sizes, DimnameList names) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unflatten(const_cast(*this), dim, sizes, names); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unflatten", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, sizes, names); +#endif +} +#endif +inline Tensor Tensor::abs() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::abs(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::abs", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::abs_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::abs_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::abs_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::angle() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::angle(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::angle", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::real() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::real(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::real", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::imag() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::imag(const_cast(*this)); +#else 
+ static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::imag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::conj() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::conj(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::conj", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::acos() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::acos(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::acos", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::acos_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::acos_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::acos_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::add(const Tensor & other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::add(const_cast(*this), other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::add(const_cast(*this), other, alpha); + break; + default: + AT_ERROR("add not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::add", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor & Tensor::add_(const Tensor & other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::add_(const_cast(*this), other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::add_(const_cast(*this), other, alpha); + break; + default: + AT_ERROR("add_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::add_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor Tensor::add(Scalar other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::add(const_cast(*this), other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::add", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor & Tensor::add_(Scalar other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::add_(const_cast(*this), other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::add_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other, alpha); +#endif +} +inline 
Tensor Tensor::addmv(const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addmv(const_cast(*this), mat, vec, beta, alpha); + break; + default: + AT_ERROR("addmv not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addmv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat, vec, beta, alpha); +#endif +} +inline Tensor & Tensor::addmv_(const Tensor & mat, const Tensor & vec, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addmv_(const_cast(*this), mat, vec, beta, alpha); + break; + default: + AT_ERROR("addmv_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addmv_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mat, vec, beta, alpha); +#endif +} +inline Tensor Tensor::addr(const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addr(const_cast(*this), vec1, vec2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), vec1, vec2, beta, alpha); +#endif +} +inline Tensor & Tensor::addr_(const Tensor & vec1, const Tensor & vec2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addr_(const_cast(*this), vec1, vec2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addr_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), vec1, vec2, beta, alpha); +#endif +} +inline Tensor Tensor::all(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::all", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::all(Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::all", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline bool Tensor::allclose(const Tensor & other, double rtol, double atol, bool equal_nan) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::allclose(const_cast(*this), other, rtol, atol, equal_nan); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::allclose", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, rtol, atol, equal_nan); +#endif +} +inline Tensor Tensor::any(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::any", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::any(Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::any", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::argmax(c10::optional dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argmax(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::argmax", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +inline Tensor Tensor::argmin(c10::optional dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argmin(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::argmin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +inline Tensor Tensor::as_strided(IntArrayRef size, IntArrayRef stride, c10::optional storage_offset) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::as_strided(const_cast(*this), size, stride, storage_offset); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::as_strided(const_cast(*this), size, stride, storage_offset); + break; + default: + AT_ERROR("as_strided not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::as_strided", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), size, stride, storage_offset); +#endif +} +inline Tensor & Tensor::as_strided_(IntArrayRef size, IntArrayRef stride, c10::optional storage_offset) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::as_strided_(const_cast(*this), size, stride, storage_offset); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::as_strided_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), size, stride, storage_offset); +#endif +} +inline Tensor Tensor::asin() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::asin(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::asin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & 
Tensor::asin_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::asin_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::asin_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::atan() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::atan", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::atan_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::atan_(const_cast(*this)); + break; + default: + AT_ERROR("atan_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::atan_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::baddbmm(const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::baddbmm(const_cast(*this), batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("baddbmm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::baddbmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), batch1, batch2, beta, alpha); +#endif +} +inline Tensor & Tensor::baddbmm_(const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::baddbmm_(const_cast(*this), batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("baddbmm_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::baddbmm_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), batch1, batch2, beta, alpha); +#endif +} +inline Tensor Tensor::bernoulli(Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bernoulli(const_cast(*this), generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bernoulli", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), generator); +#endif +} +inline Tensor & Tensor::bernoulli_(const Tensor & p, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::bernoulli_(const_cast(*this), p, generator); + break; + default: + AT_ERROR("bernoulli_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton().findSchema({"aten::bernoulli_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p, generator); +#endif +} +inline Tensor & Tensor::bernoulli_(double p, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::bernoulli_(const_cast(*this), p, generator); + break; + default: + AT_ERROR("bernoulli_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bernoulli_", "float"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p, generator); +#endif +} +inline Tensor Tensor::bernoulli(double p, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bernoulli(const_cast(*this), p, generator); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bernoulli", "p"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p, generator); +#endif +} +inline Tensor Tensor::bincount(const Tensor & weights, int64_t minlength) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::bincount(const_cast(*this), weights, minlength); + break; + default: + AT_ERROR("bincount not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bincount", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), weights, minlength); +#endif +} +inline Tensor Tensor::bitwise_not() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_not(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_not", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::bitwise_not_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_not_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_not_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::logical_not() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_not(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logical_not", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::logical_not_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_not_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logical_not_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::logical_xor(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_xor(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logical_xor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::logical_xor_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logical_xor_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logical_xor_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::bmm(const Tensor & mat2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::bmm(const_cast(*this), mat2); + break; + default: + AT_ERROR("bmm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat2); +#endif +} +inline Tensor Tensor::ceil() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ceil(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ceil", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::ceil_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ceil_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ceil_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline std::vector Tensor::chunk(int64_t chunks, int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::chunk(const_cast(*this), chunks, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::chunk", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t>( + op, const_cast(*this), chunks, dim); +#endif +} +inline Tensor Tensor::clamp(c10::optional min, c10::optional max) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp(const_cast(*this), min, max); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed, c10::optional>( + op, const_cast(*this), min, max); +#endif +} +inline Tensor & Tensor::clamp_(c10::optional min, c10::optional max) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::clamp_(const_cast(*this), min, max); + break; + default: + AT_ERROR("clamp_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional>( + op, const_cast(*this), min, max); 
+#endif +} +inline Tensor Tensor::clamp_max(Scalar max) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp_max(const_cast(*this), max); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp_max", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), max); +#endif +} +inline Tensor & Tensor::clamp_max_(Scalar max) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::clamp_max_(const_cast(*this), max); + break; + default: + AT_ERROR("clamp_max_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp_max_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), max); +#endif +} +inline Tensor Tensor::clamp_min(Scalar min) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::clamp_min(const_cast(*this), min); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp_min", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), min); +#endif +} +inline Tensor & Tensor::clamp_min_(Scalar min) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::clamp_min_(const_cast(*this), min); + break; + default: + AT_ERROR("clamp_min_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clamp_min_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), min); +#endif +} +inline Tensor Tensor::contiguous(MemoryFormat memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::contiguous(const_cast(*this), memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::contiguous", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), memory_format); +#endif +} +inline Tensor & Tensor::copy_(const Tensor & src, bool non_blocking) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::copy_(const_cast(*this), src, non_blocking); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::copy_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), src, non_blocking); +#endif +} +inline Tensor Tensor::cos() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cos(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cos", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::cos_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::cos_(const_cast(*this)); + break; + default: + AT_ERROR("cos_ not implemented for ", at::toString(type_set())); 
+ } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cos_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::cosh() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cosh(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cosh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::cosh_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::cosh_(const_cast(*this)); + break; + default: + AT_ERROR("cosh_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cosh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::cumsum(int64_t dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cumsum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::cumsum(Dimname dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumsum(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cumsum", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#endif +inline Tensor Tensor::cumprod(int64_t dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cumprod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::cumprod(Dimname dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cumprod(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cumprod", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#endif +inline Tensor Tensor::det() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::det(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::det", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::diag_embed(int64_t offset, int64_t dim1, int64_t dim2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diag_embed(const_cast(*this), offset, dim1, dim2); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton().findSchema({"aten::diag_embed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), offset, dim1, dim2); +#endif +} +inline Tensor Tensor::diagflat(int64_t offset) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diagflat(const_cast(*this), offset); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::diagflat", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), offset); +#endif +} +inline Tensor Tensor::diagonal(int64_t offset, int64_t dim1, int64_t dim2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::diagonal(const_cast(*this), offset, dim1, dim2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::diagonal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), offset, dim1, dim2); +#endif +} +inline Tensor & Tensor::fill_diagonal_(Scalar fill_value, bool wrap) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fill_diagonal_(const_cast(*this), fill_value, wrap); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fill_diagonal_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), fill_value, wrap); +#endif +} +inline Tensor Tensor::div(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::div(const_cast(*this), other); + break; + case Backend::SparseCPU: + return SparseCPUType::div(const_cast(*this), other); + break; + default: + AT_ERROR("div not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::div", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::div_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::div_(const_cast(*this), other); + break; + case Backend::SparseCPU: + return SparseCPUType::div_(const_cast(*this), other); + break; + default: + AT_ERROR("div_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::div_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::div(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::div(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::div", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::div_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::div_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::div_", "Scalar"}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::dot(const Tensor & tensor) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::dot(const_cast(*this), tensor); + break; + default: + AT_ERROR("dot not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::dot", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), tensor); +#endif +} +inline Tensor Tensor::new_empty(IntArrayRef size, const TensorOptions & options) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::new_empty(const_cast(*this), size, options); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::new_empty", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, options); +#endif +} +inline Tensor Tensor::new_full(IntArrayRef size, Scalar fill_value, const TensorOptions & options) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::new_full(const_cast(*this), size, fill_value, options); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::new_full", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, fill_value, options); +#endif +} +inline Tensor Tensor::new_zeros(IntArrayRef size, const TensorOptions & options) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::new_zeros(const_cast(*this), size, options); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::new_zeros", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, options); +#endif +} +inline Tensor & Tensor::resize_(IntArrayRef size, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::resize_(const_cast(*this), size, memory_format); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::resize_(const_cast(*this), size, memory_format); + break; + default: + AT_ERROR("resize_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::resize_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), size, memory_format); +#endif +} +inline Tensor Tensor::erf() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::erf(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erf", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::erf_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::erf_(const_cast(*this)); + break; + default: + AT_ERROR("erf_ not implemented for ", at::toString(type_set())); + } +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erf_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::erfc() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::erfc(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erfc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::erfc_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::erfc_(const_cast(*this)); + break; + default: + AT_ERROR("erfc_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erfc_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::exp() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::exp(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::exp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::exp_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::exp_(const_cast(*this)); + break; + default: + AT_ERROR("exp_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::exp_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::expm1() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expm1(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::expm1", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::expm1_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expm1_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::expm1_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::expand(IntArrayRef size, bool implicit) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expand(const_cast(*this), size, implicit); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::expand", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, implicit); +#endif +} +inline Tensor Tensor::expand_as(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::expand_as(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::expand_as", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), 
other); +#endif +} +inline Tensor Tensor::flatten(int64_t start_dim, int64_t end_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(const_cast(*this), start_dim, end_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::flatten", "using_ints"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), start_dim, end_dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::flatten(int64_t start_dim, int64_t end_dim, Dimname out_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(const_cast(*this), start_dim, end_dim, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::flatten", "named_out_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), start_dim, end_dim, out_dim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::flatten(Dimname start_dim, Dimname end_dim, Dimname out_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(const_cast(*this), start_dim, end_dim, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::flatten", "using_names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), start_dim, end_dim, out_dim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::flatten(DimnameList dims, Dimname out_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::flatten(const_cast(*this), dims, out_dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::flatten", "DimnameList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dims, out_dim); +#endif +} +#endif +inline Tensor & Tensor::fill_(Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fill_(const_cast(*this), value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fill_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), value); +#endif +} +inline Tensor & Tensor::fill_(const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fill_(const_cast(*this), value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fill_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), value); +#endif +} +inline Tensor Tensor::floor() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::floor(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::floor", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::floor_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::floor_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::floor_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline 
Tensor Tensor::frac() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frac(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::frac", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::frac_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::frac_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::frac_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::ger(const Tensor & vec2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ger(const_cast(*this), vec2); + break; + default: + AT_ERROR("ger not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ger", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), vec2); +#endif +} +inline Tensor Tensor::fft(int64_t signal_ndim, bool normalized) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::fft(const_cast(*this), signal_ndim, normalized); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), signal_ndim, normalized); +#endif +} +inline Tensor Tensor::ifft(int64_t signal_ndim, bool normalized) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ifft(const_cast(*this), signal_ndim, normalized); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ifft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), signal_ndim, normalized); +#endif +} +inline Tensor Tensor::rfft(int64_t signal_ndim, bool normalized, bool onesided) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rfft(const_cast(*this), signal_ndim, normalized, onesided); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rfft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), signal_ndim, normalized, onesided); +#endif +} +inline Tensor Tensor::irfft(int64_t signal_ndim, bool normalized, bool onesided, IntArrayRef signal_sizes) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::irfft(const_cast(*this), signal_ndim, normalized, onesided, signal_sizes); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::irfft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), signal_ndim, normalized, onesided, signal_sizes); +#endif +} +inline Tensor Tensor::index(TensorList indices) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index(const_cast(*this), indices); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index", "Tensor"}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), indices); +#endif +} +inline Tensor & Tensor::index_copy_(int64_t dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy_(const_cast(*this), dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_copy_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, source); +#endif +} +inline Tensor Tensor::index_copy(int64_t dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy(const_cast(*this), dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_copy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, source); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor & Tensor::index_copy_(Dimname dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy_(const_cast(*this), dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_copy_", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, source); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::index_copy(Dimname dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_copy(const_cast(*this), dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_copy", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, source); +#endif +} +#endif +inline Tensor & Tensor::index_put_(TensorList indices, const Tensor & values, bool accumulate) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_put_(const_cast(*this), indices, values, accumulate); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_put_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), indices, values, accumulate); +#endif +} +inline Tensor Tensor::index_put(TensorList indices, const Tensor & values, bool accumulate) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_put(const_cast(*this), indices, values, accumulate); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_put", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), indices, values, accumulate); +#endif +} +inline Tensor Tensor::inverse() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::inverse(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::inverse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::isclose(const Tensor & other, double rtol, double atol, bool equal_nan) const { 
+#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::isclose(const_cast(*this), other, rtol, atol, equal_nan); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::isclose", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, rtol, atol, equal_nan); +#endif +} +inline bool Tensor::is_distributed() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_distributed(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_distributed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_floating_point() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_floating_point(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_floating_point", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_complex() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_complex(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_complex", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_nonzero() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_nonzero(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_nonzero", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_same_size(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_same_size(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_same_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline bool Tensor::is_signed() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_signed(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_signed", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline std::tuple Tensor::kthvalue(int64_t k, int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kthvalue(const_cast(*this), k, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::kthvalue", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t, bool>( + op, const_cast(*this), k, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::tuple Tensor::kthvalue(int64_t k, Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::kthvalue(const_cast(*this), k, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::kthvalue", 
"dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, Dimname, bool>( + op, const_cast(*this), k, dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::log() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::log_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::log10() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log10(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log10", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::log10_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log10_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log10_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::log1p() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log1p(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log1p", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::log1p_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::log1p_(const_cast(*this)); + break; + case Backend::SparseCPU: + return SparseCPUType::log1p_(const_cast(*this)); + break; + default: + AT_ERROR("log1p_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log1p_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::log2() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log2(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log2", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::log2_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log2_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log2_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::logdet() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logdet(const_cast(*this)); +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logdet", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::log_softmax(int64_t dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_softmax(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log_softmax", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::log_softmax(Dimname dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::log_softmax(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log_softmax", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#endif +inline Tensor Tensor::logsumexp(IntArrayRef dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logsumexp", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::logsumexp(DimnameList dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::logsumexp(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::logsumexp", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::matmul(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matmul(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::matmul", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::matrix_power(int64_t n) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::matrix_power(const_cast(*this), n); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::matrix_power", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), n); +#endif +} +inline std::tuple Tensor::max(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::max", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +inline Tensor Tensor::max_values(IntArrayRef dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_values(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton().findSchema({"aten::max_values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::tuple Tensor::max(Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::max", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::max_values(DimnameList dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::max_values(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::max_values", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::mean(c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mean(const_cast(*this), dtype); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::mean(const_cast(*this), dtype); + break; + default: + AT_ERROR("mean not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mean", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dtype); +#endif +} +inline Tensor Tensor::mean(IntArrayRef dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mean(const_cast(*this), dim, keepdim, dtype); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::mean(const_cast(*this), dim, keepdim, dtype); + break; + default: + AT_ERROR("mean not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mean", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::mean(DimnameList dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mean(const_cast(*this), dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mean", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#endif +inline std::tuple Tensor::median(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::median", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef 
BUILD_NAMEDTENSOR +inline std::tuple Tensor::median(Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::median(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::median", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline std::tuple Tensor::min(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +inline Tensor Tensor::min_values(IntArrayRef dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_values(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min_values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::tuple Tensor::min(Dimname dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::min_values(DimnameList dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::min_values(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min_values", "names"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::mm(const Tensor & mat2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mm(const_cast(*this), mat2); + break; + case Backend::SparseCPU: + return SparseCPUType::mm(const_cast(*this), mat2); + break; + default: + AT_ERROR("mm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat2); +#endif +} +inline std::tuple Tensor::mode(int64_t dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mode", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::tuple Tensor::mode(Dimname dim, bool keepdim) const { 
+#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mode(const_cast(*this), dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mode", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, const_cast(*this), dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::mul(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mul(const_cast(*this), other); + break; + case Backend::SparseCPU: + return SparseCPUType::mul(const_cast(*this), other); + break; + default: + AT_ERROR("mul not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mul", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::mul_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mul_(const_cast(*this), other); + break; + case Backend::SparseCPU: + return SparseCPUType::mul_(const_cast(*this), other); + break; + default: + AT_ERROR("mul_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mul_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::mul(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mul(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mul", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::mul_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mul_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mul_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::mv(const Tensor & vec) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::mv(const_cast(*this), vec); + break; + default: + AT_ERROR("mv not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), vec); +#endif +} +inline Tensor Tensor::mvlgamma(int64_t p) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mvlgamma(const_cast(*this), p); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mvlgamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), p); +#endif +} +inline Tensor & Tensor::mvlgamma_(int64_t p) const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::mvlgamma_(const_cast(*this), p); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::mvlgamma_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p); +#endif +} +inline Tensor Tensor::narrow_copy(int64_t dim, int64_t start, int64_t length) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::narrow_copy(const_cast(*this), dim, start, length); + break; + case Backend::SparseCPU: + return SparseCPUType::narrow_copy(const_cast(*this), dim, start, length); + break; + default: + AT_ERROR("narrow_copy not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::narrow_copy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, start, length); +#endif +} +inline Tensor Tensor::narrow(int64_t dim, int64_t start, int64_t length) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::narrow(const_cast(*this), dim, start, length); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::narrow", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, start, length); +#endif +} +inline Tensor Tensor::permute(IntArrayRef dims) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::permute(const_cast(*this), dims); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::permute", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dims); +#endif +} +inline Tensor Tensor::numpy_T() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::numpy_T(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::numpy_T", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_pinned() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::is_pinned(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_pinned", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::pin_memory() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::pin_memory(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pin_memory", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::pinverse(double rcond) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::pinverse(const_cast(*this), rcond); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pinverse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), rcond); +#endif +} +inline Tensor Tensor::reciprocal() const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::reciprocal(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::reciprocal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::reciprocal_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::reciprocal_(const_cast(*this)); + break; + default: + AT_ERROR("reciprocal_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::reciprocal_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::neg() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::neg(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::neg", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::neg_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::neg_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::neg_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::repeat(IntArrayRef repeats) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::repeat(const_cast(*this), repeats); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::repeat", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), repeats); +#endif +} +inline Tensor Tensor::repeat_interleave(const Tensor & repeats, c10::optional dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::repeat_interleave(const_cast(*this), repeats, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::repeat_interleave", "self_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, const_cast(*this), repeats, dim); +#endif +} +inline Tensor Tensor::repeat_interleave(int64_t repeats, c10::optional dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::repeat_interleave(const_cast(*this), repeats, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::repeat_interleave", "self_int"}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, const_cast(*this), repeats, dim); +#endif +} +inline Tensor Tensor::reshape(IntArrayRef shape) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::reshape(const_cast(*this), shape); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::reshape", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), shape); +#endif +} +inline Tensor Tensor::reshape_as(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::reshape_as(const_cast(*this), other); 
+#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::reshape_as", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::round() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::round(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::round", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::round_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::round_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::round_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::relu() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::relu(const_cast(*this)); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::relu(const_cast(*this)); + break; + default: + AT_ERROR("relu not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::relu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::relu_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::relu_(const_cast(*this)); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::relu_(const_cast(*this)); + break; + default: + AT_ERROR("relu_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::relu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::prelu(const Tensor & weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::prelu(const_cast(*this), weight); + break; + default: + AT_ERROR("prelu not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::prelu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), weight); +#endif +} +inline std::tuple Tensor::prelu_backward(const Tensor & grad_output, const Tensor & weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::prelu_backward(grad_output, const_cast(*this), weight); + break; + default: + AT_ERROR("prelu_backward not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::prelu_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, const Tensor &>( + op, grad_output, const_cast(*this), weight); +#endif +} +inline Tensor 
Tensor::hardshrink(Scalar lambd) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::hardshrink(const_cast(*this), lambd); + break; + default: + AT_ERROR("hardshrink not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::hardshrink", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), lambd); +#endif +} +inline Tensor Tensor::hardshrink_backward(const Tensor & grad_out, Scalar lambd) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::hardshrink_backward(grad_out, const_cast(*this), lambd); + break; + default: + AT_ERROR("hardshrink_backward not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::hardshrink_backward", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, grad_out, const_cast(*this), lambd); +#endif +} +inline Tensor Tensor::rsqrt() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsqrt(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rsqrt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::rsqrt_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rsqrt_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rsqrt_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::select(Dimname dim, int64_t index) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::select(const_cast(*this), dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::select", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index); +#endif +} +#endif +inline Tensor Tensor::select(int64_t dim, int64_t index) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::select(const_cast(*this), dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::select", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index); +#endif +} +inline Tensor Tensor::sigmoid() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::sigmoid(const_cast(*this)); + break; + default: + AT_ERROR("sigmoid not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sigmoid", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::sigmoid_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::sigmoid_(const_cast(*this)); + break; + default: + AT_ERROR("sigmoid_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sigmoid_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::sin() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sin(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sin", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::sin_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sin_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sin_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::sinh() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sinh(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sinh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::sinh_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sinh_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sinh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::detach() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::detach(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::detach", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::detach_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::detach_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::detach_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::size(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::size(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::size", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline int64_t Tensor::size(Dimname dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::size(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::size", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#endif +inline Tensor Tensor::slice(int64_t dim, int64_t start, int64_t end, int64_t step) const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slice(const_cast(*this), dim, start, end, step); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::slice", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, start, end, step); +#endif +} +inline std::tuple Tensor::slogdet() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::slogdet(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::slogdet", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::smm(const Tensor & mat2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::smm(const_cast(*this), mat2); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::smm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat2); +#endif +} +inline Tensor Tensor::softmax(int64_t dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::softmax(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::softmax", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::softmax(Dimname dim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::softmax(const_cast(*this), dim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::softmax", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, dtype); +#endif +} +#endif +inline std::vector Tensor::split(int64_t split_size, int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::split(const_cast(*this), split_size, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::split", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t>( + op, const_cast(*this), split_size, dim); +#endif +} +inline std::vector Tensor::split_with_sizes(IntArrayRef split_sizes, int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::split_with_sizes(const_cast(*this), split_sizes, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::split_with_sizes", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, IntArrayRef, int64_t>( + op, const_cast(*this), split_sizes, dim); +#endif +} +inline Tensor Tensor::squeeze() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::squeeze(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::squeeze(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::squeeze(Dimname dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#endif +inline Tensor & Tensor::squeeze_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::squeeze_(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze_(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze_", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor & Tensor::squeeze_(Dimname dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::squeeze_(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::squeeze_", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#endif +inline Tensor Tensor::sspaddmm(const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sspaddmm(const_cast(*this), mat1, mat2, beta, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sspaddmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat1, mat2, beta, alpha); +#endif +} +inline Tensor Tensor::stft(int64_t n_fft, c10::optional hop_length, c10::optional win_length, const Tensor & window, bool normalized, bool onesided) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stft(const_cast(*this), n_fft, hop_length, win_length, window, normalized, onesided); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::stft", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, c10::optional, const Tensor &, bool, bool>( + op, const_cast(*this), n_fft, hop_length, win_length, window, normalized, onesided); +#endif +} +inline int64_t Tensor::stride(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stride(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::stride", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline int64_t Tensor::stride(Dimname dim) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::stride(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::stride", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +#endif +inline Tensor Tensor::sum(c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(const_cast(*this), dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sum", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dtype); +#endif +} +inline Tensor Tensor::sum(IntArrayRef dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(const_cast(*this), dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sum", "dim_IntList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::sum(DimnameList dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum(const_cast(*this), dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sum", "dim_DimnameList"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#endif +inline Tensor Tensor::sum_to_size(IntArrayRef size) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sum_to_size(const_cast(*this), size); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sum_to_size", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size); +#endif +} +inline Tensor Tensor::sqrt() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sqrt(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sqrt", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::sqrt_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sqrt_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sqrt_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::std(bool unbiased) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(const_cast(*this), unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::std", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), unbiased); +#endif +} +inline Tensor Tensor::std(IntArrayRef dim, bool unbiased, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(const_cast(*this), dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::std", "dim"}).value(); + 
return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::std(DimnameList dim, bool unbiased, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::std(const_cast(*this), dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::std", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, unbiased, keepdim); +#endif +} +#endif +inline Tensor Tensor::prod(c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(const_cast(*this), dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::prod", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dtype); +#endif +} +inline Tensor Tensor::prod(int64_t dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(const_cast(*this), dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::prod", "dim_int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::prod(Dimname dim, bool keepdim, c10::optional dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::prod(const_cast(*this), dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::prod", "dim_Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dim, keepdim, dtype); +#endif +} +#endif +inline Tensor Tensor::t() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::t(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::t", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::t_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::t_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::t_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::tan() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tan(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tan", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::tan_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::tan_(const_cast(*this)); + break; + default: + AT_ERROR("tan_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tan_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, 
const_cast(*this)); +#endif +} +inline Tensor Tensor::tanh() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tanh(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tanh", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::tanh_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::tanh_(const_cast(*this)); + break; + default: + AT_ERROR("tanh_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tanh_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::transpose(int64_t dim0, int64_t dim1) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::transpose(const_cast(*this), dim0, dim1); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::transpose", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim0, dim1); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::transpose(Dimname dim0, Dimname dim1) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::transpose(const_cast(*this), dim0, dim1); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::transpose", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim0, dim1); +#endif +} +#endif +inline Tensor & Tensor::transpose_(int64_t dim0, int64_t dim1) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::transpose_(const_cast(*this), dim0, dim1); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::transpose_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim0, dim1); +#endif +} +inline Tensor Tensor::flip(IntArrayRef dims) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::flip(const_cast(*this), dims); + break; + default: + AT_ERROR("flip not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::flip", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dims); +#endif +} +inline Tensor Tensor::roll(IntArrayRef shifts, IntArrayRef dims) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::roll(const_cast(*this), shifts, dims); + break; + default: + AT_ERROR("roll not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::roll", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), shifts, dims); +#endif +} +inline Tensor Tensor::rot90(int64_t k, IntArrayRef dims) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::rot90(const_cast(*this), k, dims); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::rot90", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), k, dims); +#endif +} +inline Tensor Tensor::trunc() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trunc(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::trunc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::trunc_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::trunc_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::trunc_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::type_as(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::type_as(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::type_as", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::unsqueeze(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unsqueeze(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unsqueeze", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +inline Tensor & Tensor::unsqueeze_(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unsqueeze_(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unsqueeze_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim); +#endif +} +inline Tensor Tensor::var(bool unbiased) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var(const_cast(*this), unbiased); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::var", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), unbiased); +#endif +} +inline Tensor Tensor::var(IntArrayRef dim, bool unbiased, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var(const_cast(*this), dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::var", "dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, unbiased, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::var(DimnameList dim, bool unbiased, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::var(const_cast(*this), dim, unbiased, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::var", "names_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, 
unbiased, keepdim); +#endif +} +#endif +inline Tensor Tensor::view_as(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::view_as(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::view_as", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::where(const Tensor & condition, const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::where(condition, const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::where", "self"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, condition, const_cast(*this), other); +#endif +} +inline Tensor Tensor::norm(c10::optional p, ScalarType dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "ScalarOpt_dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, ScalarType>( + op, const_cast(*this), p, dtype); +#endif +} +inline Tensor Tensor::norm(Scalar p) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), p); +#endif +} +inline Tensor Tensor::norm(c10::optional p, IntArrayRef dim, bool keepdim, ScalarType dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "ScalarOpt_dim_dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, bool, ScalarType>( + op, const_cast(*this), p, dim, keepdim, dtype); +#endif +} +inline Tensor Tensor::norm(c10::optional p, IntArrayRef dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p, dim, keepdim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "ScalarOpt_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, IntArrayRef, bool>( + op, const_cast(*this), p, dim, keepdim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::norm(c10::optional p, DimnameList dim, bool keepdim, ScalarType dtype) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p, dim, keepdim, dtype); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "names_ScalarOpt_dim_dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool, ScalarType>( + op, const_cast(*this), p, dim, keepdim, dtype); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::norm(c10::optional p, DimnameList dim, bool keepdim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::norm(const_cast(*this), p, dim, keepdim); +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::norm", "names_ScalarOpt_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, DimnameList, bool>( + op, const_cast(*this), p, dim, keepdim); +#endif +} +#endif +inline Tensor Tensor::clone(c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::clone(const_cast(*this), memory_format); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::clone(const_cast(*this), memory_format); + break; + case Backend::SparseCPU: + return SparseCPUType::clone(const_cast(*this), memory_format); + break; + default: + AT_ERROR("clone not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::clone", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), memory_format); +#endif +} +inline Tensor & Tensor::resize_as_(const Tensor & the_template, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::resize_as_(const_cast(*this), the_template, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::resize_as_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), the_template, memory_format); +#endif +} +inline Tensor Tensor::pow(Scalar exponent) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::pow(const_cast(*this), exponent); + break; + case Backend::SparseCPU: + return SparseCPUType::pow(const_cast(*this), exponent); + break; + default: + AT_ERROR("pow not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pow", "Tensor_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), exponent); +#endif +} +inline Tensor & Tensor::zero_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::zero_(const_cast(*this)); + break; + case Backend::SparseCPU: + return SparseCPUType::zero_(const_cast(*this)); + break; + default: + AT_ERROR("zero_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::zero_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::sub(const Tensor & other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::sub(const_cast(*this), other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::sub(const_cast(*this), other, alpha); + break; + default: + AT_ERROR("sub not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sub", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), 
other, alpha); +#endif +} +inline Tensor & Tensor::sub_(const Tensor & other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::sub_(const_cast(*this), other, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::sub_(const_cast(*this), other, alpha); + break; + default: + AT_ERROR("sub_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sub_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor Tensor::sub(Scalar other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sub(const_cast(*this), other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sub", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor & Tensor::sub_(Scalar other, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sub_(const_cast(*this), other, alpha); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sub_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other, alpha); +#endif +} +inline Tensor Tensor::addmm(const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addmm(const_cast(*this), mat1, mat2, beta, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::addmm(const_cast(*this), mat1, mat2, beta, alpha); + break; + default: + AT_ERROR("addmm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mat1, mat2, beta, alpha); +#endif +} +inline Tensor & Tensor::addmm_(const Tensor & mat1, const Tensor & mat2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addmm_(const_cast(*this), mat1, mat2, beta, alpha); + break; + case Backend::SparseCPU: + return SparseCPUType::addmm_(const_cast(*this), mat1, mat2, beta, alpha); + break; + default: + AT_ERROR("addmm_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addmm_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mat1, mat2, beta, alpha); +#endif +} +inline Tensor & Tensor::sparse_resize_(IntArrayRef size, int64_t sparse_dim, int64_t dense_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::sparse_resize_(const_cast(*this), size, sparse_dim, dense_dim); + break; + default: + AT_ERROR("sparse_resize_ not 
implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sparse_resize_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, sparse_dim, dense_dim); +#endif +} +inline Tensor & Tensor::sparse_resize_and_clear_(IntArrayRef size, int64_t sparse_dim, int64_t dense_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::sparse_resize_and_clear_(const_cast(*this), size, sparse_dim, dense_dim); + break; + default: + AT_ERROR("sparse_resize_and_clear_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sparse_resize_and_clear_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size, sparse_dim, dense_dim); +#endif +} +inline Tensor Tensor::sparse_mask(const Tensor & mask) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::sparse_mask(const_cast(*this), mask); + break; + default: + AT_ERROR("sparse_mask not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sparse_mask", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mask); +#endif +} +inline Tensor Tensor::to_dense() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::to_dense(const_cast(*this)); + break; + default: + AT_ERROR("to_dense not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to_dense", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::sparse_dim() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::sparse_dim(const_cast(*this)); + break; + default: + AT_ERROR("sparse_dim not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sparse_dim", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::_dimI() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_dimI(const_cast(*this)); + break; + default: + AT_ERROR("_dimI not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_dimI", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::dense_dim() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case 
Backend::SparseCPU: + return SparseCPUType::dense_dim(const_cast(*this)); + break; + default: + AT_ERROR("dense_dim not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::dense_dim", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::_dimV() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_dimV(const_cast(*this)); + break; + default: + AT_ERROR("_dimV not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_dimV", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::_nnz() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_nnz(const_cast(*this)); + break; + default: + AT_ERROR("_nnz not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_nnz", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::coalesce() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::coalesce(const_cast(*this)); + break; + default: + AT_ERROR("coalesce not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::coalesce", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline bool Tensor::is_coalesced() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::is_coalesced(const_cast(*this)); + break; + default: + AT_ERROR("is_coalesced not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_coalesced", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::_indices() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_indices(const_cast(*this)); + break; + default: + AT_ERROR("_indices not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::_values() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_values(const_cast(*this)); + break; + default: + AT_ERROR("_values not implemented for ", 
at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::_coalesced_(bool coalesced) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::_coalesced_(const_cast(*this), coalesced); + break; + default: + AT_ERROR("_coalesced_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::_coalesced_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), coalesced); +#endif +} +inline Tensor Tensor::indices() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::indices(const_cast(*this)); + break; + default: + AT_ERROR("indices not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::indices", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::values() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::SparseCPU: + return SparseCPUType::values(const_cast(*this)); + break; + default: + AT_ERROR("values not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::values", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline std::vector Tensor::unbind(int64_t dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unbind(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unbind", "int"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t>( + op, const_cast(*this), dim); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::vector Tensor::unbind(Dimname dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::unbind(const_cast(*this), dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unbind", "Dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname>( + op, const_cast(*this), dim); +#endif +} +#endif +inline Tensor Tensor::to_sparse(int64_t sparse_dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::to_sparse(const_cast(*this), sparse_dim); + break; + default: + AT_ERROR("to_sparse not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to_sparse", "sparse_dim"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), sparse_dim); +#endif +} +inline Tensor Tensor::to_sparse() const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::to_sparse(const_cast(*this)); + break; + default: + AT_ERROR("to_sparse not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to_sparse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::to_mkldnn() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::to_mkldnn(const_cast(*this)); + break; + default: + AT_ERROR("to_mkldnn not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to_mkldnn", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::dequantize() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::dequantize(const_cast(*this)); + break; + default: + AT_ERROR("dequantize not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::dequantize", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline double Tensor::q_scale() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_scale(const_cast(*this)); + break; + default: + AT_ERROR("q_scale not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::q_scale", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::q_zero_point() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_zero_point(const_cast(*this)); + break; + default: + AT_ERROR("q_zero_point not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::q_zero_point", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::q_per_channel_scales() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_scales(const_cast(*this)); + break; + default: + AT_ERROR("q_per_channel_scales not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::q_per_channel_scales", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::q_per_channel_zero_points() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_zero_points(const_cast(*this)); + break; + default: + AT_ERROR("q_per_channel_zero_points not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::q_per_channel_zero_points", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline int64_t Tensor::q_per_channel_axis() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::q_per_channel_axis(const_cast(*this)); + break; + default: + AT_ERROR("q_per_channel_axis not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::q_per_channel_axis", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::int_repr() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::int_repr(const_cast(*this)); + break; + default: + AT_ERROR("int_repr not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::int_repr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline QScheme Tensor::qscheme() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::qscheme(const_cast(*this)); + break; + default: + AT_ERROR("qscheme not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::qscheme", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::to(const TensorOptions & options, bool non_blocking, bool copy, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to(const_cast(*this), options, non_blocking, copy, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to", "dtype_layout"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), options, non_blocking, copy, memory_format); +#endif +} +inline Tensor Tensor::to(Device device, ScalarType dtype, bool non_blocking, bool copy, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to(const_cast(*this), device, dtype, non_blocking, copy, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to", "device"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), device, dtype, non_blocking, copy, memory_format); +#endif +} +inline Tensor Tensor::to(ScalarType dtype, bool non_blocking, bool copy, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to(const_cast(*this), dtype, non_blocking, copy, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to", "dtype"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), dtype, non_blocking, copy, memory_format); +#endif +} +inline Tensor Tensor::to(const Tensor & other, bool non_blocking, bool copy, c10::optional memory_format) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::to(const_cast(*this), other, non_blocking, copy, memory_format); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::to", "other"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly>( + op, const_cast(*this), other, non_blocking, copy, memory_format); +#endif +} +inline Scalar Tensor::item() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::item(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::item", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::set_(Storage source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::set_(const_cast(*this), source); + break; + default: + AT_ERROR("set_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_", "source_Storage"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), source); +#endif +} +inline Tensor & Tensor::set_(Storage source, int64_t storage_offset, IntArrayRef size, IntArrayRef stride) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::set_(const_cast(*this), source, storage_offset, size, stride); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::set_(const_cast(*this), source, storage_offset, size, stride); + break; + default: + AT_ERROR("set_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_", "source_Storage_storage_offset"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), source, storage_offset, size, stride); +#endif +} +inline Tensor & Tensor::set_(const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::set_(const_cast(*this), source); + break; + default: + AT_ERROR("set_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_", "source_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), source); +#endif +} +inline Tensor & Tensor::set_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return 
CPUType::set_(const_cast(*this)); + break; + default: + AT_ERROR("set_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::set_quantizer_(ConstQuantizerPtr quantizer) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::QuantizedCPU: + return QuantizedCPUType::set_quantizer_(const_cast(*this), quantizer); + break; + default: + AT_ERROR("set_quantizer_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::set_quantizer_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), quantizer); +#endif +} +inline bool Tensor::is_set_to(const Tensor & tensor) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::is_set_to(const_cast(*this), tensor); + break; + default: + AT_ERROR("is_set_to not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::is_set_to", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), tensor); +#endif +} +inline Tensor & Tensor::masked_fill_(const Tensor & mask, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::masked_fill_(const_cast(*this), mask, value); + break; + default: + AT_ERROR("masked_fill_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_fill_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mask, value); +#endif +} +inline Tensor Tensor::masked_fill(const Tensor & mask, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_fill(const_cast(*this), mask, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_fill", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mask, value); +#endif +} +inline Tensor & Tensor::masked_fill_(const Tensor & mask, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::masked_fill_(const_cast(*this), mask, value); + break; + default: + AT_ERROR("masked_fill_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_fill_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mask, value); +#endif +} +inline Tensor Tensor::masked_fill(const Tensor & mask, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_fill(const_cast(*this), mask, value); +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_fill", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mask, value); +#endif +} +inline Tensor & Tensor::masked_scatter_(const Tensor & mask, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::masked_scatter_(const_cast(*this), mask, source); + break; + default: + AT_ERROR("masked_scatter_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_scatter_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mask, source); +#endif +} +inline Tensor Tensor::masked_scatter(const Tensor & mask, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::masked_scatter(const_cast(*this), mask, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_scatter", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mask, source); +#endif +} +inline Tensor Tensor::view(IntArrayRef size) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::view(const_cast(*this), size); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::view(const_cast(*this), size); + break; + default: + AT_ERROR("view not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::view", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), size); +#endif +} +inline Tensor & Tensor::put_(const Tensor & index, const Tensor & source, bool accumulate) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::put_(const_cast(*this), index, source, accumulate); + break; + default: + AT_ERROR("put_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::put_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), index, source, accumulate); +#endif +} +inline Tensor & Tensor::index_add_(int64_t dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::index_add_(const_cast(*this), dim, index, source); + break; + default: + AT_ERROR("index_add_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_add_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, source); +#endif +} +inline Tensor Tensor::index_add(int64_t dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_add(const_cast(*this), 
dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_add", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, source); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::index_add(Dimname dim, const Tensor & index, const Tensor & source) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_add(const_cast(*this), dim, index, source); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_add", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, source); +#endif +} +#endif +inline Tensor & Tensor::index_fill_(int64_t dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::index_fill_(const_cast(*this), dim, index, value); + break; + default: + AT_ERROR("index_fill_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill_", "int_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +inline Tensor Tensor::index_fill(int64_t dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill", "int_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, value); +#endif +} +inline Tensor & Tensor::index_fill_(int64_t dim, const Tensor & index, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::index_fill_(const_cast(*this), dim, index, value); + break; + default: + AT_ERROR("index_fill_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill_", "int_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +inline Tensor Tensor::index_fill(int64_t dim, const Tensor & index, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill", "int_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, value); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor & Tensor::index_fill_(Dimname dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill_(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill_", "Dimname_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, 
value); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor & Tensor::index_fill_(Dimname dim, const Tensor & index, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill_(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill_", "Dimname_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::index_fill(Dimname dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill", "Dimname_Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::index_fill(Dimname dim, const Tensor & index, const Tensor & value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_fill(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_fill", "Dimname_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +#endif +inline Tensor & Tensor::scatter_(int64_t dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::scatter_(const_cast(*this), dim, index, src); + break; + default: + AT_ERROR("scatter_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter_", "src"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, src); +#endif +} +inline Tensor Tensor::scatter(int64_t dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(const_cast(*this), dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter", "src"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, src); +#endif +} +inline Tensor & Tensor::scatter_(int64_t dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::scatter_(const_cast(*this), dim, index, value); + break; + default: + AT_ERROR("scatter_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter_", "value"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +inline Tensor Tensor::scatter(int64_t dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return 
TypeDefault::scatter(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter", "value"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, value); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::scatter(Dimname dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(const_cast(*this), dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter", "dimname_src"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, src); +#endif +} +#endif +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::scatter(Dimname dim, const Tensor & index, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter(const_cast(*this), dim, index, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter", "dimname_value"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, value); +#endif +} +#endif +inline Tensor & Tensor::scatter_add_(int64_t dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::scatter_add_(const_cast(*this), dim, index, src); + break; + default: + AT_ERROR("scatter_add_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter_add_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, src); +#endif +} +inline Tensor Tensor::scatter_add(int64_t dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter_add(const_cast(*this), dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter_add", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, src); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::scatter_add(Dimname dim, const Tensor & index, const Tensor & src) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::scatter_add(const_cast(*this), dim, index, src); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::scatter_add", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, src); +#endif +} +#endif +inline Tensor & Tensor::lt_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lt_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lt_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::lt_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lt_(const_cast(*this), other); +#else + static c10::OperatorHandle 
op = c10::Dispatcher::singleton().findSchema({"aten::lt_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::gt_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gt_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gt_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::gt_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gt_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gt_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::le_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::le_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::le_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::le_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::le_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::le_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::ge_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ge_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ge_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::ge_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ge_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ge_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::eq_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::eq_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::eq_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::eq_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::eq_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::eq_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::ne_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ne_(const_cast(*this), other); +#else + 
static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ne_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::ne_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::ne_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ne_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__and__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__and__(const_cast(*this), other); + break; + default: + AT_ERROR("__and__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__and__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__and__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__and__(const_cast(*this), other); + break; + default: + AT_ERROR("__and__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__and__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__iand__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__iand__(const_cast(*this), other); + break; + default: + AT_ERROR("__iand__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__iand__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__iand__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__iand__(const_cast(*this), other); + break; + default: + AT_ERROR("__iand__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__iand__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__or__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__or__(const_cast(*this), other); + break; + default: + AT_ERROR("__or__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__or__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); 
+#endif +} +inline Tensor Tensor::__or__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__or__(const_cast(*this), other); + break; + default: + AT_ERROR("__or__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__or__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ior__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__ior__(const_cast(*this), other); + break; + default: + AT_ERROR("__ior__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ior__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ior__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__ior__(const_cast(*this), other); + break; + default: + AT_ERROR("__ior__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ior__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::bitwise_xor(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_xor", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::bitwise_xor(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_xor", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::bitwise_xor_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_xor_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::bitwise_xor_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::bitwise_xor_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::bitwise_xor_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__xor__(Scalar other) const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__xor__(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__xor__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__xor__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__xor__(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__xor__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ixor__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__ixor__(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ixor__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ixor__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::__ixor__(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ixor__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__lshift__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__lshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__lshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__lshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__lshift__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__lshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__lshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__lshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ilshift__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__ilshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__ilshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ilshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__ilshift__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__ilshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__ilshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__ilshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__rshift__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__rshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__rshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__rshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::__rshift__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__rshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__rshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__rshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__irshift__(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__irshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__irshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__irshift__", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::__irshift__(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::__irshift__(const_cast(*this), other); + break; + default: + AT_ERROR("__irshift__ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::__irshift__", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::lgamma_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lgamma_(const_cast(*this)); + break; + default: + AT_ERROR("lgamma_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lgamma_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::atan2_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan2_(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::atan2_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::tril_(int64_t diagonal) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::tril_(const_cast(*this), diagonal); + break; + default: + AT_ERROR("tril_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tril_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), diagonal); +#endif +} +inline Tensor & Tensor::triu_(int64_t diagonal) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::triu_(const_cast(*this), diagonal); + break; + default: + AT_ERROR("triu_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::triu_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), diagonal); +#endif +} +inline Tensor & Tensor::digamma_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::digamma_(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::digamma_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::polygamma_(int64_t n) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::polygamma_(const_cast(*this), n); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::polygamma_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), n); +#endif +} +inline Tensor & Tensor::renorm_(Scalar p, int64_t dim, Scalar maxnorm) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::renorm_(const_cast(*this), p, dim, maxnorm); + break; + default: + AT_ERROR("renorm_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::renorm_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p, dim, maxnorm); +#endif +} +inline Tensor & Tensor::pow_(Scalar exponent) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::pow_(const_cast(*this), exponent); + break; + default: + AT_ERROR("pow_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pow_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), exponent); +#endif +} +inline Tensor & Tensor::pow_(const Tensor & exponent) const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::pow_(const_cast(*this), exponent); + break; + default: + AT_ERROR("pow_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pow_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), exponent); +#endif +} +inline Tensor & Tensor::lerp_(const Tensor & end, Scalar weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lerp_(const_cast(*this), end, weight); + break; + default: + AT_ERROR("lerp_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lerp_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), end, weight); +#endif +} +inline Tensor & Tensor::lerp_(const Tensor & end, const Tensor & weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lerp_(const_cast(*this), end, weight); + break; + default: + AT_ERROR("lerp_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lerp_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), end, weight); +#endif +} +inline Tensor & Tensor::fmod_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::fmod_(const_cast(*this), other); + break; + default: + AT_ERROR("fmod_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fmod_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::fmod_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::fmod_(const_cast(*this), other); + break; + default: + AT_ERROR("fmod_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fmod_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::remainder_(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::remainder_(const_cast(*this), other); + break; + default: + AT_ERROR("remainder_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::remainder_", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & 
Tensor::remainder_(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::remainder_(const_cast(*this), other); + break; + default: + AT_ERROR("remainder_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::remainder_", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), other); +#endif +} +inline Tensor & Tensor::addbmm_(const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addbmm_(const_cast(*this), batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("addbmm_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addbmm_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), batch1, batch2, beta, alpha); +#endif +} +inline Tensor Tensor::addbmm(const Tensor & batch1, const Tensor & batch2, Scalar beta, Scalar alpha) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::addbmm(const_cast(*this), batch1, batch2, beta, alpha); + break; + default: + AT_ERROR("addbmm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addbmm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), batch1, batch2, beta, alpha); +#endif +} +inline Tensor & Tensor::addcdiv_(const Tensor & tensor1, const Tensor & tensor2, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcdiv_(const_cast(*this), tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addcdiv_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), tensor1, tensor2, value); +#endif +} +inline Tensor & Tensor::random_(int64_t from, int64_t to, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::random_(const_cast(*this), from, to, generator); + break; + default: + AT_ERROR("random_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::random_", "from"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), from, to, generator); +#endif +} +inline Tensor & Tensor::random_(int64_t to, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::random_(const_cast(*this), to, generator); + break; + default: + AT_ERROR("random_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = 
c10::Dispatcher::singleton().findSchema({"aten::random_", "to"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), to, generator); +#endif +} +inline Tensor & Tensor::random_(Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::random_(const_cast(*this), generator); + break; + default: + AT_ERROR("random_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::random_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), generator); +#endif +} +inline Tensor & Tensor::uniform_(double from, double to, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::uniform_(const_cast(*this), from, to, generator); + break; + default: + AT_ERROR("uniform_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::uniform_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), from, to, generator); +#endif +} +inline Tensor & Tensor::normal_(double mean, double std, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::normal_(const_cast(*this), mean, std, generator); + break; + default: + AT_ERROR("normal_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::normal_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mean, std, generator); +#endif +} +inline Tensor & Tensor::cauchy_(double median, double sigma, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::cauchy_(const_cast(*this), median, sigma, generator); + break; + default: + AT_ERROR("cauchy_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cauchy_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), median, sigma, generator); +#endif +} +inline Tensor & Tensor::log_normal_(double mean, double std, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::log_normal_(const_cast(*this), mean, std, generator); + break; + default: + AT_ERROR("log_normal_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::log_normal_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), mean, std, generator); +#endif +} +inline Tensor & Tensor::exponential_(double lambd, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode 
_var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::exponential_(const_cast(*this), lambd, generator); + break; + default: + AT_ERROR("exponential_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::exponential_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), lambd, generator); +#endif +} +inline Tensor & Tensor::geometric_(double p, Generator * generator) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::geometric_(const_cast(*this), p, generator); + break; + default: + AT_ERROR("geometric_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::geometric_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), p, generator); +#endif +} +inline Tensor Tensor::diag(int64_t diagonal) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::diag(const_cast(*this), diagonal); + break; + default: + AT_ERROR("diag not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::diag", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), diagonal); +#endif +} +inline Tensor Tensor::cross(const Tensor & other, c10::optional dim) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cross(const_cast(*this), other, dim); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cross", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed>( + op, const_cast(*this), other, dim); +#endif +} +inline Tensor Tensor::triu(int64_t diagonal) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triu(const_cast(*this), diagonal); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::triu", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), diagonal); +#endif +} +inline Tensor Tensor::tril(int64_t diagonal) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::tril(const_cast(*this), diagonal); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::tril", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), diagonal); +#endif +} +inline Tensor Tensor::trace() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::trace(const_cast(*this)); + break; + default: + AT_ERROR("trace not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::trace", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::ne(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + 
at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ne(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne(const_cast(*this), other); + break; + default: + AT_ERROR("ne not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ne", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::ne(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ne(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ne(const_cast(*this), other); + break; + default: + AT_ERROR("ne not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ne", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::eq(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::eq(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::eq(const_cast(*this), other); + break; + default: + AT_ERROR("eq not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::eq", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::eq(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::eq(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::eq(const_cast(*this), other); + break; + default: + AT_ERROR("eq not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::eq", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::ge(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ge(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::ge(const_cast(*this), other); + break; + default: + AT_ERROR("ge not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ge", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::ge(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ge(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + 
return QuantizedCPUType::ge(const_cast(*this), other); + break; + default: + AT_ERROR("ge not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ge", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::le(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::le(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le(const_cast(*this), other); + break; + default: + AT_ERROR("le not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::le", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::le(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::le(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::le(const_cast(*this), other); + break; + default: + AT_ERROR("le not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::le", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::gt(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::gt(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt(const_cast(*this), other); + break; + default: + AT_ERROR("gt not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gt", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::gt(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::gt(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::gt(const_cast(*this), other); + break; + default: + AT_ERROR("gt not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gt", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::lt(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lt(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::lt(const_cast(*this), other); + break; + default: + AT_ERROR("lt not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lt", 
"Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::lt(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lt(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::lt(const_cast(*this), other); + break; + default: + AT_ERROR("lt not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lt", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::take(const Tensor & index) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::take(const_cast(*this), index); + break; + default: + AT_ERROR("take not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::take", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), index); +#endif +} +inline Tensor Tensor::index_select(int64_t dim, const Tensor & index) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::index_select(const_cast(*this), dim, index); + break; + case Backend::SparseCPU: + return SparseCPUType::index_select(const_cast(*this), dim, index); + break; + default: + AT_ERROR("index_select not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_select", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::index_select(Dimname dim, const Tensor & index) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::index_select(const_cast(*this), dim, index); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::index_select", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index); +#endif +} +#endif +inline Tensor Tensor::masked_select(const Tensor & mask) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::masked_select(const_cast(*this), mask); + break; + default: + AT_ERROR("masked_select not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::masked_select", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), mask); +#endif +} +inline Tensor Tensor::nonzero() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::nonzero(const_cast(*this)); + break; + default: + AT_ERROR("nonzero not implemented for ", at::toString(type_set())); + } +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::nonzero", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline std::vector Tensor::nonzero_numpy() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::nonzero_numpy(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::nonzero_numpy", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::gather(int64_t dim, const Tensor & index, bool sparse_grad) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::gather(const_cast(*this), dim, index, sparse_grad); + break; + default: + AT_ERROR("gather not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gather", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, index, sparse_grad); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::gather(Dimname dim, const Tensor & index, bool sparse_grad) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::gather(const_cast(*this), dim, index, sparse_grad); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::gather", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, index, sparse_grad); +#endif +} +#endif +inline Tensor Tensor::addcmul(const Tensor & tensor1, const Tensor & tensor2, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcmul(const_cast(*this), tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addcmul", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), tensor1, tensor2, value); +#endif +} +inline Tensor & Tensor::addcmul_(const Tensor & tensor1, const Tensor & tensor2, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcmul_(const_cast(*this), tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addcmul_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), tensor1, tensor2, value); +#endif +} +inline Tensor Tensor::addcdiv(const Tensor & tensor1, const Tensor & tensor2, Scalar value) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::addcdiv(const_cast(*this), tensor1, tensor2, value); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::addcdiv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), tensor1, tensor2, value); +#endif +} +inline std::tuple Tensor::lstsq(const Tensor & A) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lstsq(const_cast(*this), A); + break; + default: + AT_ERROR("lstsq not implemented for ", 
at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lstsq", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &>( + op, const_cast(*this), A); +#endif +} +inline std::tuple Tensor::triangular_solve(const Tensor & A, bool upper, bool transpose, bool unitriangular) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::triangular_solve(const_cast(*this), A, upper, transpose, unitriangular); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::triangular_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &, bool, bool, bool>( + op, const_cast(*this), A, upper, transpose, unitriangular); +#endif +} +inline std::tuple Tensor::symeig(bool eigenvectors, bool upper) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::symeig(const_cast(*this), eigenvectors, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::symeig", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, const_cast(*this), eigenvectors, upper); +#endif +} +inline std::tuple Tensor::eig(bool eigenvectors) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::eig(const_cast(*this), eigenvectors); + break; + default: + AT_ERROR("eig not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::eig", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, const_cast(*this), eigenvectors); +#endif +} +inline std::tuple Tensor::svd(bool some, bool compute_uv) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::svd(const_cast(*this), some, compute_uv); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::svd", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool, bool>( + op, const_cast(*this), some, compute_uv); +#endif +} +inline Tensor Tensor::cholesky(bool upper) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky(const_cast(*this), upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cholesky", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), upper); +#endif +} +inline Tensor Tensor::cholesky_solve(const Tensor & input2, bool upper) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::cholesky_solve(const_cast(*this), input2, upper); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cholesky_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), input2, upper); +#endif +} +inline std::tuple Tensor::solve(const Tensor & A) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::solve(const_cast(*this), A); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::solve", ""}).value(); + return 
c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, const Tensor &>( + op, const_cast(*this), A); +#endif +} +inline Tensor Tensor::cholesky_inverse(bool upper) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::cholesky_inverse(const_cast(*this), upper); + break; + default: + AT_ERROR("cholesky_inverse not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::cholesky_inverse", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), upper); +#endif +} +inline std::tuple Tensor::qr(bool some) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::qr(const_cast(*this), some); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::qr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, bool>( + op, const_cast(*this), some); +#endif +} +inline std::tuple Tensor::geqrf() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::geqrf(const_cast(*this)); + break; + default: + AT_ERROR("geqrf not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::geqrf", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &>( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::orgqr(const Tensor & input2) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::orgqr(const_cast(*this), input2); + break; + default: + AT_ERROR("orgqr not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::orgqr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), input2); +#endif +} +inline Tensor Tensor::ormqr(const Tensor & input2, const Tensor & input3, bool left, bool transpose) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::ormqr(const_cast(*this), input2, input3, left, transpose); + break; + default: + AT_ERROR("ormqr not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::ormqr", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), input2, input3, left, transpose); +#endif +} +inline Tensor Tensor::lu_solve(const Tensor & LU_data, const Tensor & LU_pivots) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::lu_solve(const_cast(*this), LU_data, LU_pivots); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lu_solve", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), LU_data, LU_pivots); +#endif +} +inline Tensor Tensor::multinomial(int64_t num_samples, bool replacement, Generator * generator) const { +#ifdef 
USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::multinomial(const_cast(*this), num_samples, replacement, generator); + break; + default: + AT_ERROR("multinomial not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::multinomial", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), num_samples, replacement, generator); +#endif +} +inline Tensor Tensor::lgamma() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lgamma(const_cast(*this)); + break; + default: + AT_ERROR("lgamma not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lgamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::digamma() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::digamma(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::digamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::polygamma(int64_t n) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::polygamma(n, const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::polygamma", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, n, const_cast(*this)); +#endif +} +inline Tensor Tensor::erfinv() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::erfinv(const_cast(*this)); + break; + default: + AT_ERROR("erfinv not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erfinv", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::erfinv_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::erfinv_(const_cast(*this)); + break; + default: + AT_ERROR("erfinv_ not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::erfinv_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::sign() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sign(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sign", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor & Tensor::sign_() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sign_(const_cast(*this)); +#else + static 
c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sign_", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::dist(const Tensor & other, Scalar p) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::dist(const_cast(*this), other, p); + break; + default: + AT_ERROR("dist not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::dist", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other, p); +#endif +} +inline Tensor Tensor::atan2(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::atan2(const_cast(*this), other); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::atan2", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::lerp(const Tensor & end, Scalar weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lerp(const_cast(*this), end, weight); + break; + default: + AT_ERROR("lerp not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lerp", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), end, weight); +#endif +} +inline Tensor Tensor::lerp(const Tensor & end, const Tensor & weight) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::lerp(const_cast(*this), end, weight); + break; + default: + AT_ERROR("lerp not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::lerp", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), end, weight); +#endif +} +inline Tensor Tensor::histc(int64_t bins, Scalar min, Scalar max) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::histc(const_cast(*this), bins, min, max); + break; + default: + AT_ERROR("histc not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::histc", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), bins, min, max); +#endif +} +inline Tensor Tensor::fmod(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::fmod(const_cast(*this), other); + break; + default: + AT_ERROR("fmod not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fmod", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, 
const_cast(*this), other); +#endif +} +inline Tensor Tensor::fmod(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::fmod(const_cast(*this), other); + break; + default: + AT_ERROR("fmod not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::fmod", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::remainder(Scalar other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::remainder(const_cast(*this), other); + break; + default: + AT_ERROR("remainder not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::remainder", "Scalar"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::remainder(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::remainder(const_cast(*this), other); + break; + default: + AT_ERROR("remainder not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::remainder", "Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::min(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::min(const_cast(*this), other); + break; + default: + AT_ERROR("min not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min", "other"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::min() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::min(const_cast(*this)); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::min(const_cast(*this)); + break; + default: + AT_ERROR("min not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::min", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::max(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::max(const_cast(*this), other); + break; + default: + AT_ERROR("max not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::max", "other"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, 
const_cast(*this), other); +#endif +} +inline Tensor Tensor::max() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::max(const_cast(*this)); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::max(const_cast(*this)); + break; + default: + AT_ERROR("max not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::max", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::median() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::median(const_cast(*this)); + break; + default: + AT_ERROR("median not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::median", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline std::tuple Tensor::sort(int64_t dim, bool descending) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::sort(const_cast(*this), dim, descending); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::sort(const_cast(*this), dim, descending); + break; + default: + AT_ERROR("sort not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sort", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, bool>( + op, const_cast(*this), dim, descending); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline std::tuple Tensor::sort(Dimname dim, bool descending) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::sort(const_cast(*this), dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::sort", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, Dimname, bool>( + op, const_cast(*this), dim, descending); +#endif +} +#endif +inline Tensor Tensor::argsort(int64_t dim, bool descending) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argsort(const_cast(*this), dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::argsort", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), dim, descending); +#endif +} +#ifdef BUILD_NAMEDTENSOR +inline Tensor Tensor::argsort(Dimname dim, bool descending) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::argsort(const_cast(*this), dim, descending); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::argsort", "dimname"}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dim, descending); +#endif +} +#endif +inline std::tuple Tensor::topk(int64_t k, int64_t dim, bool largest, bool sorted) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + 
switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::topk(const_cast(*this), k, dim, largest, sorted); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::topk(const_cast(*this), k, dim, largest, sorted); + break; + default: + AT_ERROR("topk not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::topk", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly, const Tensor &, int64_t, int64_t, bool, bool>( + op, const_cast(*this), k, dim, largest, sorted); +#endif +} +inline Tensor Tensor::all() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::all(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::all", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::any() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::any(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::any", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this)); +#endif +} +inline Tensor Tensor::renorm(Scalar p, int64_t dim, Scalar maxnorm) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::renorm(const_cast(*this), p, dim, maxnorm); + break; + default: + AT_ERROR("renorm not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::renorm", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), p, dim, maxnorm); +#endif +} +inline Tensor Tensor::unfold(int64_t dimension, int64_t size, int64_t step) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::unfold(const_cast(*this), dimension, size, step); + break; + default: + AT_ERROR("unfold not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::unfold", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this), dimension, size, step); +#endif +} +inline bool Tensor::equal(const Tensor & other) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return CPUType::equal(const_cast(*this), other); + break; + case Backend::QuantizedCPU: + return QuantizedCPUType::equal(const_cast(*this), other); + break; + default: + AT_ERROR("equal not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::equal", ""}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), other); +#endif +} +inline Tensor Tensor::pow(const Tensor & exponent) const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + switch(tensorTypeIdToBackend(c10::impl::dispatchTypeId(type_set()))) { + case Backend::CPU: + return 
CPUType::pow(const_cast(*this), exponent); + break; + default: + AT_ERROR("pow not implemented for ", at::toString(type_set())); + } +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::pow", "Tensor_Tensor"}).value(); + return c10::Dispatcher::singleton().callUnboxed( + op, const_cast(*this), exponent); +#endif +} +inline Tensor Tensor::alias() const { +#ifdef USE_STATIC_DISPATCH + at::AutoNonVariableTypeMode _var_guard(true); + return TypeDefault::alias(const_cast(*this)); +#else + static c10::OperatorHandle op = c10::Dispatcher::singleton().findSchema({"aten::alias", ""}).value(); + return c10::Dispatcher::singleton().callUnboxedOnly( + op, const_cast(*this)); +#endif +} + +inline caffe2::TypeMeta Tensor::dtype() const noexcept { + return impl_->dtype(); +} + +inline Layout Tensor::layout() const noexcept { + return impl_->layout(); +} + +inline Device Tensor::device() const { + return impl_->device(); +} + +inline int64_t Tensor::get_device() const { + // NB: this is not a native function to avoid dispatching overhead. + return impl_->get_device(); +} + +inline int64_t get_device(Tensor self) { + return self.get_device(); +} + +inline bool Tensor::is_cuda() const { + // NB: this is not a native function to avoid dispatching overhead. + return impl_->is_cuda(); +} + +#ifdef BUILD_NAMEDTENSOR +inline NamedTensorMeta* Tensor::get_named_tensor_meta() { + return static_cast(impl_->named_tensor_meta()); +} + +inline const NamedTensorMeta* Tensor::get_named_tensor_meta() const { + return static_cast(impl_->named_tensor_meta()); +} + +inline bool Tensor::has_names() const { + return impl::has_names(unsafeGetTensorImpl()); +} +#endif + +inline bool is_cuda(Tensor self) { + return self.is_cuda(); +} + +inline bool Tensor::is_hip() const { + // NB: this is not a native function to avoid dispatching overhead. + return impl_->is_hip(); +} + +inline bool is_hip(Tensor self) { + return self.is_hip(); +} + +inline bool Tensor::is_sparse() const { + // NB: this is not a native function to avoid dispatching overhead. + return impl_->is_sparse(); +} + +inline bool is_sparse(Tensor self) { + return self.is_sparse(); +} + +inline bool Tensor::is_mkldnn() const { + // NB: this is not a native function to avoid dispatching overhead. + return impl_->is_mkldnn(); +} + +inline bool is_mkldnn(Tensor self) { + return self.is_mkldnn(); +} + +inline bool Tensor::is_quantized() const { + // NB: this is not a native function to avoid dispatching overhead. 
+ return impl_->is_quantized(); +} + +inline bool is_quantized(Tensor self) { + return self.is_quantized(); +} + +#define DEFINE_CAST(T, name) \ + template <> \ + inline T* Tensor::data_ptr() const { \ + TORCH_CHECK( \ + scalar_type() == ScalarType::name, \ + "expected scalar type ", \ + #name, \ + " but found ", \ + c10::toString(scalar_type())); \ + return static_cast(this->unsafeGetTensorImpl()->data()); \ + } + +AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_EXCEPT_COMPLEX_HALF(DEFINE_CAST) +AT_FORALL_QINT_TYPES(DEFINE_CAST) +#undef DEFINE_CAST + +#define DEFINE_ITEM(T, name) \ + template <> \ + inline T Tensor::item() const { \ + return item().to##name(); \ + } + +AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_EXCEPT_COMPLEX_HALF(DEFINE_ITEM) +#undef DEFINE_ITEM + +// Gradient Node and Edges +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +template +auto Tensor::register_hook(T&& hook) const -> Tensor::hook_return_void_t { + // Return the grad argument in case of a hook with void return type to have an + // std::function with Tensor return type + std::function fn(hook); + return _register_hook([fn](const Tensor& grad) { + fn(grad); + return Tensor(); + }); +} + +template +auto Tensor::register_hook(T&& hook) const -> Tensor::hook_return_var_t { + return _register_hook(hook); +} + + +} //namespace at diff --git a/thirdparty/libtorch/include/ATen/core/UndefinedTensorImpl.h b/thirdparty/libtorch/include/ATen/core/UndefinedTensorImpl.h new file mode 100644 index 0000000000..885f6e195f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/UndefinedTensorImpl.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/core/UnsafeFromTH.h b/thirdparty/libtorch/include/ATen/core/UnsafeFromTH.h new file mode 100644 index 0000000000..4abf66aaf4 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/UnsafeFromTH.h @@ -0,0 +1,20 @@ +#include + +namespace at { + +inline Tensor unsafeTensorFromTH(void * th_pointer, bool retain) { + auto tensor_impl = c10::intrusive_ptr::reclaim(static_cast(th_pointer)); + if (retain && tensor_impl.get() != UndefinedTensorImpl::singleton()) { + c10::raw::intrusive_ptr::incref(tensor_impl.get()); + } + return Tensor(std::move(tensor_impl)); +} + +inline Storage unsafeStorageFromTH(void * th_pointer, bool retain) { + if (retain && th_pointer) { + c10::raw::intrusive_ptr::incref(static_cast(th_pointer)); + } + return Storage(c10::intrusive_ptr::reclaim(static_cast(th_pointer))); +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/VariableHooksInterface.h b/thirdparty/libtorch/include/ATen/core/VariableHooksInterface.h new file mode 100644 index 0000000000..b6ac67df2b --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/VariableHooksInterface.h @@ -0,0 +1,62 @@ +#pragma once + +#include +#include + +// A little explanation about why this file exists at all. We have +// a few methods on Tensor class which require access to reified access to +// AutogradMeta. In open source, this isn't a big deal: we just access +// torch/csrc/autograd/variable.h from aten/src/ATen/core/Tensor.cpp and +// we can put the definitions inline. This is because everything gets balled +// into a single dynamic library in the end. +// +// However, inside our Facebook internal version of our build system, we +// have a split between aten and torch/csrc. So we cannot simply just +// cross this boundary. "Now wait," you might say, "Why don't we just +// merge the libraries inside Facebook". 
Well, the problem is that there +// are some downstream applications which are at binary size limit, and +// incorporating all of the extra code from libtorch would push them +// over (admarket/adreview/service:adreviewservice, see also +// https://github.com/pytorch/pytorch/pull/29299) So if you want to do that, +// we have to fix all of the services like this. +// +// I didn't want to block eliminating Tensor-Variable on this work, so I +// had to introduce another dynamic dispatch to get to the variable +// implementations (which live in torch/csrc/autograd/variable.cpp, FYI). +// +// I also considered using our existing dynamic dispatch mechanism, c10 +// dispatcher, to do this. However, (1) some of the functions on Tensor +// have weird signatures that are not supported by autograd, and (2) +// see this bug https://github.com/pytorch/pytorch/issues/30102 + +namespace torch { namespace autograd { + +struct Node; + +}} // namespace torch::autograd + +namespace at { +namespace impl { + +struct CAFFE2_API VariableHooksInterface { + virtual ~VariableHooksInterface() = default; + virtual Tensor tensor_data(const Tensor&) const = 0; + virtual Tensor variable_data(const Tensor&) const = 0; + virtual const std::shared_ptr& grad_fn(const Tensor&) const = 0; + virtual unsigned _register_hook(const Tensor&, std::function hook) const = 0; + virtual void remove_hook(const Tensor&, unsigned pos) const = 0; + virtual bool is_view(const Tensor&) const = 0; + virtual const Tensor& base(const Tensor&) const = 0; + virtual const std::string& name(const Tensor&) const = 0; +}; + +CAFFE2_API void SetVariableHooks(VariableHooksInterface* hooks); +CAFFE2_API VariableHooksInterface* GetVariableHooks(); + +struct CAFFE2_API VariableHooksRegisterer { + explicit VariableHooksRegisterer(VariableHooksInterface* hooks) { + SetVariableHooks(hooks); + } +}; + +}} // namespace at::impl diff --git a/thirdparty/libtorch/include/ATen/core/Variadic.h b/thirdparty/libtorch/include/ATen/core/Variadic.h new file mode 100644 index 0000000000..b49d94bba1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/Variadic.h @@ -0,0 +1,76 @@ +#pragma once + +#include +#include +#include +#include + +#include + +namespace at { + +// This class allows you to write variadic functions which +// call a (possibly overloaded) function on each argument, +// in order. This is most commonly used in autogenerated code, +// where it is convenient to have a function that can uniformly +// take arguments of different types. If your arguments +// are homogenous consider using a std::initializer_list instead. +// +// For examples of this in use, see torch/csrc/utils/variadic.h +template +struct IterArgs { + template + inline F& apply() { + return self(); + } + + // NB: Use perfect forwarding here, otherwise we'll make value + // copies of all arguments! + template + inline F& apply(T&& arg, Args&&... args) { + self()(std::forward(arg)); + if (self().short_circuit()) { + return self(); + } else { + return apply(std::forward(args)...); + } + } + + // Here are some handy overloads which provide sensible + // defaults for container-like structures that one might + // be interested in recursing into. You can enable them + // by adding: + // + // using IterArgs::operator() + // + // to your struct. These are not enabled by default because + // you may be able to process these structures more efficiently + // than handling them one-by-one. 
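  // [Editorial sketch, not part of the upstream header] The CRTP pattern above
  // is easiest to see with a small, hypothetical visitor; the names `CountArgs`
  // and `count_tensors` below are illustrative only, not part of ATen:
  //
  //   struct CountArgs : IterArgs<CountArgs> {
  //     size_t count = 0;
  //     void operator()(const at::Tensor&) { ++count; }  // count Tensor arguments
  //     template <typename T>
  //     void operator()(const T&) {}                      // ignore everything else
  //   };
  //
  //   template <typename... Args>
  //   size_t count_tensors(Args&&... args) {
  //     return CountArgs().apply(std::forward<Args>(args)...).count;
  //   }
  //
  // apply() forwards each argument, in order, to the derived class's
  // operator() and returns a reference to the derived object, so the result
  // can be read off directly after the call.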
+ + template + void operator()(at::ArrayRef args) { + for (const auto& arg : args) { + self()(arg); + if (self().short_circuit()) + return; + } + } + + // NB: we need to specify std::vector manually as C++ won't + // do an implicit conversion to make a template deduction go through. + template + void operator()(const std::vector& args) { + self()(at::ArrayRef{args}); + } + + constexpr bool short_circuit() const { + return false; + } + + private: + inline F& self() { + return *static_cast(this); + } +}; + +} // namespace torch diff --git a/thirdparty/libtorch/include/ATen/core/alias_info.h b/thirdparty/libtorch/include/ATen/core/alias_info.h new file mode 100644 index 0000000000..96c4d0a4e3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/alias_info.h @@ -0,0 +1,119 @@ +#pragma once +#include +#include +#include +#include + +namespace c10 { +/** + * class AliasInfo + * + * Data structure to hold aliasing information for an `Argument`. They can be + * nested to represent aliasing information on contained types. + * + * There is a `beforeSet` which describes the aliasing information before the + * operator executes, and an `afterSet` that describes aliasing info + * after execution. + */ +class AliasInfo { + public: + // Symbol for the set that can alias anything + static Symbol wildcardSet() { + static const Symbol wc = Symbol::fromQualString("alias::*"); + return wc; + } + + void setIsWrite(bool isWrite) { + isWrite_ = isWrite; + } + + bool isWrite() const { + return isWrite_; + } + + void addBeforeSet(Symbol aliasSet) { + beforeSets_.insert(aliasSet); + } + + void addAfterSet(Symbol aliasSet) { + afterSets_.insert(aliasSet); + } + + const std::unordered_set& beforeSets() const { + return beforeSets_; + } + + const std::unordered_set& afterSets() const { + return afterSets_; + } + + Symbol beforeSet() const { + AT_ASSERT(beforeSets_.size() == 1); + return *beforeSets_.begin(); + } + + bool isWildcardBefore() const { + return beforeSets_.count(wildcardSet()) != 0; + } + + bool isWildcardAfter() const { + return afterSets_.count(wildcardSet()) != 0; + } + + // the alias info for the contained types of the type + // e.g. 
if this is an annotation on List[T], `sets` refers to + // the alias sets that the list may be in + // while containedTypes()[0] refers to the sets that members of the list + // may be in + void addContainedType(AliasInfo aliasInfo) { + containedTypes_.push_back(std::move(aliasInfo)); + } + const std::vector& containedTypes() const { + return containedTypes_; + } + + private: + std::unordered_set beforeSets_; + std::unordered_set afterSets_; + std::vector containedTypes_; + bool isWrite_ = false; +}; + +inline bool operator==(const AliasInfo& lhs, const AliasInfo& rhs) { + return lhs.isWrite() == rhs.isWrite() + && lhs.beforeSets() == rhs.beforeSets() + && lhs.afterSets() == rhs.afterSets() + && lhs.containedTypes() == rhs.containedTypes(); +} + +// this does match the way things are represented in the schema +inline std::ostream& operator<<(std::ostream& out, const AliasInfo& aliasInfo) { + out << "("; + bool first = true; + for (const auto& set : aliasInfo.beforeSets()) { + if (first) { + first = false; + } else { + out << "|"; + } + out << set.toUnqualString(); + } + if (aliasInfo.isWrite()) { + out << "!"; + } + if (aliasInfo.beforeSets() != aliasInfo.afterSets()) { + out << " -> "; + first = true; + for (const auto& set : aliasInfo.afterSets()) { + if (first) { + first = false; + } else { + out << "|"; + } + out << set.toUnqualString(); + } + } + out << ")"; + return out; +} +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/aten_interned_strings.h b/thirdparty/libtorch/include/ATen/core/aten_interned_strings.h new file mode 100644 index 0000000000..ecb0a74e95 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/aten_interned_strings.h @@ -0,0 +1,1020 @@ +#pragma once + +// ATen symbols correspond exactly to operators defined in ATen. Every +// symbol here corresponds exactly to an ATen operation which is defined +// in Declarations.yaml; attributes are in one-to-one correspondence with +// their ATen name. +// +// To explicitly use interned strings as symbols in your code, you must add +// them to this list. 
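// [Editorial sketch, not part of the upstream header] FORALL_ATEN_BASE_SYMBOLS
// is an X-macro list: a consumer supplies its own per-entry macro and the list
// expands it once per `_(aten, name)` pair (in upstream PyTorch the list is
// consumed by the interned-strings machinery elsewhere in ATen). The
// DEFINE_SYMBOL_NAME macro below is hypothetical and only shows the expansion
// mechanics:
//
//   #define DEFINE_SYMBOL_NAME(ns, s) constexpr const char* ns##_##s = #ns "::" #s;
//   FORALL_ATEN_BASE_SYMBOLS(DEFINE_SYMBOL_NAME)
//   #undef DEFINE_SYMBOL_NAME
//
// This defines one `constexpr const char* aten_add = "aten::add";`-style
// constant per entry, which is why adding an operator name to the list below
// is all that is needed to make it usable wherever the list is expanded.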
+ +#define FORALL_ATEN_BASE_SYMBOLS(_) \ +_(aten, __and__) \ +_(aten, __iand__) \ +_(aten, __ilshift__) \ +_(aten, __ior__) \ +_(aten, __irshift__) \ +_(aten, __ixor__) \ +_(aten, __lshift__) \ +_(aten, __or__) \ +_(aten, __rshift__) \ +_(aten, __xor__) \ +_(aten, _abs) \ +_(aten, _acos) \ +_(aten, _addmv) \ +_(aten, _addr) \ +_(aten, _arange) \ +_(aten, _argmax) \ +_(aten, _argmin) \ +_(aten, _asin) \ +_(aten, _atan) \ +_(aten, _baddbmm_mkl) \ +_(aten, _cast_Byte) \ +_(aten, _cast_Char) \ +_(aten, _cast_Double) \ +_(aten, _cast_Float) \ +_(aten, _cast_Half) \ +_(aten, _cast_Int) \ +_(aten, _cast_Long) \ +_(aten, _cast_Short) \ +_(aten, _cat) \ +_(aten, _ceil) \ +_(aten, _convolution) \ +_(aten, _convolution_double_backward) \ +_(aten, convolution_overrideable) \ +_(aten, convolution_backward_overrideable) \ +_(aten, _convolution_nogroup) \ +_(aten, _copy_ignoring_overlaps) \ +_(aten, _cos) \ +_(aten, _cosh) \ +_(aten, _ctc_loss) \ +_(aten, _ctc_loss_backward) \ +_(aten, _cudnn_ctc_loss) \ +_(aten, _cudnn_init_dropout_state) \ +_(aten, _cudnn_rnn) \ +_(aten, _cudnn_rnn_backward) \ +_(aten, _cudnn_rnn_flatten_weight) \ +_(aten, _cufft_clear_plan_cache) \ +_(aten, _cufft_get_plan_cache_max_size) \ +_(aten, _cufft_get_plan_cache_size) \ +_(aten, _cufft_set_plan_cache_max_size) \ +_(aten, _cumprod) \ +_(aten, _cumsum) \ +_(aten, _denseDims) \ +_(aten, _dimI) \ +_(aten, _dimV) \ +_(aten, _dim_arange) \ +_(aten, _dirichlet_grad) \ +_(aten, _dot) \ +_(aten, _embedding_bag) \ +_(aten, _embedding_bag_backward) \ +_(aten, _embedding_bag_dense_backward) \ +_(aten, _embedding_bag_sparse_backward) \ +_(aten, _erf) \ +_(aten, _erfc) \ +_(aten, _exp) \ +_(aten, _expm1) \ +_(aten, _fft_with_size) \ +_(aten, _fill) \ +_(aten, _floor) \ +_(aten, _fused_dropout) \ +_(aten, _ger) \ +_(aten, _indexCopy) \ +_(aten, _indices) \ +_(aten, _linspace) \ +_(aten, _local_scalar) \ +_(aten, _local_scalar_dense) \ +_(aten, _log) \ +_(aten, _log10) \ +_(aten, _log1p) \ +_(aten, _log2) \ +_(aten, _logspace) \ +_(aten, _lu_with_info) \ +_(aten, _masked_scale) \ +_(aten, _mm) \ +_(aten, _mv) \ +_(aten, _nnz) \ +_(aten, _pack_padded_sequence) \ +_(aten, _pack_padded_sequence_backward) \ +_(aten, _pad_packed_sequence) \ +_(aten, _pdist_backward) \ +_(aten, _pdist_forward) \ +_(aten, _prod) \ +_(aten, _prodall) \ +_(aten, _range) \ +_(aten, _reshape_from_tensor) \ +_(aten, _round) \ +_(aten, _rsqrt) \ +_(aten, _s_where) \ +_(aten, _shape_as_tensor) \ +_(aten, _sigmoid) \ +_(aten, _sigmoid_backward) \ +_(aten, _sigmoid_forward) \ +_(aten, _sin) \ +_(aten, _sinh) \ +_(aten, _sparseDims) \ +_(aten, _sparse_add) \ +_(aten, _sparse_addmm) \ +_(aten, _sparse_coo_tensor_with_dims) \ +_(aten, _sparse_coo_tensor_with_dims_and_tensors) \ +_(aten, _sparse_coo_tensor_unsafe) \ +_(aten, _sparse_dense_add) \ +_(aten, _sparse_div_scalar) \ +_(aten, _sparse_div_zerodim) \ +_(aten, _sparse_mul) \ +_(aten, _sparse_mul_scalar) \ +_(aten, _sparse_mul_zerodim) \ +_(aten, _sparse_sum) \ +_(aten, _sqrt) \ +_(aten, _standard_gamma) \ +_(aten, _standard_gamma_grad) \ +_(aten, _sum) \ +_(aten, _sum_cuda) \ +_(aten, _sumall) \ +_(aten, _tan) \ +_(aten, _tanh) \ +_(aten, _tanh_backward) \ +_(aten, _tanh_forward) \ +_(aten, _th_baddbmm) \ +_(aten, _th_bmm) \ +_(aten, _th_clamp) \ +_(aten, _th_clamp_max) \ +_(aten, _th_clamp_min) \ +_(aten, _th_get_device) \ +_(aten, _th_kthvalue) \ +_(aten, _th_max) \ +_(aten, _th_median) \ +_(aten, _th_min) \ +_(aten, _th_mode) \ +_(aten, _th_prod) \ +_(aten, _th_sigmoid) \ +_(aten, _th_std) \ +_(aten, _th_sum) \ 
+_(aten, _th_tanh) \ +_(aten, _th_var) \ +_(aten, _thnn_fused_gru_cell) \ +_(aten, _thnn_fused_gru_cell_backward) \ +_(aten, _thnn_fused_lstm_cell) \ +_(aten, _thnn_fused_lstm_cell_backward) \ +_(aten, _trilinear) \ +_(aten, _trunc) \ +_(aten, _unique) \ +_(aten, _unique_dim) \ +_(aten, _unsafe_view) \ +_(aten, _values) \ +_(aten, _weight_norm) \ +_(aten, _weight_norm_cuda_interface) \ +_(aten, _weight_norm_cuda_interface_backward) \ +_(aten, _weight_norm_differentiable_backward) \ +_(aten, abs) \ +_(aten, acos) \ +_(aten, adaptive_avg_pool1d) \ +_(aten, adaptive_avg_pool2d) \ +_(aten, adaptive_avg_pool2d_backward) \ +_(aten, adaptive_avg_pool2d_forward) \ +_(aten, adaptive_avg_pool3d) \ +_(aten, adaptive_avg_pool3d_backward) \ +_(aten, adaptive_avg_pool3d_forward) \ +_(aten, adaptive_max_pool1d) \ +_(aten, adaptive_max_pool2d) \ +_(aten, adaptive_max_pool2d_backward) \ +_(aten, adaptive_max_pool2d_forward) \ +_(aten, adaptive_max_pool3d) \ +_(aten, adaptive_max_pool3d_backward) \ +_(aten, adaptive_max_pool3d_forward) \ +_(aten, add) \ +_(aten, add_) \ +_(aten, addbmm) \ +_(aten, addcdiv) \ +_(aten, addcmul) \ +_(aten, addmm) \ +_(aten, addmv) \ +_(aten, addr) \ +_(aten, affine_grid_generator) \ +_(aten, affine_grid_generator_backward) \ +_(aten, alias) \ +_(aten, all) \ +_(aten, allclose) \ +_(aten, alpha_dropout) \ +_(aten, any) \ +_(aten, arange) \ +_(aten, argmax) \ +_(aten, argmin) \ +_(aten, as_strided) \ +_(aten, as_tensor) \ +_(aten, asin) \ +_(aten, atan) \ +_(aten, atan2) \ +_(aten, avg_pool1d) \ +_(aten, avg_pool2d) \ +_(aten, avg_pool2d_backward) \ +_(aten, avg_pool2d_forward) \ +_(aten, avg_pool3d) \ +_(aten, avg_pool3d_backward) \ +_(aten, avg_pool3d_forward) \ +_(aten, baddbmm) \ +_(aten, bartlett_window) \ +_(aten, batch_norm) \ +_(aten, bernoulli) \ +_(aten, bilinear) \ +_(aten, binary_cross_entropy) \ +_(aten, binary_cross_entropy_backward) \ +_(aten, binary_cross_entropy_forward) \ +_(aten, binary_cross_entropy_with_logits) \ +_(aten, binary_cross_entropy_with_logits_backward) \ +_(aten, binary_cross_entropy_with_logits_target_backward) \ +_(aten, bincount) \ +_(aten, blackman_window) \ +_(aten, bmm) \ +_(aten, broadcast_tensors) \ +_(aten, cartesian_prod) \ +_(aten, cat) \ +_(aten, cauchy) \ +_(aten, ceil) \ +_(aten, celu) \ +_(aten, chain_matmul) \ +_(aten, cholesky) \ +_(aten, cholesky_inverse) \ +_(aten, cholesky_solve) \ +_(aten, chunk) \ +_(aten, clamp) \ +_(aten, clamp_max) \ +_(aten, clamp_min) \ +_(aten, clone) \ +_(aten, coalesce) \ +_(aten, combinations) \ +_(aten, constant_pad_nd) \ +_(aten, contiguous) \ +_(aten, conv1d) \ +_(aten, conv2d) \ +_(aten, conv3d) \ +_(aten, conv_tbc) \ +_(aten, conv_tbc_backward) \ +_(aten, conv_transpose1d) \ +_(aten, convolution) \ +_(aten, copy_sparse_to_sparse) \ +_(aten, cos) \ +_(aten, cosh) \ +_(aten, cosine_embedding_loss) \ +_(aten, cosine_similarity) \ +_(aten, cross) \ +_(aten, std_mean) \ +_(aten, var_mean) \ +_(aten, ctc_loss) \ +_(aten, cudnn_affine_grid_generator) \ +_(aten, cudnn_affine_grid_generator_backward) \ +_(aten, cudnn_batch_norm) \ +_(aten, cudnn_batch_norm_backward) \ +_(aten, cudnn_convolution) \ +_(aten, cudnn_convolution_backward) \ +_(aten, cudnn_convolution_backward_bias) \ +_(aten, cudnn_convolution_backward_input) \ +_(aten, cudnn_convolution_backward_weight) \ +_(aten, cudnn_convolution_transpose) \ +_(aten, cudnn_convolution_transpose_backward) \ +_(aten, cudnn_convolution_transpose_backward_bias) \ +_(aten, cudnn_convolution_transpose_backward_input) \ +_(aten, 
cudnn_convolution_transpose_backward_weight) \ +_(aten, cudnn_grid_sampler) \ +_(aten, cudnn_grid_sampler_backward) \ +_(aten, cudnn_is_acceptable) \ +_(aten, cumprod) \ +_(aten, cumsum) \ +_(aten, data_ptr) \ +_(aten, det) \ +_(aten, detach) \ +_(aten, diag) \ +_(aten, diag_embed) \ +_(aten, diagflat) \ +_(aten, diagonal) \ +_(aten, fill_diagonal_) \ +_(aten, digamma) \ +_(aten, dim) \ +_(aten, dist) \ +_(aten, div) \ +_(aten, div_) \ +_(aten, dot) \ +_(aten, dropout) \ +_(aten, eig) \ +_(aten, einsum) \ +_(aten, elu) \ +_(aten, elu_backward) \ +_(aten, elu_forward) \ +_(aten, embedding) \ +_(aten, embedding_backward) \ +_(aten, embedding_bag) \ +_(aten, embedding_dense_backward) \ +_(aten, embedding_renorm) \ +_(aten, embedding_sparse_backward) \ +_(aten, empty) \ +_(aten, empty_like) \ +_(aten, empty_strided) \ +_(aten, eq) \ +_(aten, equal) \ +_(aten, erf) \ +_(aten, erfc) \ +_(aten, erfinv) \ +_(aten, exp) \ +_(aten, expand) \ +_(aten, expand_as) \ +_(aten, expm1) \ +_(aten, exponential) \ +_(aten, eye) \ +_(aten, feature_alpha_dropout) \ +_(aten, feature_dropout) \ +_(aten, fft) \ +_(aten, fill) \ +_(aten, flatten) \ +_(aten, flip) \ +_(aten, floor) \ +_(aten, fmod) \ +_(aten, frac) \ +_(aten, fractional_max_pool2d) \ +_(aten, fractional_max_pool2d_backward) \ +_(aten, fractional_max_pool2d_forward) \ +_(aten, frobenius_norm) \ +_(aten, full) \ +_(aten, full_like) \ +_(aten, gather) \ +_(aten, ge) \ +_(aten, gelu) \ +_(aten, geometric) \ +_(aten, geqrf) \ +_(aten, ger) \ +_(aten, get_device) \ +_(aten, glu) \ +_(aten, glu_backward) \ +_(aten, glu_forward) \ +_(aten, grid_sampler) \ +_(aten, grid_sampler_2d) \ +_(aten, grid_sampler_2d_backward) \ +_(aten, grid_sampler_3d) \ +_(aten, grid_sampler_3d_backward) \ +_(aten, group_norm) \ +_(aten, gru) \ +_(aten, gru_cell) \ +_(aten, gt) \ +_(aten, hamming_window) \ +_(aten, hann_window) \ +_(aten, hardshrink) \ +_(aten, hardshrink_backward) \ +_(aten, hardtanh) \ +_(aten, hardtanh_backward) \ +_(aten, hardtanh_forward) \ +_(aten, hinge_embedding_loss) \ +_(aten, histc) \ +_(aten, hspmm) \ +_(aten, ifft) \ +_(aten, index) \ +_(aten, index_add) \ +_(aten, index_copy) \ +_(aten, index_fill) \ +_(aten, index_put) \ +_(aten, index_select) \ +_(aten, indices) \ +_(aten, instance_norm) \ +_(aten, inverse) \ +_(aten, irfft) \ +_(aten, is_coalesced) \ +_(aten, is_complex) \ +_(aten, is_contiguous) \ +_(aten, is_cuda) \ +_(aten, is_distributed) \ +_(aten, is_floating_point) \ +_(aten, is_nonzero) \ +_(aten, is_same_size) \ +_(aten, is_set_to) \ +_(aten, is_signed) \ +_(aten, is_sparse) \ +_(aten, isclose) \ +_(aten, kl_div) \ +_(aten, kl_div_backward) \ +_(aten, kthvalue) \ +_(aten, l1_loss) \ +_(aten, l1_loss_backward) \ +_(aten, l1_loss_forward) \ +_(aten, layer_norm) \ +_(aten, le) \ +_(aten, leaky_relu) \ +_(aten, leaky_relu_backward) \ +_(aten, leaky_relu_forward) \ +_(aten, lerp) \ +_(aten, lgamma) \ +_(aten, linear) \ +_(aten, linspace) \ +_(aten, log) \ +_(aten, log10) \ +_(aten, log1p) \ +_(aten, log2) \ +_(aten, log_normal) \ +_(aten, log_sigmoid) \ +_(aten, log_sigmoid_backward) \ +_(aten, log_sigmoid_forward) \ +_(aten, log_softmax) \ +_(aten, _log_softmax) \ +_(aten, _log_softmax_backward_data) \ +_(aten, logdet) \ +_(aten, logspace) \ +_(aten, logsumexp) \ +_(aten, lstm) \ +_(aten, lstm_cell) \ +_(aten, lstsq) \ +_(aten, lt) \ +_(aten, lu_solve) \ +_(aten, margin_ranking_loss) \ +_(aten, masked_fill) \ +_(aten, masked_scatter) \ +_(aten, masked_select) \ +_(aten, matmul) \ +_(aten, matrix_power) \ +_(aten, matrix_rank) \ +_(aten, max) 
\ +_(aten, max_pool1d) \ +_(aten, max_pool1d_with_indices) \ +_(aten, max_pool2d) \ +_(aten, max_pool2d_with_indices) \ +_(aten, max_pool2d_with_indices_backward) \ +_(aten, max_pool2d_with_indices_forward) \ +_(aten, max_pool3d) \ +_(aten, max_pool3d_with_indices) \ +_(aten, max_pool3d_with_indices_backward) \ +_(aten, max_pool3d_with_indices_forward) \ +_(aten, max_unpool2d) \ +_(aten, max_unpool2d_backward) \ +_(aten, max_unpool2d_forward) \ +_(aten, max_unpool3d) \ +_(aten, max_unpool3d_backward) \ +_(aten, max_unpool3d_forward) \ +_(aten, max_values) \ +_(aten, mean) \ +_(aten, median) \ +_(aten, meshgrid) \ +_(aten, min) \ +_(aten, min_values) \ +_(aten, miopen_batch_norm) \ +_(aten, miopen_batch_norm_backward) \ +_(aten, miopen_convolution) \ +_(aten, miopen_convolution_backward) \ +_(aten, miopen_convolution_backward_bias) \ +_(aten, miopen_convolution_backward_input) \ +_(aten, miopen_convolution_backward_weight) \ +_(aten, miopen_convolution_transpose) \ +_(aten, miopen_convolution_transpose_backward) \ +_(aten, miopen_convolution_transpose_backward_input) \ +_(aten, miopen_convolution_transpose_backward_weight) \ +_(aten, miopen_depthwise_convolution) \ +_(aten, miopen_depthwise_convolution_backward) \ +_(aten, miopen_depthwise_convolution_backward_input) \ +_(aten, miopen_depthwise_convolution_backward_weight) \ +_(aten, miopen_rnn) \ +_(aten, miopen_rnn_backward) \ +_(aten, mkldnn_convolution) \ +_(aten, mkldnn_convolution_backward) \ +_(aten, mkldnn_convolution_backward_input) \ +_(aten, mkldnn_convolution_backward_weights) \ +_(aten, mm) \ +_(aten, mode) \ +_(aten, mse_loss) \ +_(aten, mse_loss_backward) \ +_(aten, mse_loss_forward) \ +_(aten, mul) \ +_(aten, mul_) \ +_(aten, multi_margin_loss) \ +_(aten, multi_margin_loss_backward) \ +_(aten, multi_margin_loss_forward) \ +_(aten, multilabel_margin_loss) \ +_(aten, multilabel_margin_loss_backward) \ +_(aten, multilabel_margin_loss_forward) \ +_(aten, multinomial) \ +_(aten, mv) \ +_(aten, mvlgamma) \ +_(aten, narrow) \ +_(aten, narrow_copy) \ +_(aten, native_batch_norm) \ +_(aten, native_batch_norm_backward) \ +_(aten, native_clone) \ +_(aten, native_get_device) \ +_(aten, native_norm) \ +_(aten, native_pow) \ +_(aten, native_resize_as) \ +_(aten, native_tensor) \ +_(aten, native_zero) \ +_(aten, ne) \ +_(aten, neg) \ +_(aten, bitwise_not) \ +_(aten, bitwise_xor) \ +_(aten, nll_loss) \ +_(aten, nll_loss2d) \ +_(aten, nll_loss2d_backward) \ +_(aten, nll_loss2d_forward) \ +_(aten, nll_loss_backward) \ +_(aten, nll_loss_forward) \ +_(aten, nonzero) \ +_(aten, norm) \ +_(aten, norm_except_dim) \ +_(aten, normal) \ +_(aten, nuclear_norm) \ +_(aten, numel) \ +_(aten, ones) \ +_(aten, ones_like) \ +_(aten, orgqr) \ +_(aten, ormqr) \ +_(aten, pairwise_distance) \ +_(aten, pdist) \ +_(aten, cdist) \ +_(aten, permute) \ +_(aten, pin_memory) \ +_(aten, pinverse) \ +_(aten, pixel_shuffle) \ +_(aten, poisson) \ +_(aten, polygamma) \ +_(aten, pow) \ +_(aten, prelu) \ +_(aten, prelu_backward) \ +_(aten, prod) \ +_(aten, put) \ +_(aten, qr) \ +_(aten, rand) \ +_(aten, rand_like) \ +_(aten, randint) \ +_(aten, randint_like) \ +_(aten, randn) \ +_(aten, randn_like) \ +_(aten, random) \ +_(aten, randperm) \ +_(aten, range) \ +_(aten, reciprocal) \ +_(aten, reflection_pad1d) \ +_(aten, reflection_pad1d_backward) \ +_(aten, reflection_pad1d_forward) \ +_(aten, reflection_pad2d) \ +_(aten, reflection_pad2d_backward) \ +_(aten, reflection_pad2d_forward) \ +_(aten, relu) \ +_(aten, remainder) \ +_(aten, renorm) \ +_(aten, repeat) \ +_(aten, 
replication_pad1d) \ +_(aten, replication_pad1d_backward) \ +_(aten, replication_pad1d_forward) \ +_(aten, replication_pad2d) \ +_(aten, replication_pad2d_backward) \ +_(aten, replication_pad2d_forward) \ +_(aten, replication_pad3d) \ +_(aten, replication_pad3d_backward) \ +_(aten, replication_pad3d_forward) \ +_(aten, reshape) \ +_(aten, reshape_as) \ +_(aten, resize) \ +_(aten, resize_) \ +_(aten, resize_as) \ +_(aten, resize_as_) \ +_(aten, rfft) \ +_(aten, rnn_relu) \ +_(aten, rnn_relu_cell) \ +_(aten, rnn_tanh) \ +_(aten, rnn_tanh_cell) \ +_(aten, rot90) \ +_(aten, round) \ +_(aten, rrelu) \ +_(aten, rrelu_with_noise) \ +_(aten, rrelu_with_noise_backward) \ +_(aten, rrelu_with_noise_forward) \ +_(aten, rsqrt) \ +_(aten, scatter) \ +_(aten, scatter_add) \ +_(aten, select) \ +_(aten, selu) \ +_(aten, set) \ +_(aten, sigmoid) \ +_(aten, sign) \ +_(aten, sin) \ +_(aten, sinh) \ +_(aten, size) \ +_(aten, sizes) \ +_(aten, slice) \ +_(aten, slogdet) \ +_(aten, smm) \ +_(aten, smooth_l1_loss) \ +_(aten, smooth_l1_loss_backward) \ +_(aten, smooth_l1_loss_forward) \ +_(aten, soft_margin_loss) \ +_(aten, soft_margin_loss_backward) \ +_(aten, soft_margin_loss_forward) \ +_(aten, softmax) \ +_(aten, _softmax) \ +_(aten, _softmax_backward_data) \ +_(aten, softplus) \ +_(aten, softplus_backward) \ +_(aten, softplus_forward) \ +_(aten, softshrink) \ +_(aten, softshrink_backward) \ +_(aten, softshrink_forward) \ +_(aten, solve) \ +_(aten, sort) \ +_(aten, sparse_coo_tensor) \ +_(aten, sparse_mask) \ +_(aten, sparse_resize) \ +_(aten, sparse_resize_and_clear) \ +_(aten, split) \ +_(aten, split_with_sizes) \ +_(aten, sqrt) \ +_(aten, squeeze) \ +_(aten, sspaddmm) \ +_(aten, stack) \ +_(aten, std) \ +_(aten, stft) \ +_(aten, storage_offset) \ +_(aten, stride) \ +_(aten, strides) \ +_(aten, sub) \ +_(aten, sub_) \ +_(aten, rsub) \ +_(aten, sum) \ +_(aten, sum_to_size) \ +_(aten, svd) \ +_(aten, symeig) \ +_(aten, t) \ +_(aten, take) \ +_(aten, tan) \ +_(aten, tanh) \ +_(aten, tensor) \ +_(aten, tensordot) \ +_(aten, th_addmm) \ +_(aten, th_clone) \ +_(aten, th_norm) \ +_(aten, th_pow) \ +_(aten, th_resize_as) \ +_(aten, th_tensor) \ +_(aten, th_zero) \ +_(aten, thnn_conv2d) \ +_(aten, thnn_conv2d_backward) \ +_(aten, thnn_conv2d_forward) \ +_(aten, slow_conv3d) \ +_(aten, slow_conv3d_backward) \ +_(aten, slow_conv3d_forward) \ +_(aten, thnn_conv_depthwise2d) \ +_(aten, thnn_conv_depthwise2d_backward) \ +_(aten, thnn_conv_depthwise2d_forward) \ +_(aten, slow_conv_dilated2d) \ +_(aten, slow_conv_dilated2d_backward) \ +_(aten, slow_conv_dilated3d) \ +_(aten, slow_conv_dilated3d_backward) \ +_(aten, slow_conv_transpose2d) \ +_(aten, slow_conv_transpose2d_backward) \ +_(aten, slow_conv_transpose3d) \ +_(aten, slow_conv_transpose3d_backward) \ +_(aten, threshold) \ +_(aten, threshold_backward) \ +_(aten, to) \ +_(aten, to_sparse) \ +_(aten, to_dense) \ +_(aten, topk) \ +_(aten, trace) \ +_(aten, transpose) \ +_(aten, triangular_solve) \ +_(aten, tril) \ +_(aten, triplet_margin_loss) \ +_(aten, triu) \ +_(aten, trunc) \ +_(aten, type_as) \ +_(aten, unbind) \ +_(aten, unfold) \ +_(aten, uniform) \ +_(aten, unsqueeze) \ +_(aten, upsample_bilinear2d) \ +_(aten, upsample_bilinear2d_backward) \ +_(aten, upsample_bilinear2d_forward) \ +_(aten, upsample_bicubic2d) \ +_(aten, upsample_bicubic2d_backward) \ +_(aten, upsample_bicubic2d_forward) \ +_(aten, upsample_linear1d) \ +_(aten, upsample_linear1d_backward) \ +_(aten, upsample_linear1d_forward) \ +_(aten, upsample_nearest1d) \ +_(aten, upsample_nearest1d_backward) \ 
+_(aten, upsample_nearest1d_forward) \ +_(aten, upsample_nearest2d) \ +_(aten, upsample_nearest2d_backward) \ +_(aten, upsample_nearest2d_forward) \ +_(aten, upsample_nearest3d) \ +_(aten, upsample_nearest3d_backward) \ +_(aten, upsample_nearest3d_forward) \ +_(aten, upsample_trilinear3d) \ +_(aten, upsample_trilinear3d_backward) \ +_(aten, upsample_trilinear3d_forward) \ +_(aten, values) \ +_(aten, var) \ +_(aten, view) \ +_(aten, view_as) \ +_(aten, where) \ +_(aten, zero) \ +_(aten, zeros) \ +_(aten, zeros_like) \ +/* nothing */ + +#define FORALL_ATTR_BASE_SYMBOLS(_) \ +_(attr, A) \ +_(attr, C) \ +_(attr, H) \ +_(attr, LU_data) \ +_(attr, LU_pivots) \ +_(attr, N) \ +_(attr, W) \ +_(attr, accumulate) \ +_(attr, align_corners) \ +_(attr, alpha) \ +_(attr, anchor) \ +_(attr, argmaxes) \ +_(attr, atol) \ +_(attr, b_hh) \ +_(attr, b_ih) \ +_(attr, bag_size) \ +_(attr, base) \ +_(attr, batch1) \ +_(attr, batch2) \ +_(attr, batch_first) \ +_(attr, batch_sizes) \ +_(attr, benchmark) \ +_(attr, beta) \ +_(attr, bias) \ +_(attr, bias_defined) \ +_(attr, bidirectional) \ +_(attr, bins) \ +_(attr, blank) \ +_(attr, buffer) \ +_(attr, ceil_mode) \ +_(attr, checked_signal_sizes) \ +_(attr, chunks) \ +_(attr, columns) \ +_(attr, complex_input) \ +_(attr, complex_output) \ +_(attr, condition) \ +_(attr, count_include_pad) \ +_(attr, cudnn_enable) \ +_(attr, cudnn_enabled) \ +_(attr, cx) \ +_(attr, cy) \ +_(attr, data) \ +_(attr, dense_dim) \ +_(attr, descending) \ +_(attr, deterministic) \ +_(attr, device) \ +_(attr, diagonal) \ +_(attr, dilation) \ +_(attr, dim) \ +_(attr, dim0) \ +_(attr, dim1) \ +_(attr, dim2) \ +_(attr, dimension) \ +_(attr, dims) \ +_(attr, dims_other) \ +_(attr, dims_self) \ +_(attr, divisor_override) \ +_(attr, dropout) \ +_(attr, dropout_seed) \ +_(attr, dropout_state) \ +_(attr, dtype) \ +_(attr, eigenvectors) \ +_(attr, end) \ +_(attr, end_dim) \ +_(attr, eps) \ +_(attr, epsilon) \ +_(attr, equal_nan) \ +_(attr, equation) \ +_(attr, expand1) \ +_(attr, expand2) \ +_(attr, expand3) \ +_(attr, exponent) \ +_(attr, exponential_average_factor) \ +_(attr, fgrad_input) \ +_(attr, fill_value) \ +_(attr, finput) \ +_(attr, from) \ +_(attr, g) \ +_(attr, gO) \ +_(attr, generator) \ +_(attr, ggI) \ +_(attr, ggW) \ +_(attr, ggb) \ +_(attr, grad) \ +_(attr, gradOutput) \ +_(attr, grad_bias) \ +_(attr, grad_cy) \ +_(attr, grad_hy) \ +_(attr, grad_input) \ +_(attr, grad_out) \ +_(attr, grad_output) \ +_(attr, grad_w) \ +_(attr, grad_weight) \ +_(attr, grid) \ +_(attr, groups) \ +_(attr, has_bias) \ +_(attr, has_biases) \ +_(attr, hidden_bias) \ +_(attr, hidden_gates) \ +_(attr, hidden_size) \ +_(attr, high) \ +_(attr, hop_length) \ +_(attr, hx) \ +_(attr, i1) \ +_(attr, i2) \ +_(attr, i3) \ +_(attr, ignore_index) \ +_(attr, implicit) \ +_(attr, index) \ +_(attr, indices) \ +_(attr, info) \ +_(attr, input) \ +_(attr, input1) \ +_(attr, input2) \ +_(attr, input3) \ +_(attr, input_bias) \ +_(attr, input_gates) \ +_(attr, input_lengths) \ +_(attr, input_scale) \ +_(attr, input_size) \ +_(attr, interpolation_mode) \ +_(attr, inverse) \ +_(attr, is_target) \ +_(attr, k) \ +_(attr, keepdim) \ +_(attr, kernel_size) \ +_(attr, lambd) \ +_(attr, largest) \ +_(attr, layout) \ +_(attr, left) \ +_(attr, length) \ +_(attr, lengths) \ +_(attr, like) \ +_(attr, log_alpha) \ +_(attr, log_probs) \ +_(attr, low) \ +_(attr, lower) \ +_(attr, lu) \ +_(attr, m) \ +_(attr, margin) \ +_(attr, mask) \ +_(attr, mat) \ +_(attr, mat1) \ +_(attr, mat2) \ +_(attr, max) \ +_(attr, max_indices) \ +_(attr, max_norm) \ 
+_(attr, max_size) \ +_(attr, max_val) \ +_(attr, max_values) \ +_(attr, maximum_indices) \ +_(attr, maxnorm) \ +_(attr, mean) \ +_(attr, median) \ +_(attr, min) \ +_(attr, min_indices) \ +_(attr, min_val) \ +_(attr, minlength) \ +_(attr, mode) \ +_(attr, momentum) \ +_(attr, n) \ +_(attr, n_fft) \ +_(attr, neg_log_likelihood) \ +_(attr, negative) \ +_(attr, negative_slope) \ +_(attr, noise) \ +_(attr, non_blocking) \ +_(attr, norm_type) \ +_(attr, normalized) \ +_(attr, normalized_shape) \ +_(attr, num_groups) \ +_(attr, num_layers) \ +_(attr, num_samples) \ +_(attr, num_weights) \ +_(attr, offset) \ +_(attr, offset2bag) \ +_(attr, offsets) \ +_(attr, ones) \ +_(attr, onesided) \ +_(attr, options) \ +_(attr, other) \ +_(attr, output) \ +_(attr, output_mask) \ +_(attr, output_padding) \ +_(attr, output_size) \ +_(attr, output_sizes) \ +_(attr, p) \ +_(attr, pad) \ +_(attr, padding) \ +_(attr, padding_idx) \ +_(attr, padding_mode) \ +_(attr, padding_value) \ +_(attr, params) \ +_(attr, pdist) \ +_(attr, cdist) \ +_(attr, std_mean) \ +_(attr, var_mean) \ +_(attr, periodic) \ +_(attr, pivot) \ +_(attr, pivots) \ +_(attr, pooledHeight) \ +_(attr, pooledWidth) \ +_(attr, positive) \ +_(attr, pow) \ +_(attr, random_samples) \ +_(attr, rcond) \ +_(attr, reduction) \ +_(attr, repeats) \ +_(attr, replacement) \ +_(attr, res1) \ +_(attr, res2) \ +_(attr, res3) \ +_(attr, reserve) \ +_(attr, result) \ +_(attr, return_inverse) \ +_(attr, rois) \ +_(attr, rtol) \ +_(attr, running_mean) \ +_(attr, running_var) \ +_(attr, save_mean) \ +_(attr, save_std) \ +_(attr, save_var) \ +_(attr, saved_g) \ +_(attr, saved_norms) \ +_(attr, saved_v) \ +_(attr, scale) \ +_(attr, scale_grad_by_freq) \ +_(attr, self) \ +_(attr, self_size) \ +_(attr, self_ty) \ +_(attr, shape) \ +_(attr, sigma) \ +_(attr, signal_ndim) \ +_(attr, signal_sizes) \ +_(attr, size) \ +_(attr, solution) \ +_(attr, some) \ +_(attr, sorted) \ +_(attr, source) \ +_(attr, sparse) \ +_(attr, sparse_dim) \ +_(attr, sparse_dtype) \ +_(attr, spatialScale) \ +_(attr, split_size) \ +_(attr, split_sizes) \ +_(attr, src) \ +_(attr, start) \ +_(attr, start_dim) \ +_(attr, std) \ +_(attr, step) \ +_(attr, steps) \ +_(attr, storage) \ +_(attr, storageOffset) \ +_(attr, storage_offset) \ +_(attr, stride) \ +_(attr, sumdim) \ +_(attr, swap) \ +_(attr, symmetric) \ +_(attr, target) \ +_(attr, target_lengths) \ +_(attr, targets) \ +_(attr, tensor) \ +_(attr, tensor1) \ +_(attr, tensor2) \ +_(attr, tensors) \ +_(attr, the_template) \ +_(attr, theta) \ +_(attr, threshold) \ +_(attr, to) \ +_(attr, tol) \ +_(attr, total) \ +_(attr, total_length) \ +_(attr, total_weight) \ +_(attr, train) \ +_(attr, training) \ +_(attr, transpose) \ +_(attr, transposed) \ +_(attr, unbiased) \ +_(attr, unitriangular) \ +_(attr, unroll_dim) \ +_(attr, upper) \ +_(attr, upscale_factor) \ +_(attr, use_input_stats) \ +_(attr, v) \ +_(attr, value) \ +_(attr, values) \ +_(attr, vec) \ +_(attr, vec1) \ +_(attr, vec2) \ +_(attr, w_hh) \ +_(attr, w_ih) \ +_(attr, weight) \ +_(attr, weight_arr) \ +_(attr, weight_buf) \ +_(attr, weight_size) \ +_(attr, weight_stride0) \ +_(attr, weights) \ +_(attr, win_length) \ +_(attr, window) \ +_(attr, window_length) \ +_(attr, workspace) \ +_(attr, x) \ +_(attr, x1) \ +_(attr, x2) diff --git a/thirdparty/libtorch/include/ATen/core/blob.h b/thirdparty/libtorch/include/ATen/core/blob.h new file mode 100644 index 0000000000..60694160c7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/blob.h @@ -0,0 +1,210 @@ +#pragma once + +#include +#include 
+#include +#include +#include + +#include +#include +#include + +namespace caffe2 { + +class Tensor; + +/** + * @brief Blob is a general container that hosts a typed pointer. + * + * A Blob hosts a pointer as well as its type, and takes charge of deleting it + * properly when the blob is deallocated or re-allocated with a new type. A blob + * could contain anything, although the most common case is to contain a Tensor. + */ +class CAFFE2_API Blob final : public c10::intrusive_ptr_target { + public: + /** + * Initializes an empty Blob. + */ + Blob() noexcept : meta_(), pointer_(nullptr), has_ownership_(false) {} + ~Blob() { + Reset(); + } + + Blob(Blob&& other) noexcept : Blob() { + swap(other); + } + + Blob& operator=(Blob&& other) noexcept { + Blob(std::move(other)).swap(*this); + return *this; + } + + /** + * Checks if the content stored in the blob is of type T. + */ + template + bool IsType() const noexcept { + return meta_.Match(); + } + + /** + * Returns the meta info of the blob. + */ + const TypeMeta& meta() const noexcept { + return meta_; + } + + /** + * Returns a printable typename of the blob. + */ + const char* TypeName() const noexcept { + return meta_.name(); + } + + /** + * @brief Gets the const reference of the stored object. The code checks if + * the stored object is of the desired type. + */ + // TODO(jerryzh): add a Get(DeviceType) function? + template + const T& Get() const { + AT_ASSERTM( + IsType(), + "wrong type for the Blob instance. Blob contains ", + meta_.name(), + " while caller expects ", + TypeMeta::TypeName()); + // TODO: after we add Get(DeviceType) + // and changed all the callsites, we can add + // a static assert here to enforce T != Tensor + return *static_cast(pointer_); + } + + const void* GetRaw() const noexcept { + return pointer_; + } + void* GetRaw() noexcept { + return pointer_; + } + + /** + * @brief Gets a mutable pointer to the stored object. + * + * If the current object is not of the right type, a new object is created + * and the old object is freed. Note that type T should have a default + * constructor. Otherwise, create the object yourself first, and use + * Reset(). + */ + template + T* GetMutable() { + static_assert( + std::is_default_constructible::value, + "GetMutable can't be called with non-default-constructible types. " + "Try using specialized methods"); + if (IsType()) { + return static_cast(pointer_); + } else { + // TODO Re-enable logging + // VLOG(1) << "Create new mutable object " << TypeMeta::TypeName(); + return Reset(new T()); + } + } + + template + T* GetMutableOrNull() { + if (IsType()) { + return static_cast(pointer_); + } else { + return nullptr; + } + } + + /** + * Sets the underlying object to the allocated one. The Blob then takes over + * the ownership of the passed in pointer. If there is already an object in + * the Blob, the old object is freed. + * + * This is used when the underlying class T does not have a default ctor, or + * complex initializations needs to be done outside the blob. + */ + template + T* Reset(T* allocated) { + free_(); + meta_ = TypeMeta::Make(); + pointer_ = static_cast(allocated); + has_ownership_ = true; + return allocated; + } + + /** + * Sets the underlying object to the allocated one, but does not take over + * the ownership of the passed in pointer. If there is already an object in + * the Blob, the old object is freed. 
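A minimal usage sketch of the Blob API introduced above (illustrative only; it assumes these vendored headers are on the include path and the program links against libtorch):

#include <ATen/core/blob.h>

#include <iostream>
#include <vector>

int main() {
  caffe2::Blob blob;

  // GetMutable default-constructs a std::vector<int> and stores it in the blob.
  auto* vec = blob.GetMutable<std::vector<int>>();
  vec->push_back(42);

  // IsType / Get check the stored type and read the typed content.
  if (blob.IsType<std::vector<int>>()) {
    std::cout << blob.Get<std::vector<int>>().front() << "\n";  // prints 42
  }

  // Reset frees the stored object and leaves the blob empty again.
  blob.Reset();
}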
+ * + * Unlike Reset, this does not take over the ownership of the pointer and the + * caller is responsible for making sure that the lifetime of the allocated + * blob outlasts the lifetime of any access to this blob, until another Reset + * call is made or the blob is destructed. + */ + template + typename std::remove_const::type* ShareExternal( + typename std::remove_const::type* allocated) { + return static_cast(ShareExternal( + static_cast(allocated), + TypeMeta::Make::type>())); + } + + // TODO Remove ShareExternal() and have Blob always own its content + void* ShareExternal(void* allocated, const TypeMeta& meta) { + free_(); + meta_ = meta; + pointer_ = static_cast(allocated); + has_ownership_ = false; + return allocated; + } + + /** + * Resets the Blob to an empty one. + */ + void Reset() { + free_(); + pointer_ = nullptr; + meta_ = TypeMeta(); + has_ownership_ = false; + } + + /** + * @brief Swaps the underlying storage of two blobs. + */ + void swap(Blob& rhs) { + using std::swap; + swap(meta_, rhs.meta_); + swap(pointer_, rhs.pointer_); + swap(has_ownership_, rhs.has_ownership_); + } + + private: + void free_() { + if (has_ownership_) { + AT_ASSERTM(pointer_ != nullptr, "Can't have ownership of nullptr"); + (*meta_.deleteFn())(pointer_); + } + } + + TypeMeta meta_; + void* pointer_ = nullptr; + bool has_ownership_ = false; + + C10_DISABLE_COPY_AND_ASSIGN(Blob); +}; + +inline void swap(Blob& lhs, Blob& rhs) { + lhs.swap(rhs); +} + +inline std::ostream& operator<<(std::ostream& out, const Blob& v) { + return out << "Blob[" << v.TypeName() << "]"; +} + +} // namespace caffe2 diff --git a/thirdparty/libtorch/include/ATen/core/boxing/KernelFunction.h b/thirdparty/libtorch/include/ATen/core/boxing/KernelFunction.h new file mode 100644 index 0000000000..90c9ebdfe2 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/boxing/KernelFunction.h @@ -0,0 +1,408 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace c10 { + +namespace detail { +template struct boxAndCallBoxedFunc; +} + +/** + * KernelFunction is similar to std::function but stores a kernel function. + * You can create a KernelFunction from a boxed or unboxed function/functor/lambda + * and call it in a boxed or unboxed way. If the way it was created doesn't + * match the way it was called, it will do boxing or unboxing as necessary. + */ +class CAFFE2_API KernelFunction final { +public: + using BoxedKernelFunction = void(OperatorKernel*, Stack*); + + KernelFunction() + : functorFactory_() + , functor_(nullptr) + , boxed_kernel_func_(nullptr) + , unboxed_kernel_func_(nullptr) + {} + + bool isValid() const { + // TODO We want to introduce the invariant that all kernels must be callable in a boxed way, then this should only check boxed_kernel_func_. + return boxed_kernel_func_ != nullptr || unboxed_kernel_func_ != nullptr; + } + + /** + * Call the function in a boxed way. + * If the kernel function was created with an unboxed function, + * this will call an unboxing wrapper which then calls into that + * unboxed function. 
+ * + * Example: + * + * > void boxed_func(OperatorKernel*, Stack* stack) {...} + * > KernelFunction func = KernelFunction::makeFromBoxedFunction(&boxed_func); + * > Tensor result = func.callBoxed(stack); + * + * Or, with an unboxed implementation: + * + * > KernelFunction func = KernelFunction::makeFromUnboxedLambda( + * > [] (Tensor a, bool b) -> Tensor {...}); + * > Tensor result = func.callBoxed(stack); + */ + void callBoxed(Stack* stack) const { + if (C10_UNLIKELY(boxed_kernel_func_ == nullptr)) { + if (unboxed_kernel_func_ == nullptr) { + TORCH_INTERNAL_ASSERT(false, "Tried to call KernelFunction::callBoxed() on an uninitialized KernelFunction."); + } else { + // TODO We want to introduce the invariant that all kernels must be callable in a boxed way, then this case should be impossible. + TORCH_INTERNAL_ASSERT(false, "Tried to call KernelFunction::callBoxed() on a KernelFunction that can only be called with KernelFunction::callUnboxed()."); + } + } + + (*boxed_kernel_func_)(getFunctor_(), stack); + } + + /** + * Call the function in an unboxed way. + * As the "Only" in the name suggests, this only works for KernelFunctions + * that are backed by an unboxed kernel. If the KernelFunction was created + * in a boxed way, this will fail (also see KernelFunction::callUnboxed()). + * + * KernelFunction::callUnboxed() is generally better, since it will allow + * calling KernelFunctions that are backed by either boxed or unboxed + * kernels, but that one will not work for all types. + * + * Example: + * + * > KernelFunction func = KernelFunction::makeFromUnboxedLambda( + * > [] (Tensor a, bool b) -> Tensor {...}); + * > Tensor result = func.callUnboxedOnly(tensor1, true); + */ + template + Return callUnboxedOnly(Args... args) const { + // note: Args above is intentionally not Args&&. We don't want perfect + // forwarding, which would require Args to be deduced, but instead we + // want callers to explicitly specify the Args. + + // TODO Remove this function once all kernels support a boxed variant + + if (C10_LIKELY(unboxed_kernel_func_ != nullptr)) { + using ActualSignature = Return (OperatorKernel*, Args...); + ActualSignature* func = reinterpret_cast(unboxed_kernel_func_); + return (*func)(getFunctor_(), std::forward(args)...); + } + + TORCH_INTERNAL_ASSERT(false, "Tried to call KernelFunction::callUnboxedOnly() for a kernel that doesn't have an unboxed version."); + } + + /** + * Call the function in an unboxed way. + * If the kernel function was created with a boxed function, + * this will box all inputs and then call into that boxed function. + * + * Note that this doesn't work for all types yet. + * + * Example: + * + * > KernelFunction func = KernelFunction::makeFromUnboxedLambda( + * > [] (Tensor a, bool b) -> Tensor {...}); + * > Tensor result = func.callUnboxed(tensor1, true); + * + * Or, with a boxed implementation: + * + * > void boxed_func(OperatorKernel*, Stack* stack) {...} + * > KernelFunction func = KernelFunction::makeFromBoxedFunction(&boxed_func); + * > Tensor result = func.callUnboxed(tensor1, true); + */ + template + Return callUnboxed(Args... args) const { + // note: Args above is intentionally not Args&&. We don't want perfect + // forwarding, which would require Args to be deduced, but instead we + // want callers to explicitly specify the Args. 
+ + if (C10_LIKELY(unboxed_kernel_func_ != nullptr)) { + using ActualSignature = Return (OperatorKernel*, Args...); + ActualSignature* func = reinterpret_cast(unboxed_kernel_func_); + return (*func)(getFunctor_(), std::forward(args)...); + } + + TORCH_INTERNAL_ASSERT(boxed_kernel_func_ != nullptr, "Tried to call KernelFunction::callUnboxed() on an uninitialized KernelFunction."); + return detail::boxAndCallBoxedFunc::call(boxed_kernel_func_, getFunctor_(), std::forward(args)...); + } + + /** + * Create a KernelFunction from a boxed function. + * + * Example: + * + * > void boxed_func(OperatorKernel*, Stack* stack) {...} + * > KernelFunction func = KernelFunction::makeFromBoxedFunction(&boxed_func); + */ + static KernelFunction makeFromBoxedFunction(BoxedKernelFunction* func) { + return KernelFunction( + nullptr, // no functorFactory_, this can only be called in a boxed way. + nullptr, // no functor_ object either + func, + nullptr // no unboxed function pointer + ); + } + + /** + * Create a KernelFunction from an unboxed functor. + * + * Example: + * + * > class MyFunctor final { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > KernelFunction func = KernelFunction::makeFromUnboxedFunctor(guts::make_unique()); + */ + template + static KernelFunction makeFromUnboxedFunctor(std::unique_ptr kernelFunctor) { + static_assert(guts::is_functor::value, "Tried to call KernelFunction::makeFromUnboxedFunctor but the argument is not a functor."); + static_assert(std::is_base_of::value, "Tried to call KernelFunction::makeFromUnboxedFunctor, but the functor doesn't inherit from c10::OperatorKernel. Please have the functor inherit from it."); + + return KernelFunction( + nullptr, // no functorFactory_ because we already have the functor_ + std::move(kernelFunctor), + &detail::wrap_kernel_functor_boxed::call, + reinterpret_cast(&detail::wrap_kernel_functor_unboxed::call) + ); + } + + /** + * Create a KernelFunction from an unboxed functor and delay functor creation + * until the first call to the KernelFunction. This is useful for functors + * that are registered at static initialization time but can't be created + * there yet. For example, we want to allow functors to store Tensor members + * (we can't create Tensor objects at static initialization time because of SIOF) + * but these functors are registered as kernels at static initialization time. + * Using this method, we can delay functor instantiation until the operator + * is called for the first time. + * + * Example: + * + * > class MyFunctor final { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > KernelFunction func = KernelFunction::makeFromUnboxedFunctor([] { + * > return guts::make_unique(); + * > }); + */ + template + static KernelFunction makeFromUnboxedFunctorFactory(std::function()> kernelFunctorFactory) { + static_assert(guts::is_functor::value, "Tried to call KernelFunction::makeFromUnboxedFunctor but the argument is not a functor."); + static_assert(std::is_base_of::value, "Tried to call KernelFunction::makeFromUnboxedFunctor, but the functor doesn't inherit from c10::OperatorKernel. 
Please have the functor inherit from it."); + + return KernelFunction( + std::move(kernelFunctorFactory), + nullptr, // delay creation of functor_ (it will be created by calling functorFactory_ later) + &detail::wrap_kernel_functor_boxed::call, + reinterpret_cast(&detail::wrap_kernel_functor_unboxed::call) + ); + } + + /** + * Create a KernelFunction from an unboxed functor and prevent creation of an + * unboxing-wrapper. This means that you can only call this KernelFunction + * using KernelFunction::callUnboxedOnly(), not using KernelFunction::callBoxed() + * or KernelFunction::callUnboxed(). + * + * This is necessary because our unboxing wrappers don't work for all types + * yet, so if you want to use one of these types as function arguments, + * you need to use makeFromUnboxedOnlyFunctor. + * + * Example: + * + * > class MyFunctor final { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > KernelFunction func = KernelFunction::makeFromUnboxedOnlyFunctor(guts::make_unique()); + */ + template + static KernelFunction makeFromUnboxedOnlyFunctor(std::unique_ptr kernelFunctor) { + // TODO We want to get rid of kernels that have only an unboxed function pointer. + // All kernels should have a boxed pointer. + + static_assert(guts::is_functor::value, "Tried to call KernelFunction::makeFromUnboxedFunctor but the argument is not a functor."); + static_assert(std::is_base_of::value, "Tried to call KernelFunction::makeFromUnboxedFunctor, but the functor doesn't inherit from c10::OperatorKernel. Please have the functor inherit from it."); + + return KernelFunction( + nullptr, // no functorFactory_ because we already have the functor_ + std::move(kernelFunctor), + nullptr, // Don't create a boxed kernel for this + reinterpret_cast(&detail::wrap_kernel_functor_unboxed::call) + ); + } + + /** + * Create a KernelFunction from an unboxed function. + * This is usually better than KernelFunction::makeFromUnboxedRuntimeFunction + * because knowing the function pointer as a template argument (i.e. at + * compile time) allows the compiler to inline the function into its + * unboxing wrapper and yields better performance when calling the function. + * + * Example: + * + * > Tensor unboxed_func(Tensor a, Tensor b) {...} + * > KernelFunction func = KernelFunction::makeFromUnboxedFunction(); + */ + template + static KernelFunction makeFromUnboxedFunction() { + static_assert(guts::is_function_type::value, "Tried to call KernelFunction::makeFromUnboxedFunction with invalid template parameters. They must be ."); + static_assert(!std::is_same::value, "Tried to call KernelFunction::makeFromUnboxedFunction with a boxed function pointer. Please use KernelFunction::makeFromBoxedFunction instead."); + static_assert(func != nullptr, "Kernel function cannot be nullptr"); + + return makeFromUnboxedFunctor::type>( + guts::make_unique_base::type>() + ); + } + + /** + * Create a KernelFunction from an unboxed function and prevent creation of an + * unboxing-wrapper. This means that you can only call this KernelFunction + * using KernelFunction::callUnboxedOnly(), not using KernelFunction::callBoxed() + * or KernelFunction::callUnboxed(). + * + * This is necessary because our unboxing wrappers don't work for all types + * yet, so if you want to use one of these types as function arguments, + * you need to use makeFromUnboxedOnlyFunctor. 
+ * + * Example: + * + * > Tensor unboxed_func(Tensor a, Tensor b) {...} + * > KernelFunction func = KernelFunction::makeFromUnboxedOnlyFunction(); + */ + template + static KernelFunction makeFromUnboxedOnlyFunction() { + // TODO We want to get rid of kernels that have only an unboxed function pointer. + // All kernels should have a boxed pointer. + + static_assert(guts::is_function_type::value, "Tried to call KernelFunction::makeFromUnboxedOnlyFunction with invalid template parameters. They must be ."); + static_assert(!std::is_same::value, "Tried to call KernelFunction::makeFromUnboxedOnlyFunction with a boxed function pointer. Please use KernelFunction::makeFromBoxedFunction instead."); + static_assert(func != nullptr, "Kernel function cannot be nullptr"); + + return makeFromUnboxedOnlyFunctor::type> ( + guts::make_unique_base::type>() + ); + } + + /** + * Create a KernelFunction from an unboxed function. + * KernelFunction::makeFromUnboxedFunction is usually a better choice than + * this if you know the function pointer at compile time, see doc comment + * there for an explanation. + * + * Example: + * + * > Tensor unboxed_func(Tensor a, Tensor b) {...} + * > KernelFunction func = KernelFunction::makeFromUnboxedRuntimeFunction(&unboxed_func); + */ + template + static KernelFunction makeFromUnboxedRuntimeFunction(FuncType* func) { + static_assert(guts::is_function_type::value, "Tried to call KernelFunction::makeFromUnboxedRuntimeFunction with a non-function type."); + static_assert(!std::is_same::value, "Tried to call KernelFunction::makeFromUnboxedRuntimeFunction with a boxed function pointer. Please use KernelFunction::makeFromBoxedFunction instead."); + TORCH_INTERNAL_ASSERT(func != nullptr, "Kernel function cannot be nullptr"); + + return makeFromUnboxedFunctor>>( + guts::make_unique_base>>(func) + ); + } + + template + static KernelFunction makeFromUnboxedOnlyRuntimeFunction(FuncType* func) { + static_assert(guts::is_function_type::value, "Tried to call KernelFunction::makeFromUnboxedRuntimeFunction with a non-function type."); + static_assert(!std::is_same::value, "Tried to call KernelFunction::makeFromUnboxedRuntimeFunction with a boxed function pointer. Please use KernelFunction::makeFromBoxedFunction instead."); + TORCH_INTERNAL_ASSERT(func != nullptr, "Kernel function cannot be nullptr"); + + return makeFromUnboxedOnlyFunctor>>( + guts::make_unique_base>>(func) + ); + } + + /** + * Create a KernelFunction from an unboxed lambda. 
+ * + * Example: + * + * > KernelFunction func = KernelFunction::makeFromUnboxedLambda( + * > [] (Tensor a, bool b) -> Tensor {...}); + */ + template + static KernelFunction makeFromUnboxedLambda(Lambda&& lambda) { + static_assert(guts::is_functor>::value, "Tried to call KernelFunction::makeFromUnboxedLambda with a non-lambda type."); + + return makeFromUnboxedFunctor>>( + guts::make_unique_base>>(std::forward(lambda)) + ); + } + +private: + + explicit KernelFunction(std::function()> functorFactory, std::unique_ptr functor, BoxedKernelFunction* boxed_kernel_func, void* unboxed_kernel_func) + : functorFactory_(std::move(functorFactory)) + , functor_(std::move(functor)) + , boxed_kernel_func_(boxed_kernel_func) + , unboxed_kernel_func_(unboxed_kernel_func) + {} + + OperatorKernel* getFunctor_() const { + if (functor_.get() == nullptr) { + if (!functorFactory_) { + return nullptr; + } + functor_ = functorFactory_(); + } + return functor_.get(); + } + + // If the operator has an unboxed_kernel_func, then either + // functorFactory_ or functor_ must be set, possibly both. + // If functor_ is not set but functorFactory_ is, we will create + // functor_ by calling functorFactory_ the first time it is needed. + // We use this indirection because many KernelFunctions are created + // at static initialization time but are created with functors that + // store Tensor and we can't call the Tensor() constructor at static + // initialization time yet (SIOF). So these register with a + // functorFactory_ instead of a functor_ and will be initialized + // on the first call to the KernelFunction. + std::function()> functorFactory_; + mutable std::shared_ptr functor_; + + BoxedKernelFunction* boxed_kernel_func_; + void* unboxed_kernel_func_; +}; + +namespace detail { +template +struct boxAndCallBoxedFunc final { + static Return call(KernelFunction::BoxedKernelFunction* boxed_kernel_func, OperatorKernel* functor, Args... args) { + // TODO Reuse stack vector instead of allocating? + std::vector stack {std::forward(args)...}; + + (*boxed_kernel_func)(functor, &stack); + + TORCH_INTERNAL_ASSERT(stack.size() == 1, "A boxed kernel should only push one return to the stack"); + return std::move(stack[0]).to(); + } +}; +template +struct boxAndCallBoxedFunc final { + static void call(KernelFunction::BoxedKernelFunction* boxed_kernel_func, OperatorKernel* functor, Args... args) { + // TODO Reuse stack vector instead of allocating? + std::vector stack {std::forward(args)...}; + + (*boxed_kernel_func)(functor, &stack); + + TORCH_INTERNAL_ASSERT(stack.size() == 0, "A boxed kernel returned a value but when we called it with KernelFunction::callUnboxed, we expected it to return void."); + } +}; +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/boxing/kernel_function.h b/thirdparty/libtorch/include/ATen/core/boxing/kernel_function.h new file mode 100644 index 0000000000..ebd82fc540 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/boxing/kernel_function.h @@ -0,0 +1,29 @@ +#pragma once + +#include + +namespace c10 { +namespace detail { + // WrapKernelFunction: Wraps a compile time function pointer into a kernel functor. + // Since it is a compile time function pointer, many compilers can inline it + // into the wrapper and you don't get any performance overhead for wrapping. + template class WrapKernelFunction_ {}; + template + class WrapKernelFunction_> final : public c10::OperatorKernel { + public: + auto operator()(Parameters... 
args) -> decltype((*kernel_func)(std::forward(args)...)) { + return (*kernel_func)(std::forward(args)...); + } + }; + template::value>> + struct WrapKernelFunction final { + using type = WrapKernelFunction_< + FuncType, + kernel_func, + typename guts::function_traits::return_type, + typename guts::function_traits::parameter_types + >; + }; +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/boxing/kernel_functor.h b/thirdparty/libtorch/include/ATen/core/boxing/kernel_functor.h new file mode 100644 index 0000000000..3b7296fdfa --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/boxing/kernel_functor.h @@ -0,0 +1,302 @@ +#pragma once + +#include +#include + +namespace c10 { + +using Stack = torch::jit::Stack; // TODO Instead of this, move torch::jit::Stack to the c10 namespace. + +/** + * Inherit from OperatorKernel to implement a c10 kernel. + * + * Example: + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * + * The kernel class is allowed to have members but these are equivalent + * to global variables. The kernel implementation is responsible for + * preventing race conditions on them. + * + * See below for how to register this kernel with PyTorch. + */ +struct CAFFE2_API OperatorKernel { + virtual ~OperatorKernel() = default; +}; + +namespace detail { + // supported_primitive_arg_types defines which primitive types we allow in + // kernel functions as arguments or returns. + // Additionally, we support lists, dicts and optionals containing these types. + using supported_primitive_arg_types = guts::typelist::typelist< + int64_t, + double, + bool, + std::string, + at::Tensor, + at::Scalar + >; + + template struct assert_is_valid_input_type { + assert_is_valid_input_type() { + auto tmap = c10::getCustomClassTypeMap(); + TORCH_CHECK(c10::isCustomClassRegistered(), "Tried to use undefined class as input argument"); + } + }; + + template + struct assert_is_valid_input_type::value>> { + // everything is ok, this is a primitive type + }; + + template + struct assert_is_valid_input_type, AllowDeprecatedTypes> + : assert_is_valid_input_type {}; + + template + struct assert_is_valid_input_type, AllowDeprecatedTypes> + : assert_is_valid_input_type { + static_assert(guts::typelist::contains::value, "You tried to register a kernel with an unsupported input type: Dict where Key is invalid. We only support int64_t, double, bool, and string."); + }; + + template + struct assert_is_valid_input_type, AllowDeprecatedTypes> + : assert_is_valid_input_type { + static_assert(AllowDeprecatedTypes, "You tried to register a kernel with an unsupported input type: std::unordered_map. Please use Dict instead."); + static_assert(guts::typelist::contains::value, "You tried to register a kernel with an unsupported input type: std::unordered_map where Key is invalid. We only support int64_t, double, bool, and string."); + }; + + template + struct assert_is_valid_input_type, AllowDeprecatedTypes> + : assert_is_valid_input_type { + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported input type: List. Please use List, List or Tensor instead."); + }; + + template + struct assert_is_valid_input_type, AllowDeprecatedTypes> + : assert_is_valid_input_type { + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported input type: std::vector. 
Please use List, List or Tensor instead."); + // TODO static_assert(AllowDeprecatedTypes, "You tried to register a kernel with an unsupported input type: std::vector. Please use List instead."); + }; + + // The following specialisations of assert_is_valid_input_type are technically not + // necessary since we would hit the base case and show an error message + // there if they didn't exist, but we can show a better error message + // in some common error scenarios. + template + struct assert_is_valid_input_type::value>> { + // There is no reason to support float when we have double. Keep the API lean. + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported input type: float. Please use double instead."); + }; + template + struct assert_is_valid_input_type::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported input type: const char*. Please use std::string instead."); + }; + template + struct assert_is_valid_input_type, T>::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported input type: vector. Please use List instead."); + }; + template + struct assert_is_valid_input_type::value && !guts::typelist::contains::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported integral input type. Please use int64_t instead."); + }; + + template struct assert_is_valid_output_type { + assert_is_valid_output_type() { + auto tmap = getCustomClassTypeMap(); + TORCH_CHECK(c10::isCustomClassRegistered(), "Tried to use undefined class as output"); + } + }; + + template + struct assert_is_valid_output_type::value>> { + // everything is ok, this is a primitive type + }; + + template + struct assert_is_valid_output_type, AllowDeprecatedTypes> + : assert_is_valid_output_type {}; + + template + struct assert_is_valid_output_type, AllowDeprecatedTypes> + : assert_is_valid_output_type { + static_assert(guts::typelist::contains::value, "You tried to register a kernel with an unsupported output type: Dict where Key is invalid. We only support int64_t, double, bool, and string."); + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported output type: Dict. Please use Dict or Dict."); + }; + + template + struct assert_is_valid_output_type, AllowDeprecatedTypes> + : assert_is_valid_output_type { + static_assert(AllowDeprecatedTypes, "You tried to register a kernel with an unsupported output type: std::unordered_map. Please use Dict instead."); + static_assert(guts::typelist::contains::value, "You tried to register a kernel with an unsupported output type: std::unordered_map where Key is invalid. We only support int64_t, double, bool, and string."); + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported output type: std::unordered_map. Please use Dict or Dict."); + }; + + template + struct assert_is_valid_output_type, AllowDeprecatedTypes> + : assert_is_valid_output_type { + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported output type: List. Please use List, List or Tensor instead."); + }; + + template + struct assert_is_valid_output_type, AllowDeprecatedTypes> + : assert_is_valid_output_type { + static_assert(!std::is_same::value, "You tried to register a kernel with an unsupported output type: std::vector. 
Please use List, List or Tensor instead."); + // TODO static_assert(AllowDeprecatedTypes, "You tried to register a kernel with an unsupported output type: std::vector. Please use List instead."); + }; + + // The following specialisations of assert_is_valid_output_type are technically not + // necessary since we would hit the base case and show an error message + // there if they didn't exist, but we can show a better error message + // in some common error scenarios. + template + struct assert_is_valid_output_type::value>> { + // There is no reason to support float when we have double. Keep the API lean. + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported output type: float. Please use double instead."); + }; + template + struct assert_is_valid_output_type::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported output type: const char*. Please use std::string instead."); + }; + template + struct assert_is_valid_output_type, T>::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported output type: vector. Please use List instead."); + }; + template + struct assert_is_valid_output_type::value && !guts::typelist::contains::value>> { + static_assert(guts::false_t::value, "You tried to register a kernel with an unsupported integral output type. Please use int64_t instead."); + }; + + + template + T ivalue_to_arg(IValue&& v) { + assert_is_valid_input_type(); + return std::move(v).to(); + } + + template + IValue return_to_ivalue(T&& v) { + assert_is_valid_output_type(); + return c10::ivalue::from(v); + } + + template + typename guts::infer_function_traits_t::return_type call_functor_with_args_from_stack_(Functor* functor, Stack* stack, guts::index_sequence) { + (void)(stack); // when sizeof...(ivalue_arg_indices) == 0, this argument would be unused and we have to silence the compiler warning. + + constexpr size_t num_ivalue_args = sizeof...(ivalue_arg_indices); + + using IValueArgTypes = typename guts::infer_function_traits_t::parameter_types; + return (*functor)(ivalue_to_arg>>, AllowDeprecatedTypes>( + std::move(torch::jit::peek(*stack, ivalue_arg_indices, num_ivalue_args)) + )...); + } + + template + typename guts::infer_function_traits_t::return_type call_functor_with_args_from_stack(Functor* functor, Stack* stack) { + constexpr size_t num_ivalue_args = guts::infer_function_traits_t::number_of_parameters; + return call_functor_with_args_from_stack_(functor, stack, guts::make_index_sequence()); + } + + template + struct push_outputs final { + static void call(OutputType&& output, Stack* stack) { + torch::jit::push(*stack, return_to_ivalue(std::move(output))); + } + }; + template + struct push_outputs, AllowDeprecatedTypes> final { + static void call(std::tuple&& output, Stack* stack) { + call_(std::move(output), stack, guts::make_index_sequence()); + } + + private: + template + static void call_(std::tuple&& output, Stack* stack, guts::index_sequence) { + torch::jit::push(*stack, return_to_ivalue(std::move(std::get(output)))...); + } + }; + + template struct wrap_kernel_functor_boxed final {}; + + // SFINAE version for kernels that return an output + template + struct wrap_kernel_functor_boxed::return_type>::value>> final { + static_assert(std::is_base_of::value, "Tried to register a kernel functor using the kernel() API, but it doesn't inherit from c10::OperatorKernel. 
Please have the functor inherit from it."); + + static void call(OperatorKernel* functor, Stack* stack) { + constexpr size_t num_inputs = guts::infer_function_traits_t::number_of_parameters; + KernelFunctor* functor_ = static_cast(functor); + auto output = call_functor_with_args_from_stack(functor_, stack); + torch::jit::drop(*stack, num_inputs); + push_outputs::return_type, AllowDeprecatedTypes>::call(std::move(output), stack); + } + }; + + // SFINAE version for kernels that don't return an output + template + struct wrap_kernel_functor_boxed::return_type>::value>> final { + static_assert(std::is_base_of::value, "Tried to register a kernel functor using the kernel() API, but it doesn't inherit from c10::OperatorKernel. Please have the functor inherit from it."); + + static void call(OperatorKernel* functor, Stack* stack) { + constexpr size_t num_inputs = guts::infer_function_traits_t::number_of_parameters; + KernelFunctor* functor_ = static_cast(functor); + call_functor_with_args_from_stack(functor_, stack); + torch::jit::pop(*stack, num_inputs); + } + }; + + template struct wrap_kernel_functor_unboxed_ final {}; + template struct wrap_kernel_functor_unboxed_ final { + static_assert(std::is_same::return_type>::value, "Return type mismatch"); + static_assert(std::is_same, typename guts::infer_function_traits_t::parameter_types>::value, "Parameter types mismatch"); + + static ReturnType call(OperatorKernel* functor, ParameterTypes... args) { + KernelFunctor* functor_ = static_cast(functor); + return (*functor_)(std::forward(args)...); + } + }; + template using wrap_kernel_functor_unboxed = wrap_kernel_functor_unboxed_::func_type>; + + template + class KernelFactory final { + static_assert(std::is_constructible::value, "Wrong argument types for constructor of kernel functor."); + + public: + explicit constexpr KernelFactory(Args... args) + : constructor_parameters_(std::move(args)...) {} + + std::unique_ptr operator()() const { + return guts::apply( + [] (const Args&... params) -> std::unique_ptr {return guts::make_unique_base(params...); }, + constructor_parameters_); + } + + private: + std::tuple constructor_parameters_; + }; + + template + std::unique_ptr inferFunctionSchema_() { + return guts::make_unique(inferFunctionSchema("", "")); + } + + template + class FunctionSchemaInferer final { + public: + using func_type = typename c10::guts::infer_function_traits_t::func_type; + std::unique_ptr operator()() const { + return inferFunctionSchema_(); + } + }; +} + +} + +namespace torch { + using OperatorKernel = c10::OperatorKernel; +} diff --git a/thirdparty/libtorch/include/ATen/core/boxing/kernel_lambda.h b/thirdparty/libtorch/include/ATen/core/boxing/kernel_lambda.h new file mode 100644 index 0000000000..6210535840 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/boxing/kernel_lambda.h @@ -0,0 +1,37 @@ +#pragma once + +#include +#include + +namespace c10 { + +namespace detail { + // WrapRuntimeKernelFunctor: Wraps any runtime functor into a functor that + // inherits from c10::OperatorKernel, so it can be used as a c10 kernel. + // This can, for example, be used for lamdas, functors or even function pointers. + // In the case of function pointers, since it is a runtime function pointer, + // there is an overhead for calling it whenever the kernel is invoked. 
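A minimal, self-contained sketch of the wrapping idea described in that comment: a runtime callable (lambda, functor, or function pointer) is stored inside a small functor class and invoked through operator(). This is illustrative only and not the actual c10::detail implementation:

#include <iostream>
#include <utility>

struct ExampleOperatorKernel {            // stand-in for c10::OperatorKernel
  virtual ~ExampleOperatorKernel() = default;
};

template <class Func>
struct ExampleRuntimeWrapper final : ExampleOperatorKernel {
  explicit ExampleRuntimeWrapper(Func f) : func_(std::move(f)) {}

  template <class... Args>
  auto operator()(Args&&... args)
      -> decltype(std::declval<Func&>()(std::forward<Args>(args)...)) {
    return func_(std::forward<Args>(args)...);  // forward to the stored callable
  }

  Func func_;
};

int main() {
  auto add = [](int a, int b) { return a + b; };
  ExampleRuntimeWrapper<decltype(add)> kernel(add);
  std::cout << kernel(2, 3) << "\n";  // prints 5
}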
+ template class WrapRuntimeKernelFunctor_ {}; + template + class WrapRuntimeKernelFunctor_> final : public c10::OperatorKernel { + public: + template + explicit WrapRuntimeKernelFunctor_(FuncType_&& kernel_func) + : kernel_func_(std::forward(kernel_func)) {} + + auto operator()(Parameters... args) -> decltype(std::declval()(std::forward(args)...)) { + return kernel_func_(std::forward(args)...); + } + + private: + FuncType kernel_func_; + }; + template + using WrapRuntimeKernelFunctor = WrapRuntimeKernelFunctor_< + FuncType, + typename guts::infer_function_traits_t::return_type, + typename guts::infer_function_traits_t::parameter_types + >; +} + +} diff --git a/thirdparty/libtorch/include/ATen/core/boxing/test_helpers.h b/thirdparty/libtorch/include/ATen/core/boxing/test_helpers.h new file mode 100644 index 0000000000..4957cbf661 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/boxing/test_helpers.h @@ -0,0 +1,86 @@ +#pragma once + +#include +#include + +#include +#include +#include +#include + +template +inline std::vector makeStack(Inputs&&... inputs) { + return {std::forward(inputs)...}; +} + +inline at::Tensor dummyTensor(c10::TensorTypeId dispatch_key) { + auto* allocator = c10::GetCPUAllocator(); + int64_t nelements = 1; + auto dtype = caffe2::TypeMeta::Make(); + auto storage_impl = c10::make_intrusive( + dtype, + nelements, + allocator->allocate(nelements * dtype.itemsize()), + allocator, + /*resizable=*/true); + return at::detail::make_tensor(storage_impl, dispatch_key); +} + +template +inline std::vector callOp(const c10::OperatorHandle& op, Args... args) { + auto stack = makeStack(std::forward(args)...); + c10::Dispatcher::singleton().callBoxed(op, &stack); + return stack; +} + +template +inline Result callOpUnboxed(const c10::OperatorHandle& op, Args... 
args) { + return c10::Dispatcher::singleton() + .template callUnboxed(op, std::forward(args)...); +} + +inline void expectDoesntFindKernel(const char* op_name, c10::TensorTypeId dispatch_key) { + auto op = c10::Dispatcher::singleton().findSchema({op_name, ""}); + EXPECT_ANY_THROW( + callOp(*op, dummyTensor(dispatch_key), 5); + ); +} + +inline void expectDoesntFindOperator(const char* op_name) { + auto op = c10::Dispatcher::singleton().findSchema({op_name, ""}); + EXPECT_FALSE(op.has_value()); +} + +template +inline void expectThrows(Functor&& functor, const char* expectMessageContains) { + try { + std::forward(functor)(); + } catch (const Exception& e) { + EXPECT_THAT(e.what(), testing::HasSubstr(expectMessageContains)); + return; + } + ADD_FAILURE() << "Expected to throw exception containing \"" + << expectMessageContains << "\" but didn't throw"; +} + +template +void expectListEquals(c10::ArrayRef expected, c10::List actual) { + EXPECT_EQ(expected.size(), actual.size()); + for (size_t i = 0; i < expected.size(); ++i) { + EXPECT_EQ(expected[i], actual.get(i)); + } +} + +template +void expectListEquals(c10::ArrayRef expected, std::vector actual) { + EXPECT_EQ(expected.size(), actual.size()); + for (size_t i = 0; i < expected.size(); ++i) { + EXPECT_EQ(expected[i], actual[i]); + } +} + +// NB: This is not really sound, but all of the type sets constructed here +// are singletons so it's fine +static inline c10::TensorTypeId extractTypeId(const at::Tensor& t) { + return legacyExtractTypeId(t.type_set()); +} diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/DispatchKeyExtractor.h b/thirdparty/libtorch/include/ATen/core/dispatch/DispatchKeyExtractor.h new file mode 100644 index 0000000000..79995cdee1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/DispatchKeyExtractor.h @@ -0,0 +1,121 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace c10 { + +namespace impl { + +// Take a TensorTypeSet for a Tensor, and combine it with the current thread +// local valid (implemented) and enabled (not implemented) TensorTypeSets +// to determine what the actual dispatch TensorTypeId should be. Unlike +// Tensor::type_set(), the value of this on a tensor can change depending +// on TLS. +// +// NB: I didn't make this take a Tensor to avoid header include shenanigans. +// +// TODO: I'm not sure if this should live in this header or not; the operant +// question is whether or not we have access to all the relevant TLS at this +// point. +static inline TensorTypeId dispatchTypeId(TensorTypeSet ts) { + c10::impl::LocalTensorTypeSet local = c10::impl::tls_local_tensor_type_set(); + return ((ts | local.included_) - local.excluded_).highestPriorityTypeId(); +} + +} + +namespace detail { + struct MultiDispatchTensorTypeSet : at::IterArgs { + TensorTypeSet ts; + void operator()(const at::Tensor& x) { + ts = ts | x.type_set(); + } + void operator()(const TensorOptions& x) { + ts = ts | x.type_set(); + } + void operator()(at::ArrayRef xs) { + for (const auto& x : xs) { + ts = ts | x.type_set(); + } + } + template + void operator()(const T& x) { + // do nothing + } + }; + + // NB: take by const reference (Don't do universal forwarding here! You + // don't want to move into this function!) + template + TensorTypeSet multi_dispatch_tensor_type_set(const Args&... args) { + return MultiDispatchTensorTypeSet().apply(args...).ts; + } +} + +/** + * An instance of DispatchKeyExtractor knows how to get a dispatch key given + * a list of arguments for an operator call. 
The instance is specific for + * a certain operator as different operators have different ways to extract + * the dispatch key (e.g. different numbers of arguments). + */ +struct DispatchKeyExtractor final { +public: + static DispatchKeyExtractor make(const FunctionSchema& schema) { + return DispatchKeyExtractor(schema.arguments().size()); + } + + c10::optional getDispatchKeyBoxed(const Stack* stack) const { + // TODO Unboxed dispatch supports TensorOptions (i.e. ScalarType/Device/Layout) arguments + // but boxed doesn't yet. These should be aligned and do the same thing. + + TensorTypeSet ts; + for (const auto& ivalue : torch::jit::last(*stack, num_args_)) { + if (C10_LIKELY(ivalue.isTensor())) { + // NB: Take care not to introduce a refcount bump (there's + // no safe toTensorRef method, alas) + ts = ts | ivalue.unsafeToTensorImpl()->type_set(); + } else if (C10_UNLIKELY(ivalue.isTensorList())) { + for (const auto& tensor : ivalue.toTensorListRef()) { + ts = ts | tensor.type_set(); + } + } + } + if (C10_UNLIKELY(ts.empty())) { + return c10::nullopt; + } + + // TODO: Don't use legacy extractor; blocked on c10 understanding variable + return c10::legacyExtractTypeId(ts); + } + + template + c10::optional getDispatchKeyUnboxed(const Args&... args) const { + auto type_set = detail::multi_dispatch_tensor_type_set(args...); + return typeSetToDispatchKey_(type_set); + } + +private: + static c10::optional typeSetToDispatchKey_(const TensorTypeSet& typeSet) { + if (C10_UNLIKELY(typeSet.empty())) { + return c10::nullopt; + } + + return impl::dispatchTypeId(typeSet); + } + + explicit DispatchKeyExtractor(size_t num_args) + : num_args_(num_args) {} + + // this is caching the index so we don't have to parse the schema inputs + // again and again for each dispatcher lookup. + // num_args_ is allowed to be zero; that just means you must do the + // fallthrough + // TODO: a potential optimization is to store a bitfield of arg locations, + size_t num_args_; +}; + +} diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/DispatchTable.h b/thirdparty/libtorch/include/ATen/core/dispatch/DispatchTable.h new file mode 100644 index 0000000000..65848a11ef --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/DispatchTable.h @@ -0,0 +1,149 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace c10 { + +/** + * Per-operator dispatch table. + * + * Given an operator specified by a FunctionSchema, this class records a dispatch + * table for various kernels provided for this operator. For example, if we + * consider the operator add(Tensor, Tensor), the dispatch table for this + * operator may contain implementations for various dynamic tensor types, such + * as CPUTensorId, CUDATensorId, etc. + */ +class DispatchTable final { + public: + DispatchTable(const FunctionSchema& schema) + : kernels_() + , catchallKernel_(c10::nullopt) + , dispatchKeyExtractor_(DispatchKeyExtractor::make(schema)) + , operatorName_(toString(schema.operator_name())) {} + + /** + * Register a kernel in the table at some dispatch key. + * @param dispatch_key Dispatch key to define when this kernel is selected. 
+ * @param kernel Concrete kernel function implementation to register + */ + void setKernel(TensorTypeId dispatchKey, const KernelFunction& kernel) { + TORCH_INTERNAL_ASSERT(dispatchKey != TensorTypeId::UndefinedTensorId); + // The following assertion is disabled because we're codegenerating + // autograd kernels for operators without tensor arguments even though + // they are never called. These, however, register kernels for + // VariableTensorId. + // TODO Stop generating those kernels and re-enable this assertion here. + auto emplaced = kernels_.emplace(dispatchKey, kernel); + if (!emplaced.second) { + // Element already existed. Overwrite it. + emplaced.first->second = kernel; + TORCH_WARN("Registered a kernel for operator ", operatorName_," with dispatch key ", toString(dispatchKey), " that overwrote a previously registered kernel with the same dispatch key for the same operator."); + } + } + + /** + * Deregister the kernel for some dispatch key. + * + * @param dispatch_key Dispatch key to unregister. + */ + void removeKernelIfExists(TensorTypeId dispatchKey) { + auto num_removed = kernels_.erase(dispatchKey); + TORCH_INTERNAL_ASSERT(num_removed <= 1); // This is not a multi-map + } + + /** + * Register a catch-all kernel that is called for this operator + * independent of the inputs. An operator can have either + * a catch-all kernel or a set of kernels with concrete + * dispatch keys, not both. + */ + void setCatchallKernel(const KernelFunction& kernel) { + if (catchallKernel_.has_value()) { + TORCH_WARN("Registered a catch-all kernel for operator ", operatorName_," that overwrote a previously registered catch-all kernel for the same operator."); + } + catchallKernel_ = kernel; + } + + /** + * Remove the catch-all kernel. + */ + void removeCatchallKernel() { + TORCH_INTERNAL_ASSERT(catchallKernel_.has_value(), "Tried to remove the catch-all kernel for operator ", operatorName_," but there is no catch-all kernel registered."); + catchallKernel_ = c10::nullopt; + } + + bool isEmpty() const { + return !catchallKernel_.has_value() && kernels_.size() == 0; + } + + std::string listAllDispatchKeys() const { + std::ostringstream str; + str << "["; + + if (kernels_.size() != 0) { + str << toString(kernels_.begin()->first); + for (auto iter = ++kernels_.begin(); iter != kernels_.end(); ++iter) { + str << ", " << toString(iter->first); + } + } + if (catchallKernel_.has_value()) { + if (kernels_.size() != 0) { + str << ", "; + } + str << "CATCH-ALL"; + } + str << "]"; + return str.str(); + } + + const KernelFunction* lookup(TensorTypeId dispatchKey) const { + auto found = kernels_.find(dispatchKey); + if (found != kernels_.end()) { + return &found->second; + } else { + return nullptr; + } + } + + const KernelFunction* lookupCatchallKernel() const { + if (!catchallKernel_.has_value()) { + return nullptr; + } + + return &*catchallKernel_; + } + + const DispatchKeyExtractor& dispatchKeyExtractor() const { + return dispatchKeyExtractor_; + } + + const std::string& operatorName() const { + return operatorName_; + } + +private: + + ska::flat_hash_map kernels_; + c10::optional catchallKernel_; + DispatchKeyExtractor dispatchKeyExtractor_; + std::string operatorName_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/Dispatcher.h b/thirdparty/libtorch/include/ATen/core/dispatch/Dispatcher.h new file mode 100644 index 0000000000..7fd4bc4263 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/Dispatcher.h @@ -0,0 +1,260 @@ +#pragma once + +#include 
+#include +#include +#include +#include + +namespace c10 { + +class CAFFE2_API OperatorHandle; + +/** + * Implement this interface and register your instance with the dispatcher + * to get notified when operators are registered or deregistered with + * the dispatcher. + */ +class CAFFE2_API OpRegistrationListener { +public: + virtual ~OpRegistrationListener(); + + virtual void onOperatorRegistered(const OperatorHandle& op) = 0; + virtual void onOperatorDeregistered(const OperatorHandle& op) = 0; +}; + +namespace detail { +class RegistrationListenerList; +} +class SchemaRegistrationHandleRAII; + +/** + * Top-level dispatch interface for dispatching via the dynamic dispatcher. + */ +class CAFFE2_API Dispatcher final { +private: + struct OperatorDef final { + explicit OperatorDef(FunctionSchema&& schema, OperatorOptions&& options) + : op(std::move(schema), std::move(options)), refcount(0) {} + + impl::OperatorEntry op; + size_t refcount; + }; + friend class OperatorHandle; + +public: + ~Dispatcher(); + + // Implementation note: this class abstracts over the fact that we have per-operator + // dispatch tables. This could be easily adjusted to have a single global hash + // table. + + static Dispatcher& singleton(); + + /** + * Register a new operator schema. + * + * If a schema with the same operator name and overload name already exists, + * this function will check that both schemas are exactly identical. + * + * @return An OperatorHandle for the registered schema which can be used to + * register kernels for the operator and a RegistrationHandleRAII RAII + * object that manages the lifetime of the registration. Once that + * object is destructed, the kernel will be deregistered. + */ + std::pair registerSchema(FunctionSchema schema, OperatorOptions options); + + /** + * Looks for an operator schema with the given name and overload name + * and returns it if it is registered. + * Returns nullopt otherwise. + */ + c10::optional findSchema(const OperatorName& operator_name); + + /** + * Register a kernel to the dispatch table for an operator. + * If dispatch_key is nullopt, then this registers a fallback kernel. + * + * @return A RAII object that manages the lifetime of the registration. + * Once that object is destructed, the kernel will be deregistered. + */ + RegistrationHandleRAII registerKernel(const OperatorHandle& op, TensorTypeId dispatch_key, KernelFunction kernel); + + /** + * Register a fallback kernel for an operator. + * After this, when trying to lookup a kernel for an unknown dispatch key, + * it will not fail anymore, but return the fallback kernel instead. + * + * @return A RAII object that manages the lifetime of the registration. + * Once that object is destructed, the kernel will be deregistered. + */ + RegistrationHandleRAII registerCatchallKernel(const OperatorHandle& op, KernelFunction kernel); + + /** + * Register a fallback kernel for a backend. + * If an operator is called but there is no concrete kernel for the dispatch + * key of the given operator arguments, it will check if there is such a + * fallback kernel for the given dispatch key and, if yes, call that one. + */ + RegistrationHandleRAII registerBackendFallbackKernel(TensorTypeId dispatch_key, KernelFunction kernel); + + template + Return callUnboxed(const OperatorHandle& op, Args... args) const; + + template + Return callUnboxedOnly(const OperatorHandle& op, Args... 
args) const; + + void callBoxed(const OperatorHandle& op, Stack* stack) const; + + /** + * Add a listener that gets called whenever a new op is registered or an existing + * op is deregistered. Immediately after registering, this listener gets called + * for all previously registered ops, so it can be used to keep track of ops + * registered with this dispatcher. + */ + void addRegistrationListener(std::unique_ptr listener); + +private: + Dispatcher(); + + OperatorHandle findOrRegisterSchema_(FunctionSchema&& schema, OperatorOptions&& options); + + void deregisterSchema_(const OperatorHandle& op, const OperatorName& op_name); + void deregisterBackendFallbackKernel_(TensorTypeId dispatchKey); + + static const KernelFunction& dispatch_(const DispatchTable& dispatchTable, const ska::flat_hash_map& backendFallbackKernels, c10::optional dispatch_key); + + template + Return doCallUnboxed(const DispatchTable& dispatchTable, const LeftRight>& backendFallbackKernels_, Args... args) const; + template + Return doCallUnboxedOnly(const DispatchTable& dispatchTable, const LeftRight>& backendFallbackKernels_, Args... args) const; + + std::list operators_; + LeftRight> operatorLookupTable_; + LeftRight> backendFallbackKernels_; + std::unique_ptr listeners_; + std::mutex mutex_; +}; + +/** + * This is a handle to an operator schema registered with the dispatcher. + * This handle can be used to register kernels with the dispatcher or + * to lookup a kernel for a certain set of arguments. + */ +class CAFFE2_API OperatorHandle final { +public: + OperatorHandle(OperatorHandle&&) noexcept = default; + OperatorHandle& operator=(OperatorHandle&&) noexcept = default; + OperatorHandle(const OperatorHandle&) = default; + OperatorHandle& operator=(const OperatorHandle&) = default; + + const FunctionSchema& schema() const { + return operatorIterator_->op.schema(); + } + + const OperatorOptions& options() const { + return operatorIterator_->op.options(); + } + +private: + explicit OperatorHandle(std::list::iterator operatorIterator) + : operatorIterator_(std::move(operatorIterator)) {} + friend class Dispatcher; + + std::list::iterator operatorIterator_; +}; + +namespace detail { +template inline void unused_arg_(const Args&...) {} +} + +template +inline Return Dispatcher::callUnboxed(const OperatorHandle& op, Args... args) const { + detail::unused_arg_(args...); // workaround for a false-positive warning about unused parameters in gcc 5 + + // note: this doesn't need the mutex because write operations on the list keep iterators intact. + return op.operatorIterator_->op.readDispatchTable([&] (const DispatchTable& dispatchTable) -> Return { + // TODO This should be a nested lambda instead of a separate function call, but that triggers an internal + // compiler error on GCC5. Change this once we don't need gcc 5 anymore. + return doCallUnboxed(dispatchTable, backendFallbackKernels_, std::forward(args)...); + }); +} + +template +inline Return Dispatcher::doCallUnboxed(const DispatchTable& dispatchTable, const LeftRight>& backendFallbackKernels, Args... 
args) const { + detail::unused_arg_(args...); // workaround for a false-positive warning about unused parameters in gcc 5 + return backendFallbackKernels.read([&] (const ska::flat_hash_map& backendFallbackKernels) -> Return { + c10::optional dispatchKey = dispatchTable.dispatchKeyExtractor().getDispatchKeyUnboxed(args...); + const KernelFunction& kernel = dispatch_(dispatchTable, backendFallbackKernels, dispatchKey); + return kernel.template callUnboxed(std::forward(args)...); + }); +} + +template +inline Return Dispatcher::callUnboxedOnly(const OperatorHandle& op, Args... args) const { + detail::unused_arg_(args...); // workaround for a false-positive warning about unused parameters in gcc 5 + + // note: this doesn't need the mutex because write operations on the list keep iterators intact. + return op.operatorIterator_->op.readDispatchTable([&] (const DispatchTable& dispatchTable) -> Return { + // TODO This should be a nested lambda instead of a separate function call, but that triggers an internal + // compiler error on GCC5. Change this once we don't need gcc 5 anymore. + return doCallUnboxedOnly(dispatchTable, backendFallbackKernels_, std::forward(args)...); + }); +} + +template +inline Return Dispatcher::doCallUnboxedOnly(const DispatchTable& dispatchTable, const LeftRight>& backendFallbackKernels, Args... args) const { + detail::unused_arg_(args...); // workaround for a false-positive warning about unused parameters in gcc 5 + return backendFallbackKernels.read([&] (const ska::flat_hash_map& backendFallbackKernels) -> Return { + c10::optional dispatchKey = dispatchTable.dispatchKeyExtractor().getDispatchKeyUnboxed(args...); + const KernelFunction& kernel = dispatch_(dispatchTable, backendFallbackKernels, dispatchKey); + return kernel.template callUnboxedOnly(std::forward(args)...); + }); +} + +inline void Dispatcher::callBoxed(const OperatorHandle& op, Stack* stack) const { + // note: this doesn't need the mutex because write operations on the list keep iterators intact. + return op.operatorIterator_->op.readDispatchTable([&] (const DispatchTable& dispatchTable) { + return backendFallbackKernels_.read([&] (const ska::flat_hash_map& backendFallbackKernels) { + c10::optional dispatchKey = dispatchTable.dispatchKeyExtractor().getDispatchKeyBoxed(stack); + const KernelFunction& kernel = dispatch_(dispatchTable, backendFallbackKernels, dispatchKey); + kernel.callBoxed(stack); + }); + }); +} + +inline const KernelFunction& Dispatcher::dispatch_(const DispatchTable& dispatchTable, const ska::flat_hash_map& backendFallbackKernels, c10::optional dispatchKey) { + if (C10_LIKELY(dispatchKey.has_value())) { + const KernelFunction* backendKernel = dispatchTable.lookup(*dispatchKey); + + if (nullptr != backendKernel) { + return *backendKernel; + } + + auto backendFallbackKernel = backendFallbackKernels.find(*dispatchKey); + if (backendFallbackKernel != backendFallbackKernels.end()) { + return backendFallbackKernel->second; + } + } + + const KernelFunction* catchallKernel = dispatchTable.lookupCatchallKernel(); + if (C10_LIKELY(nullptr != catchallKernel)) { + return *catchallKernel; + } + + if (!dispatchKey.has_value() || *dispatchKey == TensorTypeId::UndefinedTensorId) { + TORCH_CHECK(false, + "There were no tensor arguments to this function (e.g., you passed an " + "empty list of Tensors), but no fallback function is registered for schema ", dispatchTable.operatorName(), + ". This usually means that this function requires a non-empty list of Tensors. 
" + "Available functions are ", dispatchTable.listAllDispatchKeys()) + } + + const std::string dispatchKeyStr = toString(*dispatchKey); + TORCH_CHECK(false, "Could not run '", dispatchTable.operatorName(), "' with arguments", + " from the '", dispatchKeyStr, "' backend. '", + dispatchTable.operatorName(), "' is only available for these backends: ", + dispatchTable.listAllDispatchKeys(), "."); +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/OperatorEntry.h b/thirdparty/libtorch/include/ATen/core/dispatch/OperatorEntry.h new file mode 100644 index 0000000000..47d1f0e3a9 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/OperatorEntry.h @@ -0,0 +1,104 @@ +#pragma once + +#include +#include +#include +#include + +namespace c10 { +namespace impl { + class OperatorEntry; +} + +namespace impl { + +// This is a private class used inside the Dispatcher to represent an operator +// and its dispatch table. This is not part of the public API. +class OperatorEntry final { +public: + explicit OperatorEntry(FunctionSchema&& schema, OperatorOptions&& options); + + OperatorEntry(const OperatorEntry&) = delete; + OperatorEntry(OperatorEntry&&) noexcept = delete; + OperatorEntry& operator=(const OperatorEntry&) = delete; + OperatorEntry& operator=(OperatorEntry&&) noexcept = delete; + + const FunctionSchema& schema() const { + return schema_; + } + + template + typename guts::infer_function_traits_t::return_type readDispatchTable(Functor&& functor) const { + return dispatchTable_.read(std::forward(functor)); + } + + void prepareForDeregistration(); + + RegistrationHandleRAII registerKernel(TensorTypeId dispatch_key, KernelFunction kernel); + RegistrationHandleRAII registerCatchallKernel(KernelFunction kernel); + + const OperatorOptions& options() { + return options_; + } + + void updateOptionsAliasAnalysis(AliasAnalysisKind a) { + options_.setAliasAnalysis(a); + } + +private: + void deregisterKernel_(TensorTypeId dispatch_key, std::list::iterator kernel); + void deregisterCatchallKernel_(std::list::iterator kernel); + + FunctionSchema schema_; + + // The dispatchTable stores the current kernel for each dispatch key + LeftRight dispatchTable_; + + // kernels_ stores all registered kernels for the corresponding dispatch key + // and catchAllKernels_ stores the catch-all kernels. + // If an operator library gets loaded that overwrites an already existing kernel, + // both kernels will be in that list but only the newer one will be in + // dispatchTable. If any of the kernels go away (say the library gets + // unloaded), we remove the kernel from this list and update the + // dispatchTable if necessary. + // Kernels in the list are ordered by registration time descendingly, + // newer registrations are before older registrations. + // We do not combine dispatchTable and kernels into one hash map because + // kernels is a larger data structure and accessed quite infrequently + // while dispatchTable is accessed often and should be kept small to fit + // into CPU caches. + // Invariants: + // - dispatchTable[dispatch_key] == kernels_[dispatch_key].front() + // - dispatchTable[dispatch_key] does not exist if and only if + // kernels_[dispatch_key] does not exist + // - If kernels_[dispatch_key] exists, then it has elements. + // It is never an empty list. + // Analogous invariants for catchAllKernels_. + // + // Why do we do that? 
+ // ----- + // We mostly do this to enable Jupyter notebooks where a cell registering + // a kernel could be executed multiple times and the later execution + // should overwrite the earlier one. Note that this still fails when the + // function schema changed between the executions, but it works as long + // as the function schema didn't change. A better solution would be to + // unload the old extension library from the Jupyter cell when the cell is + // re-executed and then only allow one kernel here, i.e. error if a kernel + // is already registered, but that's a lot of effort to implement and + // currently not high-pri. + ska::flat_hash_map> kernels_; + std::list catchAllKernels_; + + // Some metadata about the operator + OperatorOptions options_; + + std::mutex kernelsMutex_; // protects kernels_ + + // This function re-establishes the invariant that dispatchTable + // contains the front element from the kernels list for a given dispatch key. + void updateDispatchTable_(TensorTypeId dispatch_key); + void updateCatchallDispatchTable_(); +}; + +} +} diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/OperatorOptions.h b/thirdparty/libtorch/include/ATen/core/dispatch/OperatorOptions.h new file mode 100644 index 0000000000..0fe5eeafae --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/OperatorOptions.h @@ -0,0 +1,61 @@ +#pragma once + +#include + +namespace c10 { +namespace impl { +class OperatorEntry; +} + +enum class AliasAnalysisKind : uint8_t { + INTERNAL_SPECIAL_CASE, + CONSERVATIVE, // The most conservative alias analysis type, assumes + // side-effects. This is the default analysis. + FROM_SCHEMA, + PURE_FUNCTION +}; + +#if !defined(_MSC_VER) +constexpr // Our current MSVC version has a bug that doesn't allow this to be constexpr. +#endif +inline const char* toString(AliasAnalysisKind aliasAnalysisKind) { + return (aliasAnalysisKind == AliasAnalysisKind::CONSERVATIVE) + ? "CONSERVATIVE" + : (aliasAnalysisKind == AliasAnalysisKind::FROM_SCHEMA) + ? "FROM_SCHEMA" + : (aliasAnalysisKind == AliasAnalysisKind::PURE_FUNCTION) + ? "PURE_FUNCTION" + : (aliasAnalysisKind == AliasAnalysisKind::INTERNAL_SPECIAL_CASE) + ? "INTERNAL_SPECIAL_CASE" + : "UNKNOWN"; +} + +struct OperatorOptions final { +public: + bool isDefaultAliasAnalysisKind() const { + return aliasAnalysisKind_ == c10::nullopt; + } + + AliasAnalysisKind aliasAnalysis() const { + return !isDefaultAliasAnalysisKind() + ? 
*aliasAnalysisKind_ + : AliasAnalysisKind::CONSERVATIVE; + } + + void setAliasAnalysis(AliasAnalysisKind v) { + aliasAnalysisKind_ = v; + } + + friend bool operator==(const OperatorOptions& lhs, const OperatorOptions& rhs) { + return lhs.aliasAnalysisKind_ == rhs.aliasAnalysisKind_; + } + + friend bool operator!=(const OperatorOptions& lhs, const OperatorOptions& rhs) { + return !(lhs == rhs); + } + +private: + c10::optional aliasAnalysisKind_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/dispatch/RegistrationHandleRAII.h b/thirdparty/libtorch/include/ATen/core/dispatch/RegistrationHandleRAII.h new file mode 100644 index 0000000000..e6ef2128fd --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/dispatch/RegistrationHandleRAII.h @@ -0,0 +1,36 @@ +#pragma once + +#include + +namespace c10 { + +class RegistrationHandleRAII final { +public: + explicit RegistrationHandleRAII(std::function onDestruction) + : onDestruction_(std::move(onDestruction)) {} + + ~RegistrationHandleRAII() { + if (onDestruction_) { + onDestruction_(); + } + } + + RegistrationHandleRAII(const RegistrationHandleRAII&) = delete; + RegistrationHandleRAII& operator=(const RegistrationHandleRAII&) = delete; + + RegistrationHandleRAII(RegistrationHandleRAII&& rhs) noexcept + : onDestruction_(std::move(rhs.onDestruction_)) { + rhs.onDestruction_ = nullptr; + } + + RegistrationHandleRAII& operator=(RegistrationHandleRAII&& rhs) noexcept { + onDestruction_ = std::move(rhs.onDestruction_); + rhs.onDestruction_ = nullptr; + return *this; + } + +private: + std::function onDestruction_; +}; + +} diff --git a/thirdparty/libtorch/include/ATen/core/function_schema.h b/thirdparty/libtorch/include/ATen/core/function_schema.h new file mode 100644 index 0000000000..d8bdbf24b3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/function_schema.h @@ -0,0 +1,340 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +namespace c10 { + +// schema as used in the compiler for resolving function calls and reporting +// errors. These objects should be constructed from C10 schema once those +// are available. + +struct Argument; +struct FunctionSchema; + +namespace detail { +inline bool defaultValueEquals_( + const c10::optional& lhs, + const c10::optional& rhs) { + if (lhs.has_value()) { + return rhs.has_value() && impl::shallowEquals(*lhs, *rhs); + } else { + return !rhs.has_value(); + } +} +} // namespace detail + +bool operator==(const Argument& lhs, const Argument& rhs); + +struct Argument { + Argument( + std::string name = "", + TypePtr type = nullptr, + c10::optional N = c10::nullopt, + c10::optional default_value = c10::nullopt, + bool kwarg_only = false, + c10::optional alias_info = c10::nullopt, + bool is_inferred_type = false) + : name_(std::move(name)), + type_(type ? 
type : TensorType::get()), + N_(std::move(N)), + default_value_(std::move(default_value)), + kwarg_only_(kwarg_only), + alias_info_(std::move(alias_info)), + is_inferred_type_(is_inferred_type) { + } + const std::string& name() const { + return name_; + } + TypePtr type() const { + return type_; + } + c10::optional N() const { + return N_; + } + const c10::optional& default_value() const { + return default_value_; + } + bool kwarg_only() const { + return kwarg_only_; + } + const c10::optional& alias_info() const { + return alias_info_; + } + bool is_inferred_type() const { + return is_inferred_type_; + } + std::string formatTypeMismatchMsg(const std::string& actual_type) const { + std::string inferred_type_hint; + if (is_inferred_type()) { + inferred_type_hint = c10::str( + "Inferred '", + name(), + "' to be of type 'Tensor' ", + "because it was not annotated with an explicit type.\n"); + } + return c10::str( + "Expected a value of type '", + type()->python_str(), + "' for argument '", + name(), + "' but instead found type '", + actual_type, + "'.\n", + inferred_type_hint); + } + + Argument cloneWithType(TypePtr new_type) const { + return Argument(name_, new_type, N_, default_value_, kwarg_only_, alias_info_); + } + + // this function check whether this Argument is backward compatible with + // the old one. we consider the following cases are backward compatible: + // 1) two arguments are equal + // 2) this arg's type should be subtype of old + // 3) this arg must provide the same default value if old arg has one, + bool isBackwardCompatibleWith( + const Argument& old, + std::ostream* why_not=nullptr) const; + +private: + std::string name_; + TypePtr type_; + // for list types, an optional statically known length for the list + // e.g. for int[3]: type = ListType::ofInts(), N = 3 + // If present, this will allow scalars to be broadcast to this length to + // become a list. + c10::optional N_; + + c10::optional default_value_; + // is this only specifyable as a keyword argument? + bool kwarg_only_; + c10::optional alias_info_; + bool is_inferred_type_; +}; + +inline bool operator==(const Argument& lhs, const Argument& rhs) { + return lhs.name() == rhs.name() + && *lhs.type() == *rhs.type() + && lhs.N() == rhs.N() + && detail::defaultValueEquals_(lhs.default_value(), rhs.default_value()) + && lhs.kwarg_only() == rhs.kwarg_only() + && lhs.alias_info() == rhs.alias_info(); +} + +bool operator==(const FunctionSchema& lhs, const FunctionSchema& rhs); + +struct FunctionSchema { + FunctionSchema( + std::string name, + std::string overload_name, + std::vector arguments, + std::vector returns, + bool is_vararg = false, + bool is_varret = false) + : name_({std::move(name), std::move(overload_name)}), + arguments_(std::move(arguments)), + returns_(std::move(returns)), + is_vararg_(is_vararg), + is_varret_(is_varret) {} + + FunctionSchema( + Symbol name, + std::string overload_name, + std::vector arguments, + std::vector returns, + bool is_vararg = false, + bool is_varret = false) + : FunctionSchema( + name.toQualString(), + std::move(overload_name), + std::move(arguments), + std::move(returns), + is_vararg, + is_varret) {} + + // check whether this schema is backward compatible with the old one. 
+ // the following conditions are considered as this schema is backward + // compatible with old: + // 1) two schemas are equal + // 2) this schema has the same or more positional args than old, + // and any positional arg in this schema is backward compatible + // with the corresponding one in old schema, which could be an arg + // or a kwarg, if it has, or it must provide a default value + // 3) this schema has the same or more kwargs than old, and all the kwargs + // in old schema can find the corresponding kwarg in this schema which + // is backward compatible with the old kwarg, and the extra kwargs in + // this schema must provide default values. + bool isBackwardCompatibleWith( + const FunctionSchema& old, + std::ostream* why_not=nullptr) const; + +private: + OperatorName name_; + std::vector arguments_; + std::vector returns_; + // if true then this schema takes an arbitrary number of additional arguments + // after the argument specified in arguments + // currently this is used primarily to represent 'primtive' operators whose + // arguments are not checked by schema + bool is_vararg_; + bool is_varret_; + void checkArg(const IValue& value, const Argument& argument, optional pos) const; + +public: + const OperatorName& operator_name() const { + return name_; + } + const std::string& name() const { + return name_.name; + } + const std::string& overload_name() const { + return name_.overload_name; + } + const std::vector& arguments() const { + return arguments_; + } + const std::vector& returns() const { + return returns_; + } + bool is_vararg() const { + return is_vararg_; + } + bool is_varret() const { + return is_varret_; + } + bool is_mutable() const { + return std::any_of( + arguments_.cbegin(), arguments_.cend(), [](const Argument& arg) { + const auto& aliasInfo = arg.alias_info(); + return aliasInfo && aliasInfo.value().isWrite(); + }); + } + + c10::optional argumentIndexWithName(const std::string& name) const { + for(size_t i = 0; i < arguments().size(); ++i) { + if(name == arguments()[i].name()) + return i; + } + return c10::nullopt; + } + FunctionSchema cloneWithArguments(std::vector new_arguments) const { + return FunctionSchema( + name(), + overload_name(), + std::move(new_arguments), + returns(), + is_vararg(), + is_varret()); + } + + std::string formatTypeMismatchMsg( + const Argument& expected, + const std::string& actual_type, + c10::optional position = c10::nullopt, + c10::optional value = c10::nullopt) const; + + FunctionSchema cloneWithRemappedTypes( + const std::function type_map) const; + + // Check that inputs have the correct types and appends any missing default + // values. + void checkAndNormalizeInputs( + std::vector& inputs, + const std::unordered_map& kwargs) const; + + void findErrorInKwargs(const std::vector& kwargs) const; + + bool hasAnyAliasInfo() const { + for (const auto& arg : arguments_) { + if (arg.alias_info().has_value()) { + return true; + } + } + for (const auto& ret : returns_) { + if (ret.alias_info().has_value()) { + return true; + } + } + return false; + } + + // can a function with this schema be substituted for a function of rhs's + // schema and have the program typecheck? 
+ // as_method - if true, treat this schema as a method and ignore + // the first argument, which will be the object in both cases + bool isSubtypeOf(const FunctionSchema& rhs, bool as_method, std::ostream* why_not=nullptr) const; +}; + +inline bool operator==(const FunctionSchema& lhs, const FunctionSchema& rhs) { + return lhs.name() == rhs.name() + && lhs.overload_name() == rhs.overload_name() + && lhs.arguments() == rhs.arguments() + && lhs.returns() == rhs.returns() + && lhs.is_vararg() == rhs.is_vararg() + && lhs.is_varret() == rhs.is_varret(); +} + +inline bool operator!=(const FunctionSchema& lhs, const FunctionSchema& rhs) { + return !(lhs == rhs); +} + +// print out Argument, which is compatible with FunctionSchema parser +// full format: Type(alias)? name=default_value +inline std::ostream& operator<<(std::ostream& out, const Argument& arg) { + bool optional_type = arg.type()->kind() == OptionalType::Kind; + // for adjusting the ? position. + // in schema, we have Tensor?(a!) input, and t(a!)?. + // however, t?(a!) doesn't work with schema parser. + // so we always use Type(alias)? format + std::stringstream oss; + if (auto list = arg.type()->cast()) { + oss << list->getElementType()->str(); + oss << "["; + if (arg.N()) { + oss << *arg.N(); + } + oss << "]"; + } else { + oss << arg.type()->str(); + } + if (optional_type) { + oss.seekp(oss.str().size() - 1); + } + if (arg.alias_info()) { + oss << arg.alias_info().value(); + } + if (optional_type) { + oss << "?"; + } + out << oss.str(); + if (!arg.name().empty()) { + out << " " << arg.name(); + } + if (arg.default_value()) { + out << "="; + if (arg.type()->kind() == c10::TypeKind::StringType) { + printQuotedString(out, arg.default_value().value().toStringRef()); + } else { + out << arg.default_value().value(); + } + } + return out; +} + +inline std::ostream& operator<<(std::ostream& out, const FunctionSchema& schema); + +inline std::string toString(const FunctionSchema& schema) { + std::ostringstream str; + str << schema; + return str.str(); +} + +} // namespace c10 + +#include diff --git a/thirdparty/libtorch/include/ATen/core/function_schema_inl.h b/thirdparty/libtorch/include/ATen/core/function_schema_inl.h new file mode 100644 index 0000000000..18855f6ab9 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/function_schema_inl.h @@ -0,0 +1,334 @@ +#pragma once + +// note: windows build doesn't find symbols in operator files unless +// this is a header file + +namespace c10 { + +inline std::ostream& operator<<(std::ostream& out, const FunctionSchema& schema) { + // eventually this should look almost identical to python arg parser, but + // it is simpler for now to work directly on this schema + + out << schema.name(); + if (schema.overload_name() != "") { + out << "." 
<< schema.overload_name(); + } + out << "("; + + bool seen_kwarg_only = false; + for(size_t i = 0; i < schema.arguments().size(); ++i) { + if (i > 0) out << ", "; + if (schema.arguments()[i].kwarg_only() && !seen_kwarg_only) { + out << "*, "; + seen_kwarg_only = true; + } + out << schema.arguments()[i]; + } + + if(schema.is_vararg()) { + if(schema.arguments().size() > 0) + out << ", "; + out << "..."; + } + + out << ") -> "; + + const auto& returns = schema.returns(); + out << "("; + for(size_t i = 0; i < returns.size(); ++i) { + if (i > 0) { + out << ", "; + } + out << returns.at(i); + } + if (schema.is_varret()) { + if (returns.size() != 0) { + out << ", "; + } + out << "..."; + } + out << ")"; + return out; +} + +inline bool Argument::isBackwardCompatibleWith( + const Argument& old, + std::ostream* why_not) const { + const Argument* lhs = this; + const Argument* rhs = &old; + if (!(lhs->name() == rhs->name() + && lhs->N() == rhs->N() + && lhs->alias_info() == rhs->alias_info())) { + return false; + } + if (lhs->kwarg_only() && !rhs->kwarg_only()) { + return false; + } + if (!rhs->type()->isSubtypeOfExt(lhs->type(), why_not)) { + return false; + } + if (rhs->default_value().has_value() && + !detail::defaultValueEquals_(lhs->default_value(), + rhs->default_value())) { + return false; + } + return true; +} + +inline std::string FunctionSchema::formatTypeMismatchMsg( + const Argument& expected, + const std::string& actual_type, + c10::optional position, + c10::optional value) const { + std::string position_str; + if (position) { + position_str = c10::str("Position: ", *position, "\n"); + } + std::string value_str; + if (value) { + value_str = c10::str("Value: ", *value, "\n"); + } + return c10::str( + name(), + "() ", + expected.formatTypeMismatchMsg(actual_type), + position_str, + value_str, + "Declaration: ", + *this); +} + +inline bool FunctionSchema::isBackwardCompatibleWith( + const FunctionSchema& old, + std::ostream* why_not) const { + if (!(name() == old.name() + && overload_name() == old.overload_name() + // we are conservative on is_vararg and is_varret, + // since they are only used by internal operators + && is_vararg() == old.is_vararg() + && is_varret() == old.is_varret() + && returns().size() == old.returns().size() + && arguments().size() >= old.arguments().size())) { + return false; + } + for (size_t i = 0; i < returns().size(); ++i) { + // functions are covariant in arguments but contravariant in returns + if (!old.returns().at(i).isBackwardCompatibleWith( + returns().at(i), + why_not)) { + return false; + } + } + std::vector args, old_args; + std::map kwargs, old_kwargs; + auto split_func = [](const std::vector& arguments, + std::vector* positionals, + std::map* nameds) { + for (const Argument& arg : arguments) { + if (!arg.kwarg_only()) { + positionals->emplace_back(&arg); + } + nameds->emplace(arg.name(), &arg); + } + }; + // we split args into positional and keyward parts, + split_func(arguments(), &args, &kwargs); + split_func(old.arguments(), &old_args, &old_kwargs); + if (old_args.size() > args.size()) { + return false; + } + // make sure that all the old positional args have their corresponding + // backward compatible positional args in this schema + for (size_t i = 0; i < old_args.size(); ++i) { + if (!args.at(i)->isBackwardCompatibleWith( + *old_args.at(i), + why_not)) { + return false; + } + } + // check the extra positional args in this schema either has corresponding + // backward compatible keyward args since positional args also can be used as + // a 
keyward arg, or provided default values + for (size_t i = old_args.size(); i < args.size(); ++i) { + if (!args.at(i)->default_value()) { + auto it = old_kwargs.find(args.at(i)->name()); + if (it == old_kwargs.end() || + !args.at(i)->isBackwardCompatibleWith( + *it->second, + why_not)) { + return false; + } + } + } + // make sure that all the keyword args in the old schema have their + // corresponding backward compatible keyward args in this schema + for (auto& kv : old_kwargs) { + auto it = kwargs.find(kv.first); + if (it == kwargs.end() || + !it->second->isBackwardCompatibleWith( + *kv.second, + why_not)) { + return false; + } + kwargs.erase(it); + } + // check all the extra keyword args in this schema provide default values + for (auto& kv : kwargs) { + if (!kv.second->default_value()) { + return false; + } + } + + return true; +} + +inline void FunctionSchema::checkArg( + const IValue& value, + const Argument& argument, + optional pos) const { + if (!value.type()->isSubtypeOf(argument.type())) { + std::string position = pos ? ::c10::str(" in position ", *pos) : ""; + TORCH_CHECK( + false, + formatTypeMismatchMsg( + argument, value.type()->python_str(), pos)); + } +} + +inline void FunctionSchema::findErrorInKwargs(const std::vector& kwargs) const { + // First check if any of the kwargs are unknown, i.e. don't match the name of + // any argument in the schema. + for (const auto& kwarg : kwargs) { + if (!std::count_if( + arguments().begin(), + arguments().end(), + [&kwarg](const Argument& argument) { + return argument.name() == kwarg; + })) { + throw std::runtime_error(c10::str( + "Unknown keyword argument '", + kwarg, + "' for operator '", + name(), + "'. Schema: ", + *this)); + } + } + // If there are unconsumed kwargs but none of them were unknown, the first + // positional argument present in the kwargs is duplicated. + for (const auto& argument : arguments()) { + if (std::find(kwargs.begin(), kwargs.end(), argument.name()) != kwargs.end()) { + AT_ASSERT(!argument.default_value()); + throw std::runtime_error(c10::str( + "Argument '", + argument.name(), + "' specified both as positional and ", + "keyword argument. Schema: ", + *this)); + } + } +} + +inline void FunctionSchema::checkAndNormalizeInputs( + std::vector& inputs, + const std::unordered_map& kwargs) const { + // Do we have more inputs than the schema accepts? + TORCH_CHECK( + inputs.size() <= arguments().size(), + "Expected at most ", + arguments().size(), + " argument(s) for operator '", + name(), + "', but received ", + inputs.size(), + " argument(s). Declaration: ", + *this); + + size_t consumed_kwargs = 0; + for (size_t pos = 0; pos < arguments().size(); ++pos) { + const auto& argument = arguments()[pos]; + if (pos < inputs.size()) { + checkArg(inputs[pos], argument, pos); + continue; + } + auto it = kwargs.find(argument.name()); + if (it != kwargs.end()) { + checkArg(it->second, argument, nullopt); + inputs.push_back(it->second); + consumed_kwargs++; + continue; + } + if (argument.default_value()) { + inputs.push_back(*argument.default_value()); + continue; + } + AT_ERROR( + name(), + "() is missing value for argument '", + argument.name(), + "'. 
Declaration: ", + *this); + } + if (consumed_kwargs != kwargs.size()) { + std::vector names; + for(const auto& k : kwargs) { + names.emplace_back(k.first); + } + findErrorInKwargs(names); + } +} + +inline FunctionSchema FunctionSchema::cloneWithRemappedTypes( + const std::function type_map) const { + auto update_args = [&](const std::vector& args) { + std::vector new_args; + new_args.reserve(args.size()); + for(const Argument& arg : args) { + new_args.emplace_back(arg.cloneWithType(type_map(arg.type()))); + } + return new_args; + }; + return FunctionSchema( + name(), + overload_name(), + update_args(arguments()), + update_args(returns()), + is_vararg(), + is_varret()); +} + +// covariant subtyping of list of Arguments +inline bool isSubtypeOfList( + ArrayRef child, + ArrayRef parent, + std::ostream* why_not) { + if (child.size() != parent.size()) { + return false; + } + for (size_t i = 0; i < child.size(); ++i) { + const Argument& c = child[i]; + const Argument& p = parent[i]; + if (c.name() != p.name()) { + return false; + } + if (!c.type()->isSubtypeOfExt(p.type(), why_not)) { + return false; + } + } + return true; +} + +inline bool FunctionSchema::isSubtypeOf( + const FunctionSchema& rhs, + bool as_method, + std::ostream* why_not) const { + size_t start = as_method ? 1 : 0; + // functions are covariant in arguments but contravariant in returns + return isSubtypeOfList( + ArrayRef(arguments()).slice(start), + ArrayRef(rhs.arguments()).slice(start), + why_not) && + isSubtypeOfList(rhs.returns(), returns(), why_not); +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/functional.h b/thirdparty/libtorch/include/ATen/core/functional.h new file mode 100644 index 0000000000..6b4f3447f5 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/functional.h @@ -0,0 +1,54 @@ +#pragma once + +#include +#include + +namespace c10 { + +// The passed in function must take T by value (T), or by +// const reference (const T&); taking T by non-const reference +// will result in an error like: +// +// error: no type named 'type' in 'class std::result_of' +// +// No explicit template parameters are required. + +// Overload for explicit function and ArrayRef +template +inline auto fmap(const T& inputs, const F& fn) -> std::vector { + std::vector r; + r.reserve(inputs.size()); + for(const auto & input : inputs) + r.push_back(fn(input)); + return r; +} + +// C++ forbids taking an address of a constructor, so here's a workaround... +// Overload for constructor (R) application +template +inline std::vector fmap(const T& inputs) { + std::vector r; + r.reserve(inputs.size()); + for(auto & input : inputs) + r.push_back(R(input)); + return r; +} + +template +inline std::vector filter(at::ArrayRef inputs, const F& fn) { + std::vector r; + r.reserve(inputs.size()); + for(auto & input : inputs) { + if (fn(input)) { + r.push_back(input); + } + } + return r; +} + +template +inline std::vector filter(const std::vector& inputs, const F& fn) { + return filter(static_cast>(inputs), fn); +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/grad_mode.h b/thirdparty/libtorch/include/ATen/core/grad_mode.h new file mode 100644 index 0000000000..acd5fd09e5 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/grad_mode.h @@ -0,0 +1,30 @@ +#pragma once + +#include + +namespace at { + +struct CAFFE2_API GradMode { + static bool is_enabled(); + static void set_enabled(bool enabled); +}; + +// A RAII, thread local (!) 
guard that enables or disables grad mode upon +// construction, and sets it back to the original value upon destruction. +struct CAFFE2_API AutoGradMode { + AutoGradMode(bool enabled) : prev_mode(GradMode::is_enabled()) { + GradMode::set_enabled(enabled); + } + ~AutoGradMode() { + GradMode::set_enabled(prev_mode); + } + bool prev_mode; +}; + +// A RAII, thread local (!) guard that stops future operations from building +// gradients. +struct CAFFE2_API NoGradGuard : public AutoGradMode { + NoGradGuard() : AutoGradMode(/*enabled=*/false) {} +}; + +} diff --git a/thirdparty/libtorch/include/ATen/core/interned_strings.h b/thirdparty/libtorch/include/ATen/core/interned_strings.h new file mode 100644 index 0000000000..4426136ee2 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/interned_strings.h @@ -0,0 +1,403 @@ +#pragma once +#include +#include +#include +#include +#include + +#include + +#if !defined(C10_MOBILE) || defined(FEATURE_TORCH_MOBILE) +#include +#endif + +namespace c10 { + +#if !defined(C10_MOBILE) || defined(FEATURE_TORCH_MOBILE) +#define FORALL_NS_SYMBOLS(_) \ + _(namespaces, prim) \ + _(namespaces, aten) \ + _(namespaces, onnx) \ + _(namespaces, attr) \ + _(namespaces, scope) \ + _(namespaces, user) \ + _(namespaces, _caffe2) \ + _(namespaces, dimname) \ + _(namespaces, namespaces) \ + _(prim, Assign) \ + _(prim, BroadcastingChunk) \ + _(prim, BroadcastSizes) \ + _(prim, Constant) \ + _(prim, ChunkSizes) \ + _(prim, Drop) \ + _(prim, Eval) \ + _(prim, Expand) /* onnx */ \ + _(prim, FusionGroup) \ + _(prim, DifferentiableGraph) \ + _(prim, If) \ + _(prim, Jump) /* debug */ \ + _(prim, JumpNZ) /* debug */ \ + _(prim, JumpZ) /* debug */ \ + _(prim, Load) \ + _(prim, Loop) \ + _(prim, Param) \ + _(prim, PackPadded) /* onnx */ \ + _(prim, PadPacked) /* onnx */ \ + _(prim, Placeholder) /* debug */ \ + _(prim, Print) \ + _(prim, PythonOp) \ + _(prim, IgnoredPythonOp) \ + _(prim, Reverse) \ + _(prim, Return) \ + _(prim, ReturnStmt) \ + _(prim, BreakStmt) \ + _(prim, ContinueStmt) \ + _(prim, Store) \ + _(prim, AutogradZero) \ + _(prim, AutogradAnyNonZero) \ + _(prim, Starred) \ + _(prim, TupleConstruct) \ + _(prim, TupleUnpack) \ + _(prim, TupleIndex) \ + _(prim, TupleSlice) \ + _(prim, ListConstruct) \ + _(prim, ListUnpack) \ + _(prim, DictConstruct) \ + _(prim, StringIndex) \ + _(prim, NumToTensor) \ + _(prim, Uninitialized) \ + _(prim, ImplicitTensorToNum) \ + _(aten, Bool) \ + _(aten, Int) \ + _(aten, Float) \ + _(aten, str) \ + _(prim, device) \ + _(prim, dtype) \ + _(prim, shape) \ + _(prim, requires_grad) \ + _(prim, AutogradAdd) \ + _(prim, GradOf) \ + _(aten, grad) \ + _(aten, backward) \ + _(prim, Guard) \ + _(prim, BailOut) \ + _(prim, FusedConcat) \ + _(prim, ConstantChunk) \ + _(prim, MMTreeReduce) \ + _(prim, MMBatchSide) \ + _(prim, min) \ + _(prim, max) \ + _(prim, abs) \ + _(aten, divmod) \ + _(prim, zip) \ + _(prim, enumerate) \ + _(prim, range) \ + _(prim, rangelist) \ + _(prim, isinstance) \ + _(prim, unchecked_cast) \ + _(aten, _grad_sum_to_size) \ + _(aten, _size_if_not_equal) \ + _(aten, _ncf_unsqueeze) \ + _(aten, warn) \ + _(aten, sorted) \ + _(aten, floordiv) \ + _(aten, __range_length) \ + _(aten, __derive_index) \ + _(aten, __round_to_zero_floordiv) \ + _(aten, _unwrap_optional) \ + _(prim, fork) \ + _(prim, forkClosure) \ + _(prim, RaiseException) \ + _(prim, Function) \ + _(prim, CreateObject) \ + _(prim, SetAttr) \ + _(prim, GetAttr) \ + _(prim, HasAttr) \ + _(prim, profile) \ + _(prim, AddStatValue) \ + _(prim, TimePoint) \ + _(prim, CallFunction) 
\ + _(prim, CallMethod) \ + _(prim, LoopContinuation) \ + _(prim, annotate) \ + _(prim, TracedModuleForward) \ + _(prim, TracedFork) \ + _(prim, TracedAttr) \ + _(aten, append) \ + _(aten, item) \ + _(aten, format) \ + _(aten, __not__) \ + _(aten, __is__) \ + _(aten, __isnot__) \ + _(aten, copy) \ + _(aten, copy_) \ + _(aten, t_) \ + _(aten, addbmm_) \ + _(aten, addcdiv_) \ + _(aten, addcmul_) \ + _(aten, addmv_) \ + _(aten, addr_) \ + _(aten, baddbmm_) \ + _(aten, ge_) \ + _(aten, gt_) \ + _(aten, le_) \ + _(aten, lerp_) \ + _(aten, lt_) \ + _(aten, ne_) \ + _(aten, transpose_) \ + _(aten, unsqueeze_) \ + _(aten, __getitem__) \ + _(aten, _set_item) \ + _(aten, manual_seed) \ + _(aten, set_) \ + _(aten, index_put_) \ + _(aten, device) \ + _(aten, hash) \ + _(aten, len) \ + _(aten, list) \ + _(aten, wait) \ + _(aten, save) \ + _(aten, keys) \ + _(aten, ord) \ + _(aten, chr) \ + _(aten, hex) \ + _(aten, oct) \ + _(aten, clear) \ + _(aten, setdefault) \ + _(aten, bin) \ + _(prim, unchecked_unwrap_optional) \ + _(aten, __contains__) \ + _(prim, BailoutTemplate) \ + FORALL_ATEN_BASE_SYMBOLS(_) \ + _(onnx, Add) \ + _(onnx, Concat) \ + _(onnx, Constant) \ + _(onnx, ConstantFill) \ + _(onnx, Div) \ + _(onnx, GRU) \ + _(onnx, Gather) \ + _(onnx, Gemm) \ + _(onnx, LSTM) \ + _(onnx, Mul) \ + _(onnx, Pow) \ + _(onnx, RNN) \ + _(onnx, Shape) \ + _(onnx, Size) \ + _(onnx, Slice) \ + _(onnx, Squeeze) \ + _(onnx, Sub) \ + _(onnx, Transpose) \ + _(onnx, Unsqueeze) \ + _(onnx, Loop) \ + _(onnx, If) \ + _(onnx, Reshape) \ + _(onnx, Expand) \ + _(onnx, Equal) \ + _(onnx, Greater) \ + _(onnx, Less) \ + _(onnx, Not) \ + _(onnx, ATen) \ + _(onnx, Split) \ + _(onnx, ConstantOfShape) \ + _(onnx, Cast) \ + _(onnx, Mod) \ + FORALL_ATTR_BASE_SYMBOLS(_) \ + _(attr, Subgraph) \ + _(attr, ReverseSubgraph) \ + _(attr, f_real_outputs) \ + _(attr, df_input_vjps) \ + _(attr, df_input_captured_inputs) \ + _(attr, df_input_captured_outputs) \ + _(attr, df_output_vjps) \ + _(attr, axes) \ + _(attr, axis) \ + _(attr, broadcast) \ + _(attr, direction) \ + _(attr, ends) \ + _(attr, inplace) \ + _(attr, input_as_shape) \ + _(attr, is_zero) \ + _(attr, perm) \ + _(attr, sizes) \ + _(attr, starts) \ + _(attr, transA) \ + _(attr, transB) \ + _(attr, name) \ + _(attr, a) \ + _(attr, b) \ + _(attr, beg) \ + _(attr, idx) \ + _(attr, split) \ + _(attr, slot) \ + _(attr, kinds) \ + _(attr, types) \ + _(attr, scope) +#else +#define FORALL_NS_SYMBOLS(_) \ + _(namespaces, prim) \ + _(namespaces, aten) \ + _(namespaces, onnx) \ + _(namespaces, attr) \ + _(namespaces, scope) \ + _(namespaces, user) \ + _(namespaces, _caffe2) \ + _(namespaces, dimname) \ + _(namespaces, namespaces) +#endif + +// 'prim' symbols are synthetic operators that occur only in the IR +// and don't have corresponding implementations in ATen. + +// 'onnx' symbols correspond to ONNX operators. Their semantics +// are defined in https://github.com/onnx/onnx/blob/master/docs/Operators.md +// The particular version we are targeting is specified by '_onnx_opset_version' +// in torch.onnx.symbolic_helper +// +// In general, most ONNX operators won't get an entry here, because they +// are handled from the Python end. However, you may occasionally need +// to intern an ONNX symbol here so that you can conveniently write an +// optimization on ONNX operations. + +// 'attr' symbols are attribute keys. They are shared between both ONNX and ATen +// operators (you disambiguate their meaning by looking at the operator itself). 
+// In general, you only need to define attribute keys that are used by +// onnx or prim; ATen attributes are automatically generated in FORALL_ATTR_BASE_SYMBOLS. + +// Note [Symbol allocation] +// ~~~~~~~~~~~~~~~~~~~~~~~~ +// +// 1. Symbol namespace is split up into namespaces. +// +// 2. The intended access pattern for built-in symbols is onnx::MatMul +// in the c10 namespace (this is a Symbol). +// + +// Built-in constant definition strategy: +// - Enum is the most convenient way to generate a contiguous sequence +// of numbers for an identifier. +// - However, an enum gives you a fresh type. We want onnx::MatMul to +// be type Symbol, not some random enum type! +// - Therefore, after using enums to generate the sequence of integers, +// we then declare constexpr Symbols to get everything the actual Symbol +// type we want. Symbols must be constexpr to be valid to be "case"ed on. + +using unique_t = uint32_t; + +const std::string& domain_prefix(); + +// A Symbol is like an interned string, but with a little extra +// structure; it is namespaced via SymbolNamespace and the resulting +// intern pointers support efficient namespace testing. +struct CAFFE2_API Symbol { + explicit constexpr Symbol() : value(0) {}; + explicit constexpr Symbol(unique_t uniq) + : value(uniq) {} + + // Get a Symbol for a qualified string like "attr::bar" + static Symbol fromQualString(const std::string & s); + + // Get a Symbol from a domain and an unqualified string like "org.pytorch.attr" and "bar" + static Symbol fromDomainAndUnqualString(const std::string & d, const std::string & s); + + // Constructors for our various namespaced strings. This will construct + // the appropriate namespaced string, e.g., "attr::foo" for the + // argument "foo", and then attempt to intern it. DO NOT USE THIS + // with a string literal; attr::foo should be available in that case + // (and if it's not, you should add it to the built-ins list above.) + static Symbol attr(const std::string & s); + static Symbol aten(const std::string & s); + static Symbol onnx(const std::string & s); + static Symbol prim(const std::string & s); + static Symbol user(const std::string & s); + static Symbol caffe2(const std::string & s); + static Symbol dimname(const std::string & s); + // TODO: eliminate me + static Symbol scope(const std::string & s); + + bool is_attr() const; + bool is_aten() const; + bool is_prim() const; + bool is_onnx() const; + bool is_user() const; + bool is_caffe2() const; + bool is_dimname() const; + + // So we can switch on this + constexpr operator unique_t() const { + return value; + } + + Symbol ns() const; + + // Give a string corresponding to the unqualified version of this name, e.g., + // "mm". Use this in a context where the intended namespace of the string is + // obvious; this is a *lossy* conversion. + const char * toUnqualString() const; + + // Give a string corresponding to the qualified version of this name, + // e.g., "aten::mm". This string format is made available to Python bindings + // (so we know how to parse it.) + const char * toQualString() const; + + // This describes a symbol in a case where humans read it. At the moment it's + // the same as toQualString. This has to be a const char* returned because + // a lot of printf style macros use it. + const char * toDisplayString() const; + + // Give a string corresponding to the domain name for the symbol, + // e.g., "org.pytorch.aten". 
+ std::string domainString() const; + +private: + explicit Symbol(Symbol ns, const std::string & s); + unique_t value; +}; + +static inline bool operator==(Symbol lhs, Symbol rhs) { + return static_cast(lhs) == static_cast(rhs); +} + +enum class _keys : unique_t { + #define DEFINE_KEY(ns, s) ns##_##s, + FORALL_NS_SYMBOLS(DEFINE_KEY) + #undef DEFINE_KEY + num_symbols +}; + +#define DEFINE_SYMBOL(s) \ + constexpr Symbol s(static_cast(_keys::s)); + +#undef DEFINE_SYMBOL + +#define DEFINE_SYMBOL(ns, s) \ + namespace ns { constexpr Symbol s(static_cast(_keys::ns##_##s)); } +FORALL_NS_SYMBOLS(DEFINE_SYMBOL) +#undef DEFINE_SYMBOL + +inline Symbol Symbol::attr(const std::string & s) { return Symbol::fromQualString("attr::" + s); } +inline Symbol Symbol::aten(const std::string & s) { return Symbol::fromQualString("aten::" + s); } +inline Symbol Symbol::onnx(const std::string & s) { return Symbol::fromQualString("onnx::" + s); } +inline Symbol Symbol::prim(const std::string & s) { return Symbol::fromQualString("prim::" + s); } +inline Symbol Symbol::scope(const std::string & s) { return Symbol::fromQualString("scope::" + s); } +inline Symbol Symbol::user(const std::string & s) { return Symbol::fromQualString("user::" + s); } +inline Symbol Symbol::caffe2(const std::string & s) { return Symbol::fromQualString("_caffe2::" + s); } +inline Symbol Symbol::dimname(const std::string & s) { return Symbol::fromQualString("dimname::" + s); } +inline bool Symbol::is_attr() const { return ns() == namespaces::attr; } +inline bool Symbol::is_aten() const { return ns() == namespaces::aten; } +inline bool Symbol::is_prim() const { return ns() == namespaces::prim; } +inline bool Symbol::is_onnx() const { return ns() == namespaces::onnx; } +inline bool Symbol::is_user() const { return ns() == namespaces::user; } +inline bool Symbol::is_caffe2() const { return ns() == namespaces::_caffe2; } +inline bool Symbol::is_dimname() const { return ns() == namespaces::dimname; } + +} // namespace c10 + +// make symbol behave like an integer in hash tables +namespace std { +template <> +struct hash { + size_t operator()(c10::Symbol s) const { + return std::hash()(static_cast(s)); + } +}; +} diff --git a/thirdparty/libtorch/include/ATen/core/interned_strings_class.h b/thirdparty/libtorch/include/ATen/core/interned_strings_class.h new file mode 100644 index 0000000000..b13e3f18eb --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/interned_strings_class.h @@ -0,0 +1,36 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace c10 { + +struct CAFFE2_API InternedStrings { + InternedStrings(); + Symbol symbol(const std::string& s); + std::pair string(Symbol sym); + Symbol ns(Symbol sym); + + private: + // prereq - holding mutex_ + Symbol _symbol(const std::string& s); + std::pair customString(Symbol sym); + std::unordered_map string_to_sym_; + + struct SymbolInfo { + Symbol ns; + std::string qual_name; + std::string unqual_name; + }; + std::vector sym_to_info_; + + std::mutex mutex_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/ivalue.h b/thirdparty/libtorch/include/ATen/core/ivalue.h new file mode 100644 index 0000000000..9faa9a7629 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/ivalue.h @@ -0,0 +1,651 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace torch { +namespace jit { +class CustomClassHolder : public c10::intrusive_ptr_target {}; +struct Function; +namespace script { +struct 
CompilationUnit; +struct Module; +} +} // namespace jit +} // namespace torch +namespace c10 { +template class Dict; +template class List; +struct IValue; +struct ClassType; +struct Type; +using TypePtr = std::shared_ptr; +namespace ivalue { +struct Tuple; +struct Future; +struct ConstantString; +struct GenericDict; +struct Object; +} + +// IValue is the generic tagged union used by the interpreter to hold +// all value types. +// It is a 16-byte object with an 8-byte payload and an 8-byte tag. +// The tag is currently 4 bytes to determine the type, and 1 byte +// to mark whether that type is a subtype of c10::intrusive_ptr_target and needs +// retain/release calls. + +#define TORCH_FORALL_TAGS(_) \ + _(None) \ + _(Tensor) \ + _(Double) \ + _(Int) \ + _(Bool) \ + _(Tuple) \ + _(IntList) \ + _(DoubleList) \ + _(BoolList) \ + _(String) \ + _(TensorList) \ + _(Blob) \ + _(GenericList) \ + _(GenericDict) \ + _(Future) \ + _(Device) \ + _(Object) \ + _(Uninitialized) \ + _(Capsule) + +struct CAFFE2_API IValue final { + IValue() : payload{0}, tag(Tag::None), is_intrusive_ptr(false) {} + IValue(const IValue& rhs) + : IValue(rhs.payload, rhs.tag, rhs.is_intrusive_ptr) { + if (is_intrusive_ptr) { + c10::raw::intrusive_ptr::incref(payload.as_intrusive_ptr); + } + } + IValue(IValue&& rhs) noexcept : IValue() { + swap(rhs); + } + ~IValue() { + if (is_intrusive_ptr) { + c10::raw::intrusive_ptr::decref(payload.as_intrusive_ptr); + } + } + IValue & operator=(IValue && rhs) & noexcept { + IValue(std::move(rhs)).swap(*this); // this also sets rhs to None + return *this; + } + IValue & operator=(IValue const & rhs) & { + IValue(rhs).swap(*this); + return *this; + } + + void dump() const; + + bool isAliasOf(const IValue& rhs) const { + if (this->tag != rhs.tag) { + // Trivially don't alias if the type is different + return false; + } + + if (!this->is_intrusive_ptr) { + // Primitive types don't alias anything + return false; + } + + AT_ASSERT(rhs.is_intrusive_ptr); + + // Tensors should be compared based on internal storage + if (this->isTensor()) { + const auto thisTensor = this->toTensor(); + const auto rhsTensor = rhs.toTensor(); + return thisTensor.is_alias_of(rhsTensor); + } + + // Other types can be compared by their ptr value + return this->payload.as_intrusive_ptr == rhs.payload.as_intrusive_ptr; + } + + size_t use_count() const noexcept { + if (!is_intrusive_ptr) { + return 1; + } + + return c10::raw::intrusive_ptr::use_count(payload.as_intrusive_ptr); + } + + void swap(IValue & rhs) noexcept { + std::swap(payload, rhs.payload); + std::swap(is_intrusive_ptr, rhs.is_intrusive_ptr); + std::swap(tag, rhs.tag); + } + + // Accessors for subtypes are arranged together below + // While some of these accessors could be generated through templates, + // we prefer to write them manually for clarity + + // Tensor + IValue(at::Tensor t) + : tag(Tag::Tensor), is_intrusive_ptr(t.defined()) { + // Note: the undefined tensor is not refcounted, so while it + // is tagged as a tensor, is_intrusive_ptr is set to false. + // This is not an optional optimization: our incref call + // *will not* do the right thing when called on an + // undefined tensor. 
+ payload.as_intrusive_ptr = t.unsafeReleaseTensorImpl(); + } + bool isTensor() const { return Tag::Tensor == tag; } + at::Tensor toTensor() &&; + at::Tensor toTensor() const &; + at::TensorImpl* unsafeToTensorImpl() const { + return static_cast(payload.as_intrusive_ptr); + } + + const IValue& toIValue() const { + return *this; + } + IValue& toIValue() { + return *this; + } + + IValue(intrusive_ptr blob) + : tag(Tag::Blob), is_intrusive_ptr(true) { + // TODO (after Tensor merge) If we pass in a Blob holding a Tensor, extract + // and store it as a Tensor instead. + payload.as_intrusive_ptr = blob.release(); + } + bool isBlob() const { + return Tag::Blob == tag; + } + c10::intrusive_ptr toBlob() &&; + c10::intrusive_ptr toBlob() const &; + + // Capsule + IValue(intrusive_ptr blob); + bool isCapsule() const { + return Tag::Capsule == tag; + } + c10::intrusive_ptr toCapsule() &&; + c10::intrusive_ptr toCapsule() const &; + + // Tuple + IValue(c10::intrusive_ptr v); + + template < + typename... Args, + c10::guts::enable_if_t< + !c10::guts::disjunction< + std::is_lvalue_reference..., + c10::guts::negation>...>:: + value, + std::nullptr_t> = nullptr> + IValue(const std::tuple& t); + bool isTuple() const { return Tag::Tuple == tag; } + c10::intrusive_ptr toTuple() &&; + c10::intrusive_ptr toTuple() const &; + + // Double + IValue(double d) + : tag(Tag::Double), is_intrusive_ptr(false) { + payload.as_double = d; + } + bool isDouble() const { return Tag::Double == tag; } + double toDouble() const { + AT_ASSERT(isDouble()); + return payload.as_double; + } + + // Future + IValue(c10::intrusive_ptr v); + bool isFuture() const { return Tag::Future == tag; } + c10::intrusive_ptr toFuture() &&; + c10::intrusive_ptr toFuture() const &; + + // Int + IValue(int64_t i) + : tag(Tag::Int), is_intrusive_ptr(false) { + payload.as_int = i; + } + + // allow you to pass literals (3, 4) without ambiguity + IValue(int32_t i) + : IValue(static_cast(i)) {} + + bool isInt() const { return Tag::Int == tag; } + + int64_t toInt() const { + AT_ASSERT(isInt()); + return payload.as_int; + } + + // Bool + IValue(bool b) + : tag(Tag::Bool), is_intrusive_ptr(false) { + payload.as_bool = b; + } + bool isBool() const { return Tag::Bool == tag; } + bool toBool() const { + AT_ASSERT(isBool()); + return payload.as_bool; + } + + // IntList + IValue(c10::List v); + IValue(c10::ArrayRef v); + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. Please use c10::List instead.") + /// \endcond + IValue(std::vector v); + bool isIntList() const { return Tag::IntList == tag; } + c10::List toIntList() &&; + c10::List toIntList() const &; + c10::ArrayRef toIntListRef() const; + + // ConstantString + IValue(c10::intrusive_ptr v); + IValue(std::string v); + IValue(const char* v): IValue(std::string(v)) {} + bool isString() const { return Tag::String == tag; } + c10::intrusive_ptr toString() &&; + c10::intrusive_ptr toString() const &; + const std::string& toStringRef() const; + + // DoubleList + IValue(c10::List v); + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. 
Please use c10::List instead.") + /// \endcond + IValue(std::vector v); + bool isDoubleList() const { return Tag::DoubleList == tag; } + c10::List toDoubleList() &&; + c10::List toDoubleList() const &; + c10::ArrayRef toDoubleListRef() const; + + // BoolList + IValue(c10::List v); + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. Please use c10::List instead.") + /// \endcond + IValue(std::vector v); + bool isBoolList() const { return Tag::BoolList == tag; } + c10::List toBoolList() &&; + c10::List toBoolList() const &; + + //TensorList + IValue(c10::List v); + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. Please use c10::List instead.") + /// \endcond + IValue(std::vector v); + bool isTensorList() const { return Tag::TensorList == tag; } + c10::List toTensorList() &&; + c10::List toTensorList() const &; + c10::ArrayRef toTensorListRef() const; + + //GenericList + IValue(c10::List v); + bool isGenericList() const { return Tag::GenericList == tag; } + c10::List toGenericList() &&; + c10::List toGenericList() const &; + c10::ArrayRef toGenericListRef() const; + + template + IValue(c10::List v); + template + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. Please use c10::List instead.") + /// \endcond + IValue(std::vector v); + + // GenericDict + IValue(c10::Dict v); + bool isGenericDict() const { return Tag::GenericDict == tag; } + c10::Dict toGenericDict() &&; + c10::Dict toGenericDict() const &; + + template + IValue(c10::Dict v); + + template + /// \cond DOXYGEN_CANNOT_HANDLE_CONSTRUCTORS_WITH_MACROS_SO_EXCLUDE_THIS_LINE_FROM_DOXYGEN + C10_DEPRECATED_MESSAGE("IValues based on std::unordered_map are slow and deprecated. 
Please use c10::Dict instead.") + /// \endcond + IValue(std::unordered_map v); + + template + IValue(c10::optional v); + IValue(c10::nullopt_t); + + // ClassType + IValue(c10::intrusive_ptr v); + bool isObject() const { return tag == Tag::Object; } + c10::intrusive_ptr toObject() &&; + c10::intrusive_ptr toObject() const & ; + const ivalue::Object& toObjectRef() const; + + torch::jit::script::Module toModule() const; + bool isModule() const; + + // None + bool isNone() const { + return Tag::None == tag; + } + std::string toNone() const { + AT_ASSERT(isNone()); + return "None"; + } + + static IValue uninitialized() { + auto i = IValue(); + i.tag = Tag::Uninitialized; + return i; + } + + // Scalar, which gets encoded as either an Int or a Double + IValue(at::Scalar s) + : IValue() { + if(s.isFloatingPoint()) { + *this = s.toDouble(); + } else { + *this = s.toLong(); + } + } + bool isScalar() const { + return isDouble() || isInt(); + } + at::Scalar toScalar() const { + if(isDouble()) + return toDouble(); + else if(isInt()) + return toInt(); + throw std::runtime_error("IValue is not a Scalar"); + } + + // Device + IValue(c10::Device d) + : tag(Tag::Device), is_intrusive_ptr(false) { + payload.as_device.type = d.type(); + payload.as_device.index = d.index(); + } + bool isDevice() const { return Tag::Device == tag; } + c10::Device toDevice() const { + AT_ASSERT(isDevice()); + return c10::Device(payload.as_device.type, payload.as_device.index); + } + + // ScalarType + IValue(ScalarType t) + : IValue(static_cast::type>(t)) {} + at::ScalarType toScalarType() const { + return static_cast(toInt()); + } + + // Layout + IValue(Layout l) + : IValue(static_cast::type>(l)) {} + at::Layout toLayout() const { + return static_cast(toInt()); + } + + // MemoryFormat + IValue(MemoryFormat m) + : IValue(static_cast::type>(m)) {} + at::MemoryFormat toMemoryFormat() const { + return static_cast(toInt()); + } + + // QScheme + IValue(at::QScheme qscheme) + : tag(Tag::Int), is_intrusive_ptr(false) { + payload.as_int = static_cast(qscheme); + } + + at::QScheme toQScheme() const { + return static_cast(toInt()); + } + + + // for debugging + std::string tagKind() const { + switch(tag) { + #define DEFINE_CASE(x) case Tag::x: return #x; + TORCH_FORALL_TAGS(DEFINE_CASE) + #undef DEFINE_CASE + } + return "InvalidTag(" + c10::guts::to_string(static_cast(tag)) + ")"; + } + + // generic v.to() implementations + // that can be used in special functions like pop/push + // that use template meta-programming. + // prefer the directly named methods when you can, + // since they are simpler to understand + + // Note: if you get linker errors saying one of these is missing, + // change it to ... && = delete; and you will see better error messages for why + // However, we cannot commit this because some compiler versions barf on it. + template + T to() &&; + template + T to() const &; + + // ToOptional: convert a IValue to the Optional obj that accepts both T and None + template + optional toOptional(); + + // this is a shallow comparison of two IValues to test the object identity + bool isSameIdentity(const IValue& rhs) const; + + CAFFE2_API friend std::ostream& operator<<( + std::ostream& out, + const IValue& v); + + bool isPtrType() const { + return is_intrusive_ptr; + } + + const void* internalToPointer() const { + TORCH_INTERNAL_ASSERT(isPtrType(), "Can only call internalToPointer() for pointer types"); + return payload.as_intrusive_ptr; + } + + TypePtr type() const; + + private: + // NOTE: IValue tags are intentionally private. 
In the future we may encode + // this value different (e.g. using NaN boxing), and this would make it more + // costly to determine the tag for all types vs just determining if something + // is a particular type. Instead we want clients to use the `isX` methods when + // possible. If for perf. reasons you really, absolutely, must have a jump + // table, then we can revisit this. + enum class Tag : uint32_t { +#define DEFINE_TAG(x) x, + TORCH_FORALL_TAGS(DEFINE_TAG) +#undef DEFINE_TAG + }; + + template> + c10::intrusive_ptr moveToIntrusivePtr(); + template> + c10::intrusive_ptr toIntrusivePtr() const; + + void clearToNone() { + payload.as_int = 0; + tag = Tag::None; + is_intrusive_ptr = false; + } + + union Payload { + int64_t as_int; + double as_double; + bool as_bool; + c10::intrusive_ptr_target* as_intrusive_ptr; + struct { + DeviceType type; + DeviceIndex index; + } as_device; + }; + + IValue(Payload p, Tag t, bool i) + : payload(p), tag(t), is_intrusive_ptr(i) {} + + Payload payload; + Tag tag; + bool is_intrusive_ptr; + friend struct WeakIValue; +}; + +struct CAFFE2_API WeakIValue final { + WeakIValue() + : payload{0} + , tag(IValue::Tag::None) + , is_intrusive_ptr(false) {} + + WeakIValue(const WeakIValue& rhs) + : payload(rhs.payload), + tag(rhs.tag), + is_intrusive_ptr(rhs.is_intrusive_ptr) { + if (is_intrusive_ptr) { + c10::raw::weak_intrusive_ptr::incref(payload.as_intrusive_ptr); + } + } + WeakIValue(const IValue& rhs) + : payload(rhs.payload), + tag(rhs.tag), + is_intrusive_ptr(rhs.is_intrusive_ptr) { + if (is_intrusive_ptr) { + c10::raw::weak_intrusive_ptr::incref(payload.as_intrusive_ptr); + } + } + WeakIValue(WeakIValue&& rhs) noexcept : WeakIValue() { + swap(rhs); + } + ~WeakIValue() { + if (is_intrusive_ptr) { + c10::raw::weak_intrusive_ptr::decref(payload.as_intrusive_ptr); + } + } + WeakIValue & operator=(WeakIValue && rhs) & noexcept { + WeakIValue(std::move(rhs)).swap(*this); // this also sets rhs to None + return *this; + } + WeakIValue & operator=(WeakIValue const & rhs) & { + WeakIValue(rhs).swap(*this); + return *this; + } + void swap(WeakIValue & rhs) noexcept { + std::swap(payload, rhs.payload); + std::swap(is_intrusive_ptr, rhs.is_intrusive_ptr); + std::swap(tag, rhs.tag); + } + + bool isSameIdentity(const WeakIValue& rhs) const { + return payload.as_int == rhs.payload.as_int && tag == rhs.tag && + is_intrusive_ptr == rhs.is_intrusive_ptr; + } + + IValue lock() const { + if (!is_intrusive_ptr) { + return IValue(payload, tag, false); + } + auto temp = c10::weak_intrusive_ptr::reclaim( + payload.as_intrusive_ptr); + IValue::Payload pl; + pl.as_intrusive_ptr = temp.lock().release(); + temp.release(); + if (!pl.as_intrusive_ptr) { + return IValue(); + } else { + return IValue(pl, tag, true); + } + } + + size_t use_count() const noexcept { + if (!is_intrusive_ptr) { + return 1; + } + auto temp = c10::weak_intrusive_ptr::reclaim( + payload.as_intrusive_ptr); + size_t result = temp.use_count(); + temp.release(); + return result; + } + + size_t weak_use_count() const noexcept { + if (!is_intrusive_ptr) { + return 1; + } + auto temp = c10::weak_intrusive_ptr::reclaim( + payload.as_intrusive_ptr); + size_t result = temp.weak_use_count(); + temp.release(); + return result; + } + size_t hash() const { + return payload.as_int; + } + +private: + IValue::Payload payload; + IValue::Tag tag; + bool is_intrusive_ptr; +}; + +// An owning pointer to a Class. 
Just a pair of shared_ptrs to the class type +// and its owning CU, so that the class type is guaranteed to stay alive as long +// as we hold this object. +struct StrongTypePtr { + StrongTypePtr( + std::shared_ptr cu, + std::shared_ptr type) + : cu_(std::move(cu)), type_(type) { + TORCH_INTERNAL_ASSERT(cu_); + TORCH_INTERNAL_ASSERT(type_); + } + std::shared_ptr cu_; + std::shared_ptr type_; +}; + +TORCH_API std::unordered_map& getCustomClassTypeMap(); + +#ifndef C10_MOBILE + +template +c10::StrongTypePtr getCustomClassType() { + auto tmap = c10::getCustomClassTypeMap(); + auto res = tmap.find(typeid(T).name()); + if (res == tmap.end()) { + throw c10::Error("Can't find class id in custom class type map", ""); + } + return res->second; +} + +template +inline bool isCustomClassRegistered() { + auto tmap = c10::getCustomClassTypeMap(); + return tmap.find(typeid(T).name()) != tmap.end(); +} + +#else // C10_MOBILE + +template +c10::StrongTypePtr getCustomClassType() { + throw c10::Error("Custom class is not supported on mobile.", ""); +} + +template +inline bool isCustomClassRegistered() { + return false; +} + +#endif // C10_MOBILE + +TORCH_API std::unordered_map>& +getClassConverter(); +} + +#include diff --git a/thirdparty/libtorch/include/ATen/core/ivalue_inl.h b/thirdparty/libtorch/include/ATen/core/ivalue_inl.h new file mode 100644 index 0000000000..bece57fe77 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/ivalue_inl.h @@ -0,0 +1,848 @@ +#pragma once + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace torch { +namespace jit { +struct Function; +namespace script { +struct CompilationUnit; +} +} // namespace jit +} // namespace torch +namespace c10 { +struct IValue; +struct ClassType; +struct TupleType; + +// For custom class __init__ registration, we need to pass in a function +// that looks like this: [](IValue x, args...) + +// However, kernel_functor.h automatically sets the input types of the function +// by introspecting the types of the functor (which is IValue in this case). +// However, we need the type it binds to be Foo. + +// Instead, we pass in a lambda [](ivalue_holder x, args...) from +// which getTypePtr can recover the original class pointer. 
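For orientation, here is a minimal illustrative sketch (not part of the vendored header or of this patch) of how the tagged-union IValue interface declared in ivalue.h is typically used; it assumes a standard libtorch build where ATen headers are on the include path.

#include <ATen/ATen.h>
#include <ATen/core/ivalue.h>
#include <iostream>

void ivalue_usage_sketch() {
  c10::IValue i(static_cast<int64_t>(42));    // tagged Tag::Int, payload stored in as_int
  c10::IValue d(3.5);                         // tagged Tag::Double
  c10::IValue t(at::ones({2, 2}));            // tagged Tag::Tensor, refcounted intrusive_ptr payload

  if (i.isInt())    std::cout << i.toInt() << "\n";
  if (d.isDouble()) std::cout << d.toDouble() << "\n";
  if (t.isTensor()) std::cout << t.toTensor().sum().item<float>() << "\n";
  std::cout << t.tagKind() << "\n";           // prints "Tensor"; handy for debugging
}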
+ +template +struct tagged_capsule { + IValue ivalue; +}; + +template +c10::intrusive_ptr IValue::moveToIntrusivePtr() { + auto t = c10::intrusive_ptr::reclaim(static_cast(payload.as_intrusive_ptr)); + clearToNone(); + return t; +} +template +c10::intrusive_ptr IValue::toIntrusivePtr() const { + auto r = c10::intrusive_ptr::reclaim(static_cast(payload.as_intrusive_ptr)); + auto p = r; + r.release(); + return p; +} + +template +intrusive_ptr static_intrusive_pointer_cast(intrusive_ptr r) { + return intrusive_ptr::reclaim(static_cast(r.release())); +} + +inline c10::intrusive_ptr IValue::toFuture() && { + AT_ASSERT(isFuture(), "Expected Future but got ", tagKind()); + return moveToIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toFuture() const & { + AT_ASSERT(isFuture(), "Expected Future but got ", tagKind()); + return toIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toString() && { + AT_ASSERT(isString(), "Expected String but got ", tagKind()); + return moveToIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toString() const & { + AT_ASSERT(isString(), "Expected String but got ", tagKind()); + return toIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toObject() && { + AT_ASSERT(isObject(), "Expected Object but got ", tagKind()); + return toIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toObject() const & { + AT_ASSERT(isObject(), "Expected Object but got ", tagKind()); + return toIntrusivePtr(); +} +inline at::Tensor IValue::toTensor() && { + AT_ASSERT(isTensor(), "Expected Tensor but got ", tagKind()); + return at::Tensor(moveToIntrusivePtr()); +} +inline at::Tensor IValue::toTensor() const & { + AT_ASSERT(isTensor(), "Expected Tensor but got ", tagKind()); + return at::Tensor(toIntrusivePtr()); +} +inline c10::intrusive_ptr IValue::toBlob() && { + AT_ASSERT(isBlob(), "Expected Blob but got ", tagKind()); + return moveToIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toBlob() const & { + AT_ASSERT(isBlob(), "Expected Blob but got ", tagKind()); + return toIntrusivePtr();; +} +inline c10::intrusive_ptr IValue::toCapsule() && { + TORCH_INTERNAL_ASSERT(isCapsule()); + return moveToIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toCapsule() const & { + TORCH_INTERNAL_ASSERT(isCapsule()); + return toIntrusivePtr(); +} + +namespace ivalue { + +template +using Shared = c10::intrusive_ptr; + +// string +struct CAFFE2_API ConstantString final : c10::intrusive_ptr_target { + private: + const std::string str_; + public: + ConstantString(std::string str) + : str_(std::move(str)) {} + static c10::intrusive_ptr create(std::string str_); + const std::string & string() const { + return str_; + } + operator const std::string & () const { + return string(); + } + CAFFE2_API friend std::ostream& operator<<( + std::ostream& out, + const ConstantString& v); +}; + +struct Future; + +struct CAFFE2_API Tuple : c10::intrusive_ptr_target { + private: + std::vector elements_; + mutable std::shared_ptr type_; // lazily computed for unnamed tuples + + public: + // named tuples have additional type information, so we + // directly create them tagged + static c10::intrusive_ptr createNamed( + std::vector elements_, + std::shared_ptr type_) { + return c10::make_intrusive(std::move(elements_), type_); + } + static c10::intrusive_ptr create(std::vector elements_) { + return c10::make_intrusive(std::move(elements_)); + } + + template + static c10::intrusive_ptr create(Args... 
elements_) { + return c10::make_intrusive(std::vector{IValue(elements_)...}); + } + + const std::vector& elements() const & { + return elements_; + } + operator const std::vector&() const { + return elements(); + } + + std::vector& elements() & { + return elements_; + } + operator std::vector&() { + return elements(); + } + + std::vector&& elements() && { + return std::move(elements_); + } + std::shared_ptr type() const; + + private: + Tuple(std::vector elements, std::shared_ptr type = nullptr) + : elements_(std::move(elements)), type_(std::move(type)) {} + + friend class c10::intrusive_ptr; +}; + +struct Object; +} + +// Future +struct C10_EXPORT ivalue::Future final : c10::intrusive_ptr_target { + private: + c10::intrusive_ptr intrusive_from_this() { + c10::raw::intrusive_ptr::incref(this); // we are creating a new pointer + // from a raw `this` pointer + // so we need to bump the refcount + // to account for this ownership + return c10::intrusive_ptr::reclaim(this); + } + + public: + Future(TypePtr type) : type_(type) {} + struct CAFFE2_API FutureError final : public std::exception { + FutureError(std::string&& error_msg_) + : error_msg(std::move(error_msg_)) {} + + FutureError() = default; + + const char* what() const noexcept override { + return error_msg.c_str(); + } + + std::string error_msg; + }; + + /** + * Wait on the future until it completes. + */ + void wait() { + std::unique_lock lock(mutex_); + while (!completed_) { + finished_cv_.wait(lock); + } + } + + /** + * Explicitly mark the future as completed with the output value. + */ + void markCompleted(IValue value) { + std::unique_lock lock(mutex_); + AT_ASSERT(!completed()); + completed_ = true; + value_ = std::move(value); + + fireCallbacks(); + finished_cv_.notify_all(); + } + + void markCompleted() { + markCompleted(IValue {}); + } + + void markCompleted(FutureError&& error_) { + std::unique_lock lock(mutex_); + AT_ASSERT(!completed()); + completed_ = true; + has_error = true; + error = std::move(error_); + + fireCallbacks(); + finished_cv_.notify_all(); + } + + // Get the result of the current future. + IValue value() { + std::unique_lock lock(mutex_); + AT_ASSERT(completed()); + if (has_error) { + throw error; + } + return value_; + } + + /** + * Add a callback to the future. + * The callbacks will be executed once the future completes. + * If the future has already completed, + * this function will execute the callback immediately. + */ + void addCallback(std::function callback) { + std::unique_lock lock(mutex_); + if (completed()) { + lock.unlock(); + callback(); + return; + } + callbacks.push_back(callback); + } + + // Check if the current future has completed + bool completed() const{ + return completed_; + } + + CAFFE2_API friend std::ostream& operator<<( + std::ostream& out, + const Future& v); + + TypePtr type() const { + return type_; + } + + private: + void fireCallbacks() { + AT_ASSERT(completed()); + // There is no need to protect callbacks with the lock. + // Once completed_ is set to true, no one can add new callback to the list. + for (auto& callback : callbacks) { + callback(); + } + callbacks.clear(); + } + + std::mutex mutex_; + std::atomic_bool completed_ = {false}; // is this future complete + std::condition_variable finished_cv_; + + IValue value_; // when finished the value + TypePtr type_; + std::vector> callbacks; + bool has_error = false; + FutureError error; +}; + +// User-defined object. 
+struct C10_EXPORT ivalue::Object final : c10::intrusive_ptr_target { + public: + Object(StrongTypePtr type, size_t numSlots) : type_(std::move(type)) { + slots_.resize(numSlots); + } + + static c10::intrusive_ptr create( + StrongTypePtr type, + size_t numSlots) { + return c10::make_intrusive(std::move(type), numSlots); + } + + /** + * Slot API. + * + * Attributes are stored as a simple vector so that lookups are fast at + * runtime. A "slot" is just an index into that vector, which can be computed + * statically if you have access to the class type. Use this API if you are + * writing compiler stuff. + */ + void setSlot(size_t slot, IValue v) { + if (slot >= slots_.size()) { + // for module types, it is possible that the members of the class have + // expanded after the object was created. In this case, we expand + // the slots to the right size + resizeObject(slot); + } + slots_[slot] = v; + } + + const IValue& getSlot(size_t slot) const { + return slots_.at(slot); + } + + void unsafeRemoveSlot(size_t slot) { + TORCH_CHECK(slot < slots_.size()); + slots_.erase(slots_.begin() + slot); + } + + /** + * Attribute API. + * + * Wrappers around the slot stuff so that users can access attributes + * directly. Use this API if you are a user. + * + * Note: Unlike in Python, TorchScript must make a distinction between + * attributes (which are IValues) and methods (which are Methods). If you + * want a method, use `obj.type()->getMethod()` + */ + IValue getAttr(const std::string& name) const; + void setAttr(const std::string& name, IValue v); + // Remove attribute by name, caller is responsible for + // the safety of this operation + // We didn't remove the attribute in the type because the type + // might be shared by multiple objects. + // Therefore after removing attribute, the object is in an inconsistent + // state where it has more attribute types in its Type than + // the attribute slots it has, user needs to make sure the object + // has consistent by removing the attribute in type as well + void unsafeRemoveAttr(const std::string& name); + + std::string name() const; + + const std::vector& slots() const { + return slots_; + } + std::shared_ptr type() const { + return type_.type_; + } + + std::shared_ptr compilation_unit() { + return type_.cu_; + } + + private: + void resizeObject(size_t slot); + StrongTypePtr type_; + std::vector slots_; +}; + +std::vector> iterationOrder(const c10::Dict& dict); + +#undef TORCH_FORALL_TAGS + +namespace detail { + +struct _guarded_unsigned_long_unique_dummy final { + _guarded_unsigned_long_unique_dummy(int64_t){}; +}; +using _guarded_unsigned_long = c10::guts::conditional_t< + std::is_same::value || + std::is_same::value, + _guarded_unsigned_long_unique_dummy, + unsigned long>; + +} // namespace detail + +inline const ivalue::Object& IValue::toObjectRef() const { + AT_ASSERT(isObject(), "Expected Object but got ", tagKind()); + return *static_cast(payload.as_intrusive_ptr); +} + +// note: when adding a DEFINE_TO case here you should also add a +// toX method to IValue. These named methods are much more discoverable +// than the to templated function. 
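As a small illustration of the note above (not part of the original source), the named accessor and the templated to<T>() are interchangeable for any type that has a DEFINE_TO entry:

#include <ATen/core/ivalue.h>

void to_accessor_sketch() {
  c10::IValue iv(static_cast<int64_t>(7));
  int64_t a = iv.toInt();        // named accessor: more discoverable, preferred in hand-written code
  int64_t b = iv.to<int64_t>();  // templated form: useful in generic push/pop helpers
  (void)a; (void)b;
}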
+ +#define DEFINE_TO(type, method_name) \ +template<> \ +inline type IValue::to() && { \ + return std::move(*this).method_name(); \ +} \ +template<> \ +inline type IValue::to() const & { \ + return this->method_name(); \ +} +DEFINE_TO(at::Tensor, toTensor) +DEFINE_TO(float, toDouble) +DEFINE_TO(double, toDouble) +DEFINE_TO(unsigned char, toInt) +DEFINE_TO(signed char, toInt) +DEFINE_TO(unsigned short, toInt) +DEFINE_TO(short, toInt) +DEFINE_TO(int, toInt) +DEFINE_TO(uint32_t, toInt) +DEFINE_TO(uint64_t, toInt) +DEFINE_TO(detail::_guarded_unsigned_long, toInt) +DEFINE_TO(int64_t, toInt) +DEFINE_TO(bool, toBool) +DEFINE_TO(c10::intrusive_ptr, toBlob); +DEFINE_TO(c10::intrusive_ptr, toString) +DEFINE_TO(c10::intrusive_ptr, toObject) +DEFINE_TO(at::Scalar, toScalar) +DEFINE_TO(c10::List, toIntList) +DEFINE_TO(c10::List, toDoubleList) +DEFINE_TO(c10::List, toBoolList) +DEFINE_TO(c10::List, toTensorList) +DEFINE_TO(c10::impl::GenericList, toGenericList) +DEFINE_TO(c10::impl::GenericDict, toGenericDict) +DEFINE_TO(c10::intrusive_ptr, toTuple) +DEFINE_TO(std::string, toStringRef) +DEFINE_TO(c10::intrusive_ptr, toFuture) +DEFINE_TO(IValue, toIValue) +DEFINE_TO(c10::Device, toDevice) +DEFINE_TO(at::ScalarType, toScalarType) +DEFINE_TO(at::Layout, toLayout) +DEFINE_TO(at::MemoryFormat, toMemoryFormat) +DEFINE_TO(at::QScheme, toQScheme) + +template +struct _fake_type {}; + +// generic_to converts an IValue from a generic list or generic dict +// to a concrete list/dict type likelike List, Dict<...> or optional. +// Note that in the case of lists, this only works for IValue-based lists, +// i.e. not for int64_t, double, ... +// generic_to is an implementation detail of IValue::to and not +// supposed to be called directly. +// The _fake_type parameter allows us to overload +// based on the return type. +template +C10_DEPRECATED_MESSAGE("IValues based on std::vector are potentially slow and deprecated. Please use c10::List instead.") +std::vector generic_to( + IValue ivalue, + _fake_type>) { + // We need to do a deep copy of the vector because there might be other + // references to this same IValue that also use the list. We can't just + // move the elements out. + auto list = std::move(ivalue).to>(); + std::vector result; + result.reserve(list.size()); + for (Elem v : list) { + result.push_back(std::move(v)); + } + return result; +} + +template +T generic_to( + IValue ivalue, + _fake_type) { + using ElemType = typename std::remove_pointer::type::element_type; + auto obj = ivalue.toObject(); + auto capsule = obj->getSlot(0); + return c10::static_intrusive_pointer_cast(capsule.toCapsule()); +} + +template +tagged_capsule generic_to( + IValue ivalue, + _fake_type>) { + return tagged_capsule{ivalue}; +} + +template +c10::List generic_to( + IValue ivalue, + _fake_type>) { + return impl::toTypedList(std::move(ivalue).toGenericList()); +} + +template +c10::Dict generic_to( + IValue ivalue, + _fake_type>) { + return impl::toTypedDict(std::move(ivalue).toGenericDict()); +} + +template +C10_DEPRECATED_MESSAGE("IValues based on std::unordered_map are slow and deprecated. 
Please use c10::Dict instead.") +std::unordered_map generic_to( + IValue ivalue, + _fake_type>) { + std::unordered_map specialized_dict; + + for (const auto& item : std::move(ivalue).toGenericDict()) { + specialized_dict[item.key().to()] = item.value().to(); + } + + return specialized_dict; +} + +template +c10::optional generic_to( + IValue ivalue, + _fake_type>) { + if (ivalue.isNone()) { + return c10::nullopt; + } + return std::move(ivalue).to(); +} + +namespace detail { +template +Tuple generic_to_tuple_impl( + const std::vector& t, + c10::guts::index_sequence) { + return std::make_tuple( + t[INDEX].to::type>()...); +} +} + +template < + typename... Args, + typename Indices = c10::guts::make_index_sequence, + c10::guts::enable_if_t< + !c10::guts::disjunction< + std::is_lvalue_reference..., + c10::guts::negation>...>::value, + std::nullptr_t> = nullptr> +std::tuple generic_to(IValue ivalue, _fake_type>) { + auto vals = ivalue.toTuple()->elements(); + TORCH_CHECK(vals.size() == sizeof...(Args)); + return detail::generic_to_tuple_impl>(vals, Indices{}); +} + +template +inline T IValue::to() && { + return generic_to(std::move(*this), _fake_type{}); +} + +template +inline T IValue::to() const& { + return generic_to(*this, _fake_type{}); +} + +inline c10::List IValue::toIntList() && { + AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind()); + return c10::List(moveToIntrusivePtr>()); +} +inline c10::List IValue::toIntList() const & { + AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind()); + return c10::List(toIntrusivePtr>()); +} +inline c10::ArrayRef IValue::toIntListRef() const { + AT_ASSERT(isIntList(), "Expected IntList but got ", tagKind()); + return static_cast*>(payload.as_intrusive_ptr)->list; +} +inline c10::List IValue::toDoubleList() && { + AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind()); + return c10::List(moveToIntrusivePtr>()); +} +inline c10::List IValue::toDoubleList() const & { + AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind()); + return c10::List(toIntrusivePtr>()); +} +inline c10::ArrayRef IValue::toDoubleListRef() const { + AT_ASSERT(isDoubleList(), "Expected DoubleList but got ", tagKind()); + return static_cast*>(payload.as_intrusive_ptr)->list; +} +inline c10::List IValue::toBoolList() && { + AT_ASSERT(isBoolList(), "Expected BoolList but got ", tagKind()); + return c10::List(moveToIntrusivePtr>()); +} +inline c10::List IValue::toBoolList() const & { + AT_ASSERT(isBoolList(), "Expected BoolList but got ", tagKind()); + return c10::List(toIntrusivePtr>()); +} +inline c10::List IValue::toTensorList() && { + AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind()); + return c10::List(moveToIntrusivePtr>()); +} +inline c10::List IValue::toTensorList() const & { + AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind()); + return c10::List(toIntrusivePtr>()); +} +inline c10::ArrayRef IValue::toTensorListRef() const { + AT_ASSERT(isTensorList(), "Expected TensorList but got ", tagKind()); + return static_cast*>(payload.as_intrusive_ptr)->list; +} +inline c10::List IValue::toGenericList() && { + AT_ASSERT(isGenericList(), "Expected GenericList but got ", tagKind()); + return c10::List(moveToIntrusivePtr>()); +} +inline c10::List IValue::toGenericList() const & { + AT_ASSERT(isGenericList(), "Expected GenericList but got ", tagKind()); + return c10::List(toIntrusivePtr>()); +} +inline c10::ArrayRef IValue::toGenericListRef() const { + AT_ASSERT(isGenericList(), "Expected GenericList but got ", 
tagKind()); + return static_cast*>(payload.as_intrusive_ptr)->list; +} +inline c10::Dict IValue::toGenericDict() && { + AT_ASSERT(isGenericDict(), "Expected GenericDict but got ", tagKind()); + return c10::Dict(moveToIntrusivePtr()); +} +inline c10::Dict IValue::toGenericDict() const & { + AT_ASSERT(isGenericDict(), "Expected GenericDict but got ", tagKind()); + return c10::Dict(toIntrusivePtr()); +} +inline c10::intrusive_ptr IValue::toTuple() && { + AT_ASSERT(isTuple(), "Expected Tuple but got ", tagKind()); + return moveToIntrusivePtr(); +} +inline c10::intrusive_ptr IValue::toTuple() const & { + AT_ASSERT(isTuple(), "Expected Tuple but got ", tagKind()); + return toIntrusivePtr(); +} + +inline IValue::IValue(c10::intrusive_ptr v) +: tag(Tag::Tuple), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.release(); +} +template < + typename... Args, + c10::guts::enable_if_t< + !c10::guts::disjunction< + std::is_lvalue_reference..., + c10::guts::negation>...>::value, + std::nullptr_t>> +inline IValue::IValue(const std::tuple& t) + : IValue( + std::move(c10::guts::apply(c10::ivalue::Tuple::create, t))) { +} +inline IValue::IValue(c10::List v) +: tag(Tag::IntList), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} +inline IValue::IValue(std::vector v) +: IValue(c10::impl::toList(v)) {} +inline IValue::IValue(c10::ArrayRef v) +: IValue(c10::List(v)) {} + +inline IValue::IValue(c10::intrusive_ptr v) +: tag(Tag::String), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.release(); +} +inline IValue::IValue(std::string v) +: IValue(ivalue::ConstantString::create(std::move(v))) {} + +inline IValue::IValue(c10::List v) +: tag(Tag::DoubleList), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} +inline IValue::IValue(std::vector v) +: IValue(c10::impl::toList(std::move(v))) {} + +inline IValue::IValue(c10::List v) +: tag(Tag::BoolList), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} +inline IValue::IValue(std::vector v) +: IValue(c10::impl::toList(std::move(v))) {} + +inline IValue::IValue(c10::List v) +: tag(Tag::TensorList), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} +inline IValue::IValue(std::vector v) +: IValue(c10::impl::toList(std::move(v))) {} + +inline IValue::IValue(c10::impl::GenericList v) +: tag(Tag::GenericList), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} + +template inline IValue::IValue(c10::List v) +: IValue(impl::toGenericList(std::move(v))) { + static_assert(std::is_same::StorageT>::value, "Can only use this constructor for generic list types"); +} +template inline IValue::IValue(std::vector v) +: IValue(c10::List()) { + static_assert(std::is_same::StorageT>::value, "Can only use this constructor for generic list types"); + auto list = to>(); + list.reserve(v.size()); + for (auto& e : v) { + list.push_back(std::move(e)); + } +} + +inline IValue::IValue(c10::impl::GenericDict v) +: tag(Tag::GenericDict), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.impl_.release(); +} +template +inline IValue::IValue(c10::Dict v) +: IValue(impl::toGenericDict(std::move(v))) {} + +template inline IValue::IValue(std::unordered_map v) +: IValue(Dict()) { + auto dict = to>(); + dict.reserve(v.size()); + for (auto& e : v) { + dict.insert(std::move(e.first), std::move(e.second)); + } +} + +template inline IValue::IValue(c10::optional v): IValue() { + if (v.has_value()) { + *this = IValue(std::move(*v)); + } +} + +inline 
IValue::IValue(c10::nullopt_t): IValue() {} + +inline IValue::IValue(c10::intrusive_ptr v) +: tag(Tag::Object), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.release(); +} +inline IValue::IValue(c10::intrusive_ptr v) +: tag(Tag::Capsule), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.release(); +} +inline IValue::IValue(c10::intrusive_ptr v) +: tag(Tag::Future), is_intrusive_ptr(true) { + payload.as_intrusive_ptr = v.release(); +} + +inline const std::string& IValue::toStringRef() const { + return toString()->string(); +} + +template +inline optional IValue::toOptional() { + if (this->isNone()) { + return nullopt; + } + return this->to(); +} + +inline bool IValue::isSameIdentity(const IValue& rhs) const { + // We choose to not use memcmp for payload check due to potential random padding characters on union type + + // Semantics: + // 1. None is None, False is False, and True is True are all true + // 2. If it is a tensor type, we need to take undefined tensor into account + // 3. Undefined_tensor is None and vice versa should be true + // 4. If it is a reference type (i.e. is_intrusive_ptr), then is is True when the pointed-to object is the same. + // 5. False for all other comparisons. + if (this->isNone() && rhs.isNone()) { + return true; + } else if (this->isBool() && rhs.isBool()) { + // for bool type, do equality check + return this->toBool() == rhs.toBool(); + } else if (this->isTensor() && rhs.isTensor()) { + // for tensor type, just check the as_intrusive_ptr since is_intrusive_ptr is false for undefined tensor + return this->payload.as_intrusive_ptr == rhs.payload.as_intrusive_ptr; + } else if (this->isTensor() && rhs.isNone()) { + // special case: undefined tensor and None are the same identity + return !this->is_intrusive_ptr; + } else if (this->isNone() && rhs.isTensor()) { + // special case: undefined tensor and None are the same identity + return !rhs.is_intrusive_ptr; + } else { + // for objects holding in IValue, do shallow compare on pointer address to testify the identity + return this->is_intrusive_ptr && rhs.is_intrusive_ptr + && this->payload.as_intrusive_ptr == rhs.payload.as_intrusive_ptr; + } +} + +namespace ivalue { +namespace detail { +// This code allows us to template on a function based on whether IValue has a +// constructor for it. Specifically, has_constructor{} inherits from std::true_type if +// IValue(T) compiles, and inherits from std::false_type if IValue(T) doesn't. +// We use it for calling the IValue constructor for `from` if it exists, and otherwise +// attempt to use our custom class code. 
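The detection idiom described above is easier to read in isolation; the following standalone sketch (hypothetical names, not part of the original source) shows the same type_sink/SFINAE pattern, here probing whether a type is constructible from std::string instead of from IValue:

#include <string>
#include <type_traits>
#include <utility>

template <typename T> struct sink { using type = void; };
template <typename T> using sink_t = typename sink<T>::type;

// Primary template: assume "not constructible from std::string".
template <typename T, typename = void>
struct from_string : std::false_type {};

// This specialization is selected only when T(std::string) compiles (SFINAE).
template <typename T>
struct from_string<T, sink_t<decltype(T(std::declval<std::string>()))>> : std::true_type {};

static_assert(from_string<std::string>::value, "std::string is constructible from std::string");
static_assert(!from_string<int>::value, "int is not constructible from std::string");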
+template struct type_sink { typedef void type; }; +template using type_sink_t = typename type_sink::type; +template struct has_constructor : std::false_type {}; \ +template struct has_constructor< + T, + type_sink_t< decltype( IValue(std::declval())) > +>: std::true_type {}; + +template +IValue from_(T x, std::true_type) { + return IValue(x); +} +template +IValue from_(c10::intrusive_ptr x, std::false_type) { + using inputType = c10::intrusive_ptr; + if (!isCustomClassRegistered()) { + throw c10::Error("Trying to return a class that we don't support and isn't a registered custom class.", ""); + } + auto res = getCustomClassType(); + auto retObject = ivalue::Object::create(res->second, 1); + auto objPtr = c10::static_intrusive_pointer_cast(x); + + retObject->setSlot(0, IValue(objPtr)); + auto resIVal = IValue(std::move(retObject)); + return resIVal; +} +template +IValue from_(T x, std::false_type) { + static_assert(guts::false_t::value, "You are calling from with a type that it doesn't support, and isn't a potential custom class (ie: is an intrusive_ptr)"); + return IValue(); +} +} + +template +IValue from(T x) { + return detail::from_(x, detail::has_constructor{}); +} + +} +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/jit_type.h b/thirdparty/libtorch/include/ATen/core/jit_type.h new file mode 100644 index 0000000000..7b877ba5ca --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/jit_type.h @@ -0,0 +1,1644 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include + +struct ClassType; +namespace torch { +namespace jit { +struct Function; +namespace script { +struct CompilationUnit; +} +} // namespace jit +} // namespace torch + +namespace c10 { + +struct FunctionSchema; +using OptNameList = c10::optional>; + +#define C10_FORALL_TYPES(_) \ + _(AnyType) \ + _(TensorType) \ + _(TupleType) \ + _(ListType) \ + _(DictType) \ + _(NumberType) \ + _(FloatType) \ + _(FutureType) \ + _(IntType) \ + _(NoneType) \ + _(StringType) \ + _(GeneratorType) \ + _(BoolType) \ + _(OptionalType) \ + _(VarType) \ + _(DeviceObjType) \ + _(FunctionType) \ + _(ClassType) \ + _(CapsuleType) \ + _(InterfaceType) + +enum class TypeKind { +#define DEFINE_TYPE(T) T, + C10_FORALL_TYPES(DEFINE_TYPE) +#undef DEFINE_TYPE +}; + +CAFFE2_API const char* typeKindToString(TypeKind kind); + +struct Type; +using TypePtr = std::shared_ptr; + +struct CAFFE2_API Type : std::enable_shared_from_this { + private: + TypeKind kind_; + + protected: + Type(TypeKind kind) : kind_(kind) {} + + public: + virtual bool operator==(const Type& rhs) const = 0; + + // subtyping relation. By default, we return true for the case + // when the type is exactly equal or if this <: T where rhs = Optional[T] + + // if this returns false and the why_not stream is non-null, it contains + // additional details that describe why this is not a subtype of 'rhs'. + // This additional information should only contain details that are not obvious + // from the python_str() that describes the type. For instance it is clear that `int <: str` is false + // but not clear why `Foo <: InterfaceBar` might be false. 
+ virtual bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const; + virtual bool is_module() const; + bool isSubtypeOf(const TypePtr rhs) const { + return isSubtypeOfExt(rhs, nullptr); + } + + // How this type will appear in FunctionSchema declarations + virtual std::string str() const = 0; + + // How this type will appear as if it were a type annotation in Python + // which is sometimes different than how it appears in declarations (e.g. + // int[] vs List[int]) + virtual std::string python_str() const { + return str(); + } + + TypeKind kind() const { + return kind_; + } + + virtual bool requires_grad() const { + for (const auto& ct : containedTypes()) { + if (ct->requires_grad()) { + return true; + } + } + return false; + } + + // Dynamically cast this object to the subclass indicated by the + // template variable, returning nullptr if the cast is invalid. + template + std::shared_ptr cast() { + if (T::Kind == kind()) { + return std::static_pointer_cast(shared_from_this()); + } + return nullptr; + } + template + std::shared_ptr cast() const { + if (T::Kind == kind()) { + return std::static_pointer_cast(shared_from_this()); + } + return nullptr; + } + template + std::shared_ptr expect() { + auto r = cast(); + AT_ASSERT(r); + return r; + } + template + std::shared_ptr expect() const { + auto r = cast(); + AT_ASSERT(r); + return r; + } + virtual ~Type() = default; + virtual bool hasFreeVariables() const { + return false; + } + // list of types this type contains, e.g. for a List then element type of a + // list for a tuple, the types of the tuple elements + virtual at::ArrayRef containedTypes() const { + return {}; + } + // create a new version of this type, replacing its contained types with + // contained_types + TypePtr withContained(std::vector contained_types) { + auto current_contained = containedTypes(); + AT_ASSERT(current_contained.size() == contained_types.size()); + if (current_contained.equals(contained_types)) { + return shared_from_this(); + } + return createWithContained(std::move(contained_types)); + } + // per-type constructor, you only need to override this if the + // containedTypes() is not empty + virtual TypePtr createWithContained( + std::vector contained_types) const { + AT_ERROR( + "type with contained types did not overload createWithContained: ", + str()); + } +}; + +struct AnyType; +using AnyTypePtr = std::shared_ptr; +// Any is the top of the type hierarchy, all other types are subtypes +// T <: Any, forall T +struct CAFFE2_API AnyType : public Type { + static AnyTypePtr create() { + return AnyTypePtr( + new AnyType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "Any"; + } + static const TypeKind Kind = TypeKind::AnyType; + // global singleton + static AnyTypePtr get(); + + private: + AnyType() : Type(TypeKind::AnyType) {} +}; + +inline std::string toString(TypePtr typePtr) { + return typePtr->str(); +} + +inline bool operator!=(const Type& lhs, const Type& rhs) { + return !(lhs == rhs); +} + +// common base for all types that have a single sub element +// e.g. 
Future[T], Option[T], List[T] +template +struct SingleElementType : public Type { + static const TypeKind Kind = K; + + TypePtr getElementType() const { + return elem; + } + + bool hasFreeVariables() const override { + return getElementType()->hasFreeVariables(); + } + + at::ArrayRef containedTypes() const override { + return elem; + } + + bool operator==(const Type& rhs) const override { + if (auto rhs_ = rhs.cast()) { + return *getElementType() == *rhs_->getElementType(); + } + return false; + } + + protected: + SingleElementType(TypePtr elem) : Type(Kind), elem(std::move(elem)) {} + + private: + TypePtr elem; +}; + +struct OptionalType; +using OptionalTypePtr = std::shared_ptr; +// This type represents an optional type, for each element type. +// Optional[T] can accept both T and None(nullopt in C++) +// Subtype hierarchy for Optional: +// 1. Optional[T] <: Optional[R] iff T <: R +// 2. T <: Optional[R] if T <: R +// 3. None <: Optional[T] for all T +struct CAFFE2_API OptionalType + : public SingleElementType { + static OptionalTypePtr create(TypePtr element) { + // Optional is a union of [None, T], so Optional[[Optional[T]]] -> + // Optional[T] + if (auto opt_ptr = element->cast()) { + return opt_ptr; + } + return OptionalTypePtr( + new OptionalType(std::move(element))); // NOLINT(modernize-make-shared) + } + + std::string str() const override { + std::stringstream ss; + ss << getElementType()->str() << "?"; + return ss.str(); + } + std::string python_str() const override { + std::stringstream ss; + ss << "Optional[" << getElementType()->python_str() << "]"; + return ss.str(); + } + + TypePtr createWithContained( + std::vector contained_types) const override { + AT_ASSERT(contained_types.size() == 1); + return create(contained_types[0]); + } + + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override { + if (Type::isSubtypeOfExt(rhs, why_not)) { + return true; + } + if (auto rhs_ = rhs->cast()) { + return getElementType()->isSubtypeOfExt(rhs_->getElementType(), why_not); + } + return false; + } + // common cast Optional[Tensor] for undefined tensor type + static OptionalTypePtr ofTensor(); + + private: + OptionalType(TypePtr elem) : SingleElementType(elem) {} +}; + +template +inline c10::optional merge_primitive( + const c10::optional& a, + const c10::optional& b) { + if (a.has_value() && b.has_value() && a.value() == b.value()) { + return a; + } + return c10::optional{}; +} + +// `VaryingShape` tracks if individual dimensions or a rank vary across +// profiled runs. A *varying* or *dynamic* dimension is expressed as +// an empty c10::optional in `sizes_`. If a rank is dynamic, the entire +// `sizes_` becomes the empty optional. 
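A brief usage sketch (not part of the original source) of the VaryingShape struct declared just below, showing the three situations the comment above distinguishes:

#include <ATen/core/jit_type.h>
#include <vector>

void varying_shape_sketch() {
  // Fully concrete: rank 2 with dims 3 and 4 -> isComplete() is true.
  c10::VaryingShape concrete(std::vector<int64_t>{3, 4});
  // Known rank, unknown dims: two empty optionals -> size() is 2, isComplete() is false.
  c10::VaryingShape rank_only(size_t(2));
  // Unknown rank: the sizes optional itself is empty -> size() is c10::nullopt.
  c10::VaryingShape dynamic;
  (void)concrete; (void)rank_only; (void)dynamic;
}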
+struct CAFFE2_API VaryingShape { + using ListOfOptionalInts = std::vector>; + VaryingShape(const std::vector& vec) + : VaryingShape(ListOfOptionalInts(vec.begin(), vec.end())) {} + + VaryingShape(c10::ArrayRef vec) + : VaryingShape(ListOfOptionalInts(vec.begin(), vec.end())){} + + VaryingShape(c10::optional size = c10::nullopt) : dims_(c10::nullopt) { + if (size) { + dims_ = ListOfOptionalInts(*size); + } + } + + VaryingShape(ListOfOptionalInts dims) + : dims_(std::move(dims)) {} + + VaryingShape(size_t size) : VaryingShape(c10::optional(size)) {} + + bool operator==(const VaryingShape& other) const { + return dims_ == other.dims_; + } + + const c10::optional& operator[](int i) const { + if (!dims_) { + throw std::runtime_error("Rank isn't fixed"); + } + return (*dims_).at(i); + } + + c10::optional size() const { + if (!dims_) { + return c10::nullopt; + } + const auto& dims = dims_.value(); + return dims.size(); + } + + const c10::optional& sizes() const { + return dims_; + } + + VaryingShape merge(const VaryingShape& other) const; + + c10::optional> concrete_sizes() const { + if (!dims_) { + return c10::nullopt; + } + std::vector sizes; + for (auto d : *dims_) { + if (!d) { + return c10::nullopt; + } + sizes.push_back(d.value()); + } + return sizes; + } + + bool isComplete() const { + if (!dims_) { + return false; + } + for (auto d : *dims_) { + if(!d) { + return false; + } + } + return true; + } + + private: + c10::optional dims_; +}; + +using VaryingStrides = VaryingShape; + +struct TensorType; +using TensorTypePtr = std::shared_ptr; +// This type represents a single Tensor with a specific size +struct CAFFE2_API TensorType : public Type { + static TensorTypePtr create(const at::Tensor& t) { + return TensorTypePtr(new TensorType(t)); + } + + static TensorTypePtr create(c10::optional scalar_type, + c10::optional device, + const VaryingShape &sizes, + const VaryingStrides &strides, + c10::optional requires_grad, + c10::optional undefined = false) { + return TensorTypePtr(new TensorType(scalar_type, device, sizes, strides, + requires_grad, undefined)); + } + + static TensorTypePtr create( + c10::optional scalar_type, + c10::optional device, + c10::optional dim, + c10::optional requires_grad) { + return TensorType::create( + scalar_type, + device, + VaryingShape(dim), + VaryingShape(dim), + requires_grad); + } + + // overloaded create variadic template argument as it could not distinguish + // initializer list + static TensorTypePtr createContiguous( + at::ScalarType scalar_type, + at::Device device, + at::IntArrayRef sizes) { + return create( + scalar_type, + device, + VaryingShape(sizes), + VaryingShape(contiguousStridesOf(sizes)), + c10::nullopt); + } + static TensorTypePtr create( + at::ScalarType scalar_type, + at::Device device, + at::IntArrayRef sizes, + at::IntArrayRef strides) { + return create( + scalar_type, + device, + VaryingShape(sizes), + c10::VaryingShape(strides), + c10::nullopt); + } + static TypePtr fromNumberType(TypePtr typ); + static TypePtr fromBoolType(); + + c10::optional dim() const { + return sizes().size(); + } + + const VaryingShape& sizes() const { + return sizes_; + } + const VaryingStrides& strides() const { + return strides_; + } + c10::optional device() const { + return device_; + } + c10::optional scalarType() const { + return scalar_type_; + } + c10::optional requiresGrad() const { + return requires_grad_; + } + bool requires_grad() const override { + return requires_grad_ ? 
*requires_grad_ : true; + } + + + bool operator==(const Type& rhs) const override { + if (rhs.kind() != kind()) { + return false; + } + + auto rt = rhs.expect(); + return scalar_type_ == rt->scalarType() && sizes() == rt->sizes() && + strides() == rt->strides() && device() == rt->device() && + requiresGrad() == rt->requiresGrad() && + undefined() == rt->undefined(); + } + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override; + + std::string str() const override; + + c10::optional numel() const { + size_t prod = 1; + const auto& shape = sizes(); + + for (size_t i = 0; i < shape.size(); i++) { + if (!shape[i]) { + return c10::optional{}; + } + prod *= shape[i].value(); + } + return prod; + } + + TensorTypePtr withRequiresGrad(c10::optional s) { + auto copy = clone(); + copy->requires_grad_ = s; + return copy; + } + + TensorTypePtr withScalarType(c10::optional st) { + auto copy = clone(); + copy->scalar_type_ = st; + return copy; + } + + + TensorTypePtr withDim(c10::optional d) { + auto copy = clone(); + copy->sizes_ = VaryingShape(d); + copy->strides_ = VaryingShape(d); + return copy; + } + + TensorTypePtr withSizesStrides( + at::IntArrayRef sizes, + at::IntArrayRef strides) const { + auto cloned = clone(); + cloned->sizes_ = VaryingShape(sizes); + cloned->strides_ = VaryingStrides(strides); + return cloned; + } + + TensorTypePtr withSizes(at::IntArrayRef sizes) const { + return withSizesStrides( + sizes, contiguousStridesOf(sizes)); + } + + TensorTypePtr dimensionedOnly() const { + auto copy = clone(); + copy->sizes_ = VaryingShape(sizes().size()); + copy->strides_ = VaryingShape(sizes().size()); + return copy; + } + + TensorTypePtr contiguous() const { + auto cloned = clone(); + if (auto concrete_sizes = sizes().concrete_sizes()) { + cloned->strides_ = VaryingShape(contiguousStridesOf(*concrete_sizes)); + } else { + cloned->strides_ = VaryingShape(sizes().size()); + } + return cloned; + } + + TensorTypePtr merge(TensorTypePtr other) const; + + // is all information about the type specified except for autograd? + // This replaces the notion of a 'CompleteTensorType' that used to exist + // in the type-hierarchy. Excluding require_grad and undefined allows + // this to match the old behavior. 
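The completeness notion checked by isComplete() below follows from VaryingShape: a TensorType is complete only when dtype, device, rank, every dimension and every stride are fixed. A minimal sketch under the same include-path assumption as before:

#include <ATen/core/jit_type.h>

// Sketch: a fully specified TensorType versus one where only the rank is kept.
void tensor_type_completeness_sketch() {
  using namespace c10;
  auto full = TensorType::createContiguous(at::kFloat, at::kCPU, {2, 3});
  bool complete = full->isComplete();            // true: dtype, device, sizes, strides known
  auto ranked = full->dimensionedOnly();         // keep rank 2, drop the concrete dimensions
  bool still_complete = ranked->isComplete();    // false: dimensions are now varying
  c10::optional<size_t> rank = ranked->sizes().size();  // still holds 2
  (void)complete; (void)still_complete; (void)rank;
}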
+ bool isComplete() const { + return scalar_type_ && device_ && sizes_.isComplete() && strides_.isComplete(); + } + + // this property is used by GuardElimination + // please see `checkInputs` for more details + bool isSummarized() const { + return !(isComplete() && requiresGrad().has_value() && + undefined().has_value()); + } + + TensorTypePtr withUndefined() { + auto r = clone(); + r->undefined_ = true; + return r; + } + + TensorTypePtr withPossiblyUndefined() { + auto r = clone(); + r->undefined_ = c10::nullopt; + return r; + } + + c10::optional undefined() const { return undefined_; } + + static TensorTypePtr get(); + + static const TypeKind Kind = TypeKind::TensorType; + + private: + TensorType(const at::Tensor& tensor) + : Type(TypeKind::TensorType), + scalar_type_(tensor.scalar_type()), + device_(tensor.device()), + sizes_(tensor.sizes().size()), + strides_(tensor.sizes().size()), + requires_grad_(tensor.requires_grad()), + undefined_(!tensor.defined()) { + if (!tensor.is_mkldnn() && !tensor.is_sparse()) { + sizes_ = tensor.sizes().vec(); + strides_ = tensor.strides().vec(); + } + } + TensorType( + c10::optional scalar_type, + c10::optional device, + const VaryingShape& sizes, + const VaryingStrides& strides, + c10::optional requires_grad, + c10::optional undefined = false) + : Type(TypeKind::TensorType), + scalar_type_(scalar_type), + device_(device), + sizes_(sizes), + strides_(strides), + requires_grad_(requires_grad), + undefined_(undefined) {} + + TensorTypePtr clone() const { + return TensorTypePtr(new TensorType( + scalar_type_, device_, sizes_, strides_, requires_grad_, undefined_)); + } + + static std::vector contiguousStridesOf(at::IntArrayRef sizes) { + std::vector strides(sizes.size()); + if (sizes.empty()) // zero-dim case + return strides; + strides.back() = 1; + for (size_t i = strides.size() - 1; i > 0; i--) { + strides[i - 1] = strides[i] * sizes[i]; + } + return strides; + } + + c10::optional scalar_type_; + c10::optional device_; + VaryingShape sizes_; + VaryingStrides strides_; + c10::optional requires_grad_; + // we exploit the fact certain tensors must be zero in the autograd to + // optimize gradient computation. Such zero tensors are currently implemented + // with `UndefinedTensorImpl.` They can be handled only by special operators + // (e.g. `AutogradAdd`) and their `Tensor::defined()` property returns false. + // Normally, `undefined_` is set to false, unless a type was created + // with `withUndefined` + // This will also mean that `undefined` tensors will fail + // `subtypeOf(TensorType::get())` check + // undefined_ may become `c10::nullopt` if the tensor was observed to be both + // defined and undefined. However, no tensor type starts out with + // `undefined_` set to `c10::nullopt` + c10::optional undefined_; +}; + +struct ListType; +using ListTypePtr = std::shared_ptr; +struct CAFFE2_API ListType + : public SingleElementType { + // It's not exactly a singleton, but there should be exactly one instance of + // List[T] for every T + friend struct Type; + template + static ListTypePtr create(T&&... 
all) { + return ListTypePtr( + new ListType(std::forward(all)...)); // NOLINT(modernize-make-shared) + } + + std::string str() const override { + std::stringstream ss; + ss << getElementType()->str() << "[]"; + return ss.str(); + } + std::string python_str() const override { + std::stringstream ss; + ss << "List[" << getElementType()->python_str() << "]"; + return ss.str(); + } + TypePtr createWithContained( + std::vector contained_types) const override { + return create(contained_types.at(0)); + } + // common cast List[Tensor] + static ListTypePtr ofTensors(); + static ListTypePtr ofInts(); + static ListTypePtr ofFloats(); + static ListTypePtr ofBools(); + + private: + ListType(TypePtr elem) : SingleElementType(elem) {} +}; + +struct DictType; +using DictTypePtr = std::shared_ptr; +struct CAFFE2_API DictType : public Type { + friend struct Type; + static const TypeKind Kind = TypeKind::DictType; + + static DictTypePtr create(TypePtr key, TypePtr value) { + switch (key->kind()) { + case TypeKind::AnyType: + case TypeKind::IntType: + case TypeKind::FloatType: + case TypeKind::StringType: + case TypeKind::TensorType: + return DictTypePtr(new DictType(key, value)); + default: + AT_ERROR( + "Cannot create dict for key type '", + key->str(), + "', only int, float, Tensor and string keys are supported"); + } + } + + // aligned with the format in FunctionSchema + std::string str() const override { + std::stringstream ss; + ss << "Dict(" << getKeyType()->str() << ", " << getValueType()->str() + << ")"; + return ss.str(); + } + + std::string python_str() const override { + std::stringstream ss; + ss << "Dict[" << getKeyType()->python_str() << ", " + << getValueType()->python_str() << "]"; + return ss.str(); + } + + TypePtr createWithContained( + std::vector contained_types) const override { + if (contained_types.size() != 2) { + throw std::runtime_error("Expected 2 contained types"); + } + return create(contained_types.at(0), contained_types.at(1)); + } + + TypePtr getKeyType() const { + return types.at(0); + } + + TypePtr getValueType() const { + return types.at(1); + } + + bool hasFreeVariables() const override { + return has_free_variables; + } + + at::ArrayRef containedTypes() const override { + return types; + } + + bool operator==(const Type& rhs) const override { + if (auto dict_rhs = rhs.cast()) { + return *getKeyType() == *(dict_rhs->getKeyType()) && + *getValueType() == *(dict_rhs->getValueType()); + } + return false; + } + + private: + DictType(TypePtr key, TypePtr value) + : Type(TypeKind::DictType), + types({key, value}), + has_free_variables( + key->hasFreeVariables() || value->hasFreeVariables()) {} + std::vector types; + bool has_free_variables; +}; + +struct FutureType; +using FutureTypePtr = std::shared_ptr; + +struct CAFFE2_API FutureType + : public SingleElementType { + friend struct Type; + template + static FutureTypePtr create(TypePtr elem) { + return FutureTypePtr( + new FutureType(std::move(elem))); // NOLINT(modernize-make-shared) + } + + std::string str() const override { + std::stringstream ss; + ss << "Future(" << getElementType()->str() << ")"; + return ss.str(); + } + std::string python_str() const override { + std::stringstream ss; + ss << "Future[" << getElementType()->python_str() << "]"; + return ss.str(); + } + TypePtr createWithContained( + std::vector contained_types) const override { + return create(contained_types.at(0)); + } + + private: + FutureType(TypePtr elem) : SingleElementType(elem) {} +}; + +using ::torch::jit::Function; +struct NamedType; +using 
NamedTypePtr = std::shared_ptr; + +struct CAFFE2_API NamedType : public Type { + NamedType(TypeKind tk, c10::optional name) + : Type(tk), name_(std::move(name)) {} + + // Fully qualified name of type + // Looks like: "foo.bar.Baz". + const c10::optional& name() const { + return name_; + } +private: + c10::optional name_; +}; + +// Any should never appear in a named type like a class, namedtuple or +// interface. If it does, then dynamic type information will be lost in the +// Pickler, leading to hard-to-track-down bugs that will only occur +// after saving or loading a model. This is because we rely on the +// static types in named types to reconstruct type tags of loaded +// values. Lifting this restriction requires solving the serialization +// problem first. +CAFFE2_API void checkNoAny( + const Type& base, + const char* what, + const std::string& attrname, + const TypePtr& attrtype); + +struct TupleType; +using TupleTypePtr = std::shared_ptr; +using NameList = std::vector; +// This type represents a Tuple +struct CAFFE2_API TupleType : public NamedType { + static TupleTypePtr createNamed(const c10::optional& name, + const std::vector& field_names, + const std::vector& types); + static TupleTypePtr create( + std::vector types) { + return TupleTypePtr(new TupleType( + std::move(types), + c10::nullopt, + nullptr)); // NOLINT(modernize-make-shared) + } + + at::ArrayRef elements() const { + return elements_; + } + + bool operator==(const Type& rhs) const override; + bool isSubtypeOfExt(const TypePtr rhs_, std::ostream* why_not) const override; + + std::string str() const override; + std::string python_str() const override; + bool hasFreeVariables() const override { + return has_free_variables_; + } + at::ArrayRef containedTypes() const override { + return elements_; + } + TypePtr createWithContained( + std::vector contained_types) const override { + return std::shared_ptr( + new TupleType(std::move(contained_types), name(), schema())); + } + const std::shared_ptr& schema() const { + return schema_; + } + + static const TypeKind Kind = TypeKind::TupleType; + + private: + TupleType( + std::vector elements_, + c10::optional name, + std::shared_ptr schema); + + bool compare( + const Type& rhs, + std::function fn) const { + if (rhs.kind() != kind()) { + return false; + } + + const auto& l_elements = elements(); + const auto& r_elements = rhs.cast()->elements(); + if (l_elements.size() != r_elements.size()) + return false; + for (size_t i = 0; i < l_elements.size(); ++i) { + if (!fn(l_elements[i], r_elements[i])) + return false; + } + return true; + } + + std::vector elements_; + bool has_free_variables_; + std::shared_ptr schema_; +}; + +struct NumberType; +using NumberTypePtr = std::shared_ptr; +// This type represents a Python number +// Subtype hierarchy for Number Types (NumberType as the base type): +// IntType <: NumberType +// FloatType <: NumberType +struct CAFFE2_API NumberType : public Type { + static NumberTypePtr create() { + return NumberTypePtr(new NumberType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "Scalar"; // match what PythonArgParser says for clarity + } + std::string python_str() const override { + return "number"; // technically not a valid python type, but + // we need to use it when parsing back in annotations + // for implicit conversions + } + static const TypeKind Kind = TypeKind::NumberType; + // global singleton + static NumberTypePtr 
get(); + + protected: + NumberType(TypeKind kind = TypeKind::NumberType) : Type(kind) {} +}; + +struct FloatType; +using FloatTypePtr = std::shared_ptr; +// This type represents a Python float number +struct CAFFE2_API FloatType : public NumberType { + static FloatTypePtr create() { + return FloatTypePtr(new FloatType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "float"; + } + std::string python_str() const override { + return "float"; + } + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override { + return rhs->kind() == TypeKind::NumberType || NumberType::isSubtypeOfExt(rhs, why_not); + } + static const TypeKind Kind = TypeKind::FloatType; + // global singleton + static FloatTypePtr get(); + + private: + FloatType() : NumberType(TypeKind::FloatType) {} +}; + +struct IntType; +using IntTypePtr = std::shared_ptr; +// This type represents a Python int number +struct CAFFE2_API IntType : public NumberType { + static IntTypePtr create() { + return IntTypePtr(new IntType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "int"; + } + std::string python_str() const override { + return "int"; + } + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override { + return rhs->kind() == TypeKind::NumberType || NumberType::isSubtypeOfExt(rhs, why_not); + } + static const TypeKind Kind = TypeKind::IntType; + // global singleton + static IntTypePtr get(); + + private: + IntType() : NumberType(TypeKind::IntType) {} +}; + +struct BoolType; +using BoolTypePtr = std::shared_ptr; +// This node represents a Python bool value +struct CAFFE2_API BoolType : public Type { + static BoolTypePtr create() { + return BoolTypePtr(new BoolType()); + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "bool"; + } + static const TypeKind Kind = TypeKind::BoolType; + // global singleton + static BoolTypePtr get(); + + private: + BoolType() : Type(TypeKind::BoolType) {} +}; + +struct StringType; +using StringTypePtr = std::shared_ptr; +// This type represents a Python string +struct CAFFE2_API StringType : public Type { + static StringTypePtr create() { + return StringTypePtr(new StringType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + // we only use "str" (not "string") in both FunctionSchema and script + return python_str(); + } + std::string python_str() const override { + return "str"; + } + static const TypeKind Kind = TypeKind::StringType; + // global singleton + static StringTypePtr get(); + + private: + StringType() : Type(TypeKind::StringType) {} +}; + +struct FunctionType; +using FunctionTypePtr = std::shared_ptr; +using ::torch::jit::Function; +struct CAFFE2_API FunctionType : public NamedType { + static FunctionTypePtr create(Function* function) { + return FunctionTypePtr( + new FunctionType(function)); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + if (auto func_type = rhs.cast()) { + return func_type->function_ == function_; + } + + return false; + } + std::string str() const override { + return "Function"; + } + std::string python_str() const override { + throw "Function"; + } + 
Function* function() const { + return function_; + } + static const TypeKind Kind = TypeKind::FunctionType; + + private: + FunctionType(Function* function); + Function* function_; +}; + +struct NoneType; +using NoneTypePtr = std::shared_ptr; +// This type represents a Python None +struct CAFFE2_API NoneType : public Type { + static NoneTypePtr create() { + return NoneTypePtr(new NoneType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "None"; + } + bool isSubtypeOfExt(const TypePtr rhs, std::ostream *why_not) const override { + if (rhs->kind() == OptionalType::Kind) { + return true; + } + return Type::isSubtypeOfExt(rhs, why_not); + } + static const TypeKind Kind = TypeKind::NoneType; + // global singleton + static NoneTypePtr get(); + + private: + NoneType() : Type(TypeKind::NoneType) {} +}; + +struct GeneratorType; +using GeneratorTypePtr = std::shared_ptr; +// This type represents a Generator +struct CAFFE2_API GeneratorType : public Type { + static GeneratorTypePtr create() { + return GeneratorTypePtr( + new GeneratorType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "Generator"; + } + static const TypeKind Kind = TypeKind::GeneratorType; + // global singleton + static GeneratorTypePtr get(); + + private: + GeneratorType() : Type(TypeKind::GeneratorType) {} +}; + +struct DeviceObjType; +using DeviceObjTypePtr = std::shared_ptr; +// This type represents a Generator +struct CAFFE2_API DeviceObjType : public Type { + static DeviceObjTypePtr create() { + return DeviceObjTypePtr( + new DeviceObjType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "Device"; + } + static const TypeKind Kind = TypeKind::DeviceObjType; + // global singleton + static DeviceObjTypePtr get(); + + private: + DeviceObjType() : Type(TypeKind::DeviceObjType) {} +}; + +struct VarType; +using VarTypePtr = std::shared_ptr; +// This type represents a type variable, used in FunctionSchema +struct VarType : public Type { + static VarTypePtr create(std::string name_) { + return VarTypePtr(new VarType(std::move(name_))); + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return name(); + } + const std::string& name() const { + return name_; + } + bool hasFreeVariables() const override { + return true; + } + static const TypeKind Kind = TypeKind::VarType; + + private: + VarType(std::string name_) + : Type(TypeKind::VarType), name_(std::move(name_)) {} + std::string name_; +}; + +struct CapsuleType; +using CapsuleTypePtr = std::shared_ptr; +// This type represents a Python Capsule +struct CAFFE2_API CapsuleType : public Type { + static CapsuleTypePtr create() { + return CapsuleTypePtr(new CapsuleType()); // NOLINT(modernize-make-shared) + } + bool operator==(const Type& rhs) const override { + return rhs.kind() == kind(); + } + std::string str() const override { + return "Capsule"; + } + static const TypeKind Kind = TypeKind::CapsuleType; + // global singleton + static CapsuleTypePtr get(); +private: + CapsuleType() + : Type(TypeKind::CapsuleType) {} +}; + +CAFFE2_API std::ostream& operator<<(std::ostream& out, const Type& t); +CAFFE2_API std::ostream& operator<<(std::ostream& 
out, const VaryingShape& t); +// what is the type, ignoring extra size/shape information? +// e.g. Tensor(2x3) -> Dynamic, and Tuple(Tensor(2x3),...) -> Tuple(Dynamic,...) + +inline TypePtr unshapedType(const TypePtr& type) { + if (type->isSubtypeOf(TensorType::get())) { + return TensorType::get(); + } + return type->withContained(fmap(type->containedTypes(), unshapedType)); +} + +inline TypePtr TensorType::fromNumberType(TypePtr typ) { + if (typ->isSubtypeOf(IntType::get())) { + return TensorType::createContiguous(at::kLong, at::kCPU, {}); + } else if (typ->isSubtypeOf(FloatType::get())) { + return TensorType::createContiguous(at::kFloat, at::kCPU, {}); + } else if (typ->isSubtypeOf(BoolType::get())) { + return TensorType::createContiguous(at::kLong, at::kCPU, {}); + } + TORCH_CHECK(false, "Unknown number type: ", typ->str()); +} +inline TypePtr TensorType::fromBoolType() { + return TensorType::createContiguous(at::kLong, at::kCPU, {}); +} + +inline c10::optional tryScalarTypeFromJitType(const c10::TypePtr & type) { + if (type == FloatType::get()) { + return at::ScalarType::Double; + } else if (type == IntType::get()) { + return at::ScalarType::Long; + } else if (type == BoolType::get()) { + return at::ScalarType::Bool; + } + return c10::nullopt; +} + +inline at::ScalarType scalarTypeFromJitType(const c10::TypePtr& type) { + auto result = tryScalarTypeFromJitType(type); + AT_ASSERTM( + result, + "Add new condition, expected Float, Int, or Bool but got", + type->str()); + return *result; +} + +// Attempt to find the correct supertype of t1 and t2. If none is found then +// nullopt will be returned. If t1 == t2, or t1 is a type refinement of t2, +// then t2 will be returned (and vice versa). +// Two different tensortypes will return dynamic. +// Currently we chose not to support returning a NumberType for a float & int +// input because of a lack of operator support for NumberType +CAFFE2_API c10::optional unifyTypes( + const TypePtr& t1, + const TypePtr& t2); + +CAFFE2_API c10::optional unifyTypeList(at::ArrayRef elements); + +namespace detail { +template +struct getTypePtr_ final { + static TypePtr call() { + if (!isCustomClassRegistered()) { + throw c10::Error("Type could not be converted to any of the known types.", ""); + } + auto res = getCustomClassType(); + return std::dynamic_pointer_cast(res.type_); + } +}; + +template <> +struct getTypePtr_ final { + static TypePtr call() { + return TensorType::get(); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return FloatType::get(); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return IntType::get(); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return BoolType::get(); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return NumberType::get(); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return OptionalType::create(GeneratorType::get()); + } +}; +template <> +struct getTypePtr_ final { + static TypePtr call() { + return StringType::get(); + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = ListType::create(getTypePtr_::call()); + return type; + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = ListType::create(getTypePtr_::call()); + return type; + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = ListType::create(getTypePtr_::call()); + return type; + } +}; +template 
+struct getTypePtr_> final { + static TypePtr call() { + static auto type = ListType::create(getTypePtr_::call()); + return type; + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = + DictType::create(getTypePtr_::call(), getTypePtr_::call()); + return type; + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = + DictType::create(getTypePtr_::call(), getTypePtr_::call()); + return type; + } +}; +template +struct getTypePtr_> final { + static TypePtr call() { + static auto type = OptionalType::create(getTypePtr_::call()); + return type; + } +}; +} // namespace detail +template +inline TypePtr getTypePtr() { + // TODO: static_assert that a templated function exists, and throw a friendy + // error message if not + return detail::getTypePtr_::call(); +} + +using TypeEnv = std::unordered_map; +struct MatchTypeReturn { + MatchTypeReturn(std::string reason) : reason_(std::move(reason)) {} + static MatchTypeReturn Success() { + return MatchTypeReturn(); + } + bool success() const { + return !reason_.has_value(); + } + const std::string& reason() const { + return reason_.value(); + } + + private: + MatchTypeReturn() + : reason_(c10::nullopt) {} + c10::optional reason_; // is there is no match, this contains the reason +}; + +// attempt to match the type variables in formal to actual, adding them to type_env. +// If no match is possible this returns a MatchTypeReturn with r.success() == false +// and a r.reason() that describes why it could not match. +// note: It is possible to successfully match a formal, but for type variables +// in the formal to still not be defined. In particular, None matches Optional[T] +// but does not define the value of T. +CAFFE2_API MatchTypeReturn +matchTypeVariables(TypePtr formal, TypePtr actual, TypeEnv& type_env); + +// replace type variables appearing in `type` with the values in +// `type_env`. Returns nullptr if a variable used in `type` +// does not appear in `type_env` +CAFFE2_API TypePtr tryEvalTypeVariables(TypePtr type, TypeEnv& type_env); + + +/** + * User Defined Types + */ + +struct ClassType; +using ClassTypePtr = std::shared_ptr; +using ::torch::jit::script::CompilationUnit; + +// This represents a class in TorchScript. +struct CAFFE2_API ClassType : public NamedType { + // Create a class type with name `name` and its methods stored in `cu`. 
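The getTypePtr_ specializations above define how ordinary C++ types map onto JIT types during schema inference. A minimal sketch of that mapping, under the same include-path assumption:

#include <ATen/core/jit_type.h>
#include <iostream>
#include <vector>

// Sketch: the C++ -> JIT type mapping exposed through c10::getTypePtr<T>().
void get_type_ptr_sketch() {
  using namespace c10;
  auto list_of_int = getTypePtr<std::vector<int64_t>>();   // ListType of IntType
  auto opt_float   = getTypePtr<c10::optional<double>>();  // OptionalType of FloatType
  std::cout << list_of_int->python_str() << "\n";           // "List[int]"
  std::cout << opt_float->python_str() << "\n";             // "Optional[float]"
}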
+ static ClassTypePtr create( + c10::optional qualifiedName, + std::weak_ptr cu, + bool is_module = false); + + bool operator==(const Type& rhs) const override { + if (auto user_rhs = rhs.cast()) { + const auto& lhs_name = name().value(); + const auto& rhs_name = user_rhs->name().value(); + + return lhs_name == rhs_name; + } + return false; + } + + std::string str() const override { + return python_str(); + } + + std::string python_str() const override { + const auto& n = name().value(); + return n.qualifiedName(); + } + + const std::vector& methods() const; + + TypePtr getAttribute(const std::string& name) const { + AT_ASSERT(attributeNames_.size() == attributeTypes_.size()); + size_t pos = 0; + for (const auto& attr : attributeNames_) { + if (name == attr) { + break; + } + ++pos; + } + + if (pos >= attributeNames_.size()) { + return nullptr; + } + return attributeTypes_[pos]; + } + + size_t numAttributes() const { + AT_ASSERT(attributeNames_.size() == attributeTypes_.size()); + return attributeNames_.size(); + } + + const TypePtr& getAttribute(size_t slot) const { + AT_ASSERT(attributeNames_.size() == attributeTypes_.size()); + AT_ASSERT(slot < attributeTypes_.size()); + return attributeTypes_[slot]; + } + + const std::string& getAttributeName(size_t slot) const { + AT_ASSERT(attributeNames_.size() == attributeTypes_.size()); + AT_ASSERT(slot < attributeTypes_.size()); + return attributeNames_[slot]; + } + + // Attributes are stored in a specific slot at runtime for effiency. + // When emitting instructions we specify the slot so that attribute access is + // a constant lookup + c10::optional findAttributeSlot(const std::string& name) const { + AT_ASSERT(attributeNames_.size() == attributeTypes_.size()); + size_t slot = 0; + for (const auto& attr : attributeNames_) { + if (name == attr) { + return slot; + } + slot++; + } + return c10::nullopt; + } + size_t getAttributeSlot(const std::string& name) const { + if (auto r = findAttributeSlot(name)) { + return *r; + } + TORCH_CHECK( + false, + python_str(), + " does not have a field with the name '", + name, + "'"); + } + + bool hasAttribute(const std::string& name) const { + return std::find_if( + attributeNames_.cbegin(), + attributeNames_.cend(), + [&](const std::string& attr) { return attr == name; }) != + attributeNames_.cend(); + } + + size_t addAttribute( + const std::string& name, + TypePtr type, + bool is_parameter = false); + + // [Internal Only] Remove attribute from the ClassType, + // caller is responsible to make sure the modification is safe: + // it is unsafe to having existing allocations + // of this object around anymore, and any code that works on + // the attribute is now invalid. Only newly created code is + // valid again. + void unsafeRemoveAttribute(const std::string& name); + + // Add attribute \p NAME if it doesn't exist or verify that it has a + // compatible type otherwise. 
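Attribute access on a ClassType goes through the slot machinery above: a name resolves once to a stable index, so attribute reads compile down to constant lookups. A rough sketch, assuming a ClassType created without a live CompilationUnit suffices for pure attribute bookkeeping (this header does not guarantee that):

#include <ATen/core/jit_type.h>
#include <memory>

// Sketch: attribute names resolve to fixed slots on a ClassType.
void class_type_slots_sketch() {
  using namespace c10;
  auto cls = ClassType::create(
      QualifiedName("__torch__.Foo"),
      std::weak_ptr<CompilationUnit>());          // assumption: no methods needed here
  cls->addAttribute("weight", TensorType::get());
  cls->addAttribute("bias", TensorType::get());
  auto slot = cls->findAttributeSlot("bias");     // c10::optional<size_t> holding 1
  auto type = cls->getAttribute("bias");          // TensorType
  (void)slot; (void)type;
}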
+ size_t addOrCheckAttribute( + const std::string& name, + TypePtr ty, + bool is_parameter = false) { + auto slot_idx = findAttributeSlot(name); + if (!slot_idx) { + return addAttribute(name, ty, is_parameter); + } + + TORCH_CHECK( + is_parameter == this->is_parameter(*slot_idx), + "Parameter field mismatch for the field '", + name, + "'"); + TypePtr atype = getAttribute(*slot_idx); + TORCH_CHECK( + ty->isSubtypeOf(atype), + ty->python_str(), + " is not compatible with the type ", + atype->python_str(), + " for the field '", + name, + "'"); + return *slot_idx; + } + + at::ArrayRef attributeNames() const { + return attributeNames_; + } + + at::ArrayRef containedTypes() const override { + return attributeTypes_; + } + + TypePtr createWithContained(std::vector contained_types) const override { + auto ptr = ClassType::create(name(), compilation_unit_); + AT_ASSERT(numAttributes() == contained_types.size()); + for(size_t i = 0; i < attributeNames_.size(); ++i) { + AT_ASSERT(attributeTypes_[i]->isSubtypeOf(contained_types[i])); + ptr->addAttribute(attributeNames_[i], contained_types[i]); + } + // Copy methods over + for (const auto& method : methods()) { + ptr->addMethod(method); + } + return ptr; + } + + bool is_module() const override { + return bool(parameterSlots_); + } + bool is_parameter(size_t slot) const { + TORCH_INTERNAL_ASSERT( + is_module(), "asking for parameterSlots of non-Module"); + return parameterSlots_->at(slot); + } + + void addMethod(Function* method); + Function* getMethod(const std::string& name) const; + + std::shared_ptr compilation_unit(); + std::shared_ptr compilation_unit() const; + + // generate a refined version of this class. + // It has the same name but the slot Types are subtypes of + // the original slots. It is only valid to refine a class type in a context + // where it is know that there are not assignments to the objects slots + // that would invalidate the refinement. + // These variants are not registered in the global class table. + ClassTypePtr refine(at::ArrayRef refined_slots) const; + + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override; + + static const TypeKind Kind = TypeKind::ClassType; + + private: + ClassType( + c10::optional name, + std::weak_ptr cu, + bool is_module); + + // Mapping of attribute names -> their type. + // NOTE: this does not contain methods, which are stored in the module + // TODO: once modules support arbitrary ivalue attributes, we don't need this + // anymore. + // TODO: This is better represented as an OrderedDict, but alas it is not yet + // available from c10 + std::vector attributeNames_; + std::vector attributeTypes_; + // Holds method attributes + std::weak_ptr compilation_unit_; + + // if present, this class inherits from torch.nn.Module + // and these are the indices of the attributes which are parameters + std::shared_ptr> parameterSlots_; + + // List of methods associated with this class. + std::vector methods_; + +}; + +struct InterfaceType; +using InterfaceTypePtr = std::shared_ptr; +using ::torch::jit::script::CompilationUnit; +using ::torch::jit::Function; + +// Interfaces are a list of abstract methods that a class might meet. +// If a class provides those methods, it implicitly meets the interface. + +// Subtype relations for Interface with ClassType: +// lhs (ClassType or InterfaceType) is a subtype of rhs if: +// 1. lhs methods are a superset of rhs methods +// 2. 
if rhs is module interface, the lhs must be module interface or module itself +struct CAFFE2_API InterfaceType : public NamedType { + static InterfaceTypePtr create( + QualifiedName qualifiedName, bool is_module=false); + + bool operator==(const Type& rhs) const override { + if (auto user_rhs = rhs.cast()) { + return name() == user_rhs->name(); + } + return false; + } + + std::string str() const override { + return std::string("InterfaceType<") + name()->name() + ">"; + } + + std::string python_str() const override { + return name()->qualifiedName(); + } + + bool isSubtypeOfExt(const TypePtr rhs, std::ostream* why_not) const override; + + // try to find a method of this interface, + // returns nullptr if not found. + const FunctionSchema* getMethod(const std::string& name) const; + void addMethod(FunctionSchema schema); + const std::vector& methods() { + return *methods_; + } + + bool is_module() const override{ + return is_module_; + } + static const TypeKind Kind = TypeKind::InterfaceType; + ~InterfaceType() override; + private: + InterfaceType(QualifiedName name, bool is_module); + + // shared_ptr so that this header does not have to depend on + // FunctionSchema.h + std::shared_ptr> methods_; + // flag to distinguish if it's an interface type from a module or not + bool is_module_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/ATen/core/op_registration/infer_schema.h b/thirdparty/libtorch/include/ATen/core/op_registration/infer_schema.h new file mode 100644 index 0000000000..9971435cf7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/op_registration/infer_schema.h @@ -0,0 +1,144 @@ +#pragma once + +/** + * This file contains functionality to take a C++ function and infer its + * c10::FunctionSchema. + */ + +#include +#include +#include + +namespace c10 { +namespace detail { + +namespace infer_schema { + +/// The templated inference code creates `ArgumentDef` instead of `Argument`, +/// because that can be constructed at compile time and has a much smaller +/// binary size than having calls to `Argument` constructors in the template. +/// Creating `Argument` objects from `ArgumentDef` can then be done at +/// runtime in a non-templated way. +struct ArgumentDef final { + using GetTypeFn = TypePtr(); + GetTypeFn* getTypeFn; +}; + +template +struct bool_t {}; +template<> struct bool_t : std::true_type {}; +template<> struct bool_t : std::false_type {}; + +/// Checks the static C++ types `Types` for correctness to catch common error cases. +template +constexpr int checkStaticTypes() { + // Give nice error messages for some of the common error cases. + // Use a LOUD ERROR MESSAGE SO USERS SEE THE STATIC_ASSERT + static_assert(guts::conjunction< + bool_t::value || std::is_same::value || std::is_same::value>... + >::value, "INVALID TYPE: Only int64_t and bool are supported as an integral argument type"); + static_assert(guts::conjunction< + bool_t::value>... + >::value, "INVALID TYPE: float is not supported as an argument type, use double instead"); + return 0; +} + +template +constexpr std::array createArgumentVectorFromTypes(guts::index_sequence) { + return ( + // Check types for common errors + checkStaticTypes(), + + // Create the return value + std::array{{ArgumentDef{&getTypePtr_>::call}...}} + ); +} + +/// Creates a vector of `ArgumentDef` from a list of C++ types that are specified +/// as template arguments. 
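These helpers feed the inferFunctionSchema entry point declared at the end of this file, which turns a C++ signature into a FunctionSchema with placeholder argument names. A minimal sketch, assuming the template argument is a plain function-pointer type and the header resolves as <ATen/core/op_registration/infer_schema.h>:

#include <ATen/ATen.h>
#include <ATen/core/op_registration/infer_schema.h>

namespace {
// Only the signature matters for schema inference; the body is irrelevant.
at::Tensor my_add(at::Tensor a, at::Tensor b, int64_t alpha) {
  (void)b; (void)alpha;
  return a;
}
} // namespace

void infer_schema_sketch() {
  auto schema = c10::inferFunctionSchema<decltype(&my_add)>("myops::my_add", "");
  // Arguments come back named "_0", "_1", ... so the schema reads roughly:
  //   myops::my_add(Tensor _0, Tensor _1, int _2) -> Tensor
  (void)schema;
}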
+template struct createArguments final {}; +template +struct createArguments> final { + static constexpr std::array call() { + return createArgumentVectorFromTypes( + guts::make_index_sequence() + ); + } +}; + +/// Creates a vector of `ArgumentDef` from a list of C++ types that are specified +/// as a tuple (i.e. in the way c10 kernels return values). +/// It can be a tuple if there's three output arguments with types A, B, C. +/// It can be an empty tuple<>, or void for kernels that don't return anything. +/// It can be a single type A (i.e. no tuple) for the case where a kernel just +/// returns one value. +template struct createReturns final {}; + +template +struct createReturns, void> final { + static constexpr std::array call() { + return createArgumentVectorFromTypes( + guts::make_index_sequence() + ); + } +}; + +template +struct createReturns::value && !guts::is_instantiation_of::value>> final { + static constexpr std::array call() { + return createReturns>::call(); + } +}; + +template<> +struct createReturns final { + static constexpr std::array call() { + return createReturns>::call(); + } +}; + +template +std::vector createArgumentVector(const std::array& args) { + std::vector result; + result.reserve(NumArgs); + for (size_t i = 0; i < args.size(); ++i) { + // Arguments are named "_" + result.push_back(Argument("_" + c10::guts::to_string(i), (*args[i].getTypeFn)())); + } + return result; +} + +// This is intentionally a separate function +// because then the template is smaller and that benefits binary size +inline FunctionSchema make_function_schema(std::string&& name, std::string&& overload_name, std::vector&& arguments, std::vector&& returns) { + return FunctionSchema(std::move(name), std::move(overload_name), std::move(arguments), std::move(returns)); +} + +template +inline FunctionSchema make_function_schema(std::string&& name, std::string&& overload_name, const std::array& arguments, const std::array& returns) { + return make_function_schema(std::move(name), std::move(overload_name), createArgumentVector(arguments), createArgumentVector(returns)); +} + +/// Creates a `FunctionSchema` object from a `FunctionTraits` type for a +/// function. +template +FunctionSchema createFunctionSchemaFromTraits(std::string&& name, std::string&& overload_name) { + using ReturnType = typename FunctionTraits::return_type; + using ParameterTypes = typename FunctionTraits::parameter_types; + + constexpr auto arguments = createArguments::call(); + constexpr auto returns = createReturns::call(); + + return make_function_schema(std::move(name), std::move(overload_name), arguments, returns); +} +} +} + +template +FunctionSchema inferFunctionSchema(std::string&& name, std::string&& overload_name) { + return detail::infer_schema::createFunctionSchemaFromTraits>(std::move(name), std::move(overload_name)); +} + +CAFFE2_API c10::optional findSchemaDifferences(const FunctionSchema& inferred, const FunctionSchema& specified); + +} diff --git a/thirdparty/libtorch/include/ATen/core/op_registration/op_registration.h b/thirdparty/libtorch/include/ATen/core/op_registration/op_registration.h new file mode 100644 index 0000000000..a87768485f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/op_registration/op_registration.h @@ -0,0 +1,591 @@ +#pragma once + +/** + * Include this file if you want to register operators. It includes all + * functionality needed to do so for you. 
+ */ + +#include +#include +#if !defined(CAFFE2_IS_XPLAT_BUILD) +#include +#endif +#include + +namespace c10 { + +/** + * An instance of this class handles the registration for one or more operators. + * Make sure you keep the RegisterOperators instance around since it will + * deregister the operator it's responsible for in its destructor. + * + * Example: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId)); + */ +class CAFFE2_API RegisterOperators final { +public: + RegisterOperators(); + ~RegisterOperators(); + + RegisterOperators(const RegisterOperators&) = delete; + RegisterOperators& operator=(const RegisterOperators&) = delete; + RegisterOperators(RegisterOperators&&) noexcept; + RegisterOperators& operator=(RegisterOperators&&) noexcept; + + class CAFFE2_API Options final { + public: + Options(const Options&) = delete; + Options(Options&&) noexcept = delete; + Options& operator=(const Options&) = delete; + Options& operator=(Options&&) noexcept = delete; + + // internal-only for registering stack based kernels + Options&& kernel(TensorTypeId dispatch_key, KernelFunction::BoxedKernelFunction* kernel_func) && { + return std::move(*this).kernel(dispatch_key, KernelFunction::makeFromBoxedFunction(kernel_func), nullptr); + } + + // internal-only for registering stack based catch-all kernels + Options&& catchAllKernel(KernelFunction::BoxedKernelFunction* kernel_func) && { + return std::move(*this).kernel(c10::nullopt, KernelFunction::makeFromBoxedFunction(kernel_func), nullptr); + } + + // internal only for registering caffe2 ops + Options&& schema(FunctionSchema&& schema) { + TORCH_CHECK(!schemaOrName_.has_value(), "You can only specify the schema once per operator registration."); + schemaOrName_ = c10::make_right(std::move(schema)); + return std::move(*this); + } + + /** + * Use this to specify the schema for an operator. You can also specify + * the operator name only to have the function signature part of the + * schema be inferred from the kernel function. + * + * Example: + * + * > // Infer function signature from my_kernel_cpu + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId)); + * > + * > + * > // Explicitly specify full schema + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op(Tensor a) -> Tensor") + * > .kernel(TensorTypeId::CPUTensorId)); + */ + Options&& schema(const std::string& schemaOrName) { + TORCH_CHECK(!schemaOrName_.has_value(), "Tried to register operator ", schemaOrName," but specified schema multiple times. You can only specify the schema once per operator registration."); + + #if defined(CAFFE2_IS_XPLAT_BUILD) + throw std::logic_error("Tried to register operator " + schemaOrName + ". We don't support registering c10 ops on mobile yet because the function schema parser isn't present in the mobile build."); + #else + schemaOrName_ = torch::jit::parseSchemaOrName(schemaOrName); + #endif + + return std::move(*this); + } + + /** + * Use this to register an operator whose kernel is implemented as a functor. + * The kernel is only called for inputs matching the given dispatch key. 
+ * You can register multiple kernels for different dispatch keys. + * + * Example: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId)); + * + * The functor constructor can take arguments to configure the kernel. + * The arguments are defined in the kernel registration. + * Example: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > explicit my_kernel_cpu(std::string some_configuration, int a, bool b) + * > : ... {...} + * > + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId, "some_configuration", 3, true)); + */ + template + // enable_if: only enable it if KernelFunctor is actually a functor + guts::enable_if_t::value, Options&&> kernel(TensorTypeId dispatch_key, ConstructorParameters&&... constructorParameters) && { + static_assert(std::is_base_of::value, "Tried to register a kernel functor using the kernel() API, but it doesn't inherit from c10::OperatorKernel. Please have the functor inherit from it."); + static_assert(std::is_constructible::value, "Wrong argument list for constructor of kernel functor. The arguments to kernel(arguments...) must match one of the constructors of Functor."); + + return std::move(*this).kernel( + std::move(dispatch_key), + KernelFunction::makeFromUnboxedFunctorFactory(detail::KernelFactory...>(std::forward(constructorParameters)...)), + detail::FunctionSchemaInferer()() + ); + } + + /** + * Use this to register an operator whose kernel is implemented as a functor. + * The kernel is a catch-all kernel, meaning it's called independent from + * the input. Dispatch is disabled for this operator. + * + * Example: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .catchAllKernel()); + * + * The functor constructor can take arguments to configure the kernel. + * The arguments are defined in the kernel registration. + * Example: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > explicit my_kernel_cpu(std::string some_configuration, int a, bool b) + * > : ... {...} + * > + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .catchAllKernel("some_configuration", 3, true)); + */ + template + // enable_if: only enable it if KernelFunctor is actually a functor + guts::enable_if_t::value, Options&&> catchAllKernel(ConstructorParameters&&... constructorParameters) && { + static_assert(std::is_base_of::value, "Tried to register a kernel functor using the kernel() API, but it doesn't inherit from c10::OperatorKernel. Please have the functor inherit from it."); + static_assert(std::is_constructible::value, "Wrong argument list for constructor of kernel functor. The arguments to kernel(arguments...) 
must match one of the constructors of Functor."); + + return std::move(*this).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedFunctorFactory(detail::KernelFactory...>(std::forward(constructorParameters)...)), + detail::FunctionSchemaInferer()() + ); + } + + /** + * Use this to register an operator whose kernel is implemented by a function. + * The kernel is only called for inputs matching the given dispatch key. + * You can register multiple kernels for different dispatch keys. + * + * Example: + * + * > namespace { Tensor my_kernel_cpu(Tensor a, Tensor b) {...} } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId)); + */ + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> kernel(TensorTypeId dispatch_key) && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) API or also implement the kernel function as defined by the public API."); + static_assert(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + std::move(dispatch_key), + KernelFunction::makeFromUnboxedFunction(), + // TODO Do schema inference without relying on WrapKernelFunction + detail::FunctionSchemaInferer::type>()() + ); + } + + /** + * Use this to register an operator whose kernel is implemented by a function. + * The kernel is a catch-all kernel, meaning it's called independent from + * the input. Dispatch is disabled for this operator. + * + * Example: + * + * > namespace { Tensor my_kernel_cpu(Tensor a, Tensor b) {...} } + * > + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .catchAllKernel()); + */ + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> catchAllKernel() && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) API or also implement the kernel function as defined by the public API."); + static_assert(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedFunction(), + // TODO Do schema inference without relying on WrapKernelFunction + detail::FunctionSchemaInferer::type>()() + ); + } + + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> kernel(TensorTypeId dispatch_key, FuncType* kernel_func) && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) 
API or also implement the kernel function as defined by the public API."); + TORCH_INTERNAL_ASSERT(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + std::move(dispatch_key), + KernelFunction::makeFromUnboxedRuntimeFunction(kernel_func), + // TODO Do schema inference without relying on WrapKernelFunction + detail::FunctionSchemaInferer>>()() + ); + } + + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> catchAllKernel(FuncType* kernel_func) && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) API or also implement the kernel function as defined by the public API."); + TORCH_INTERNAL_ASSERT(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedRuntimeFunction(kernel_func), + // TODO Do schema inference without relying on WrapKernelFunction + detail::FunctionSchemaInferer>>()() + ); + } + + // TODO Remove impl_unboxedOnlyKernel once all of aten can generate boxed kernels + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> impl_unboxedOnlyKernel(TensorTypeId dispatch_key) && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) API or also implement the kernel function as defined by the public API."); + static_assert(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + std::move(dispatch_key), + KernelFunction::makeFromUnboxedOnlyRuntimeFunction(kernel_func), + nullptr // disable function schema inference because some ops from native_functions.yaml don't support it yet + ); + } + + // TODO Remove impl_unboxedOnlyCatchAllKernel once all of aten can generate boxed kernels + template + // enable_if: only enable it if FuncType is actually a function + guts::enable_if_t::value, Options&&> impl_unboxedOnlyCatchAllKernel() && { + static_assert(!std::is_same::value, "Tried to register a stackbased (i.e. internal) kernel function using the public kernel<...>() API. Please either use the internal kernel(...) API or also implement the kernel function as defined by the public API."); + static_assert(kernel_func != nullptr, "Kernel function cannot be nullptr"); + + return std::move(*this).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedOnlyRuntimeFunction(kernel_func), + nullptr // disable function schema inference because some ops from native_functions.yaml don't support it yet + ); + } + + /** + * Use this to register an operator whose kernel is implemented as a lambda. + * The kernel is only called for inputs matching the given dispatch key. + * You can register multiple kernels for different dispatch keys. + * + * The lambda must be stateless, i.e. not have a capture. If your kernel + * needs to store some configuration parameters, write the kernel as a + * functor instead. 
+ * + * Example: + * + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .kernel(TensorTypeId::CPUTensorId, [] (Tensor a) -> Tensor {...})); + */ + template + // enable_if: only enable it if Lambda is a functor (note: lambdas are functors) + guts::enable_if_t< + guts::is_functor>::value + && !std::is_same>::func_type, KernelFunction::BoxedKernelFunction>::value, + Options&&> kernel(TensorTypeId dispatch_key, Lambda&& functor) && { + static_assert(!std::is_base_of>::value, "The kernel(x) API for registering a kernel is only meant to be used with lambdas. Your kernel is a functor. Please use the kernel() API instead."); + + // We don't support stateful lambdas (i.e. lambdas with a capture), because their + // behavior would be nonobvious. A functor kernel with cache gets a new instance of + // its cache each time the kernel is looked up from the dispatch table. + // A lambda with a capture would be global and share its capture between all kernel lookups. + // So, instead of making users having to think about it (including the thread-safety + // issues this causes), let's just forbid stateful lambdas alltogether. + static_assert(guts::is_stateless_lambda>::value, "The kernel(x) API for registering a kernel only works for stateless lambdas (i.e. lambdas without captures). If you need a cache, please use the functor based API kernel() instead."); + + return std::move(*this).kernel( + std::move(dispatch_key), + KernelFunction::makeFromUnboxedLambda(std::forward(functor)), + // TODO Do schema inference without relying on WrapRuntimeKernelFunctor + detail::FunctionSchemaInferer>>()() + ); + } + + /** + * Use this to register an operator whose kernel is implemented as a lambda. + * The kernel is a catch-all kernel, meaning it's called independent from + * the input. Dispatch is disabled for this operator. + * + * The lambda must be stateless, i.e. not have a capture. If your kernel + * needs to store some configuration parameters, write the kernel as a + * functor instead. + * + * Example: + * + * > static auto registry = c10::RegisterOperators() + * > .op(c10::RegisterOperators::options() + * > .schema("my_op") + * > .catchAllKernel([] (Tensor a) -> Tensor {...})); + */ + template + // enable_if: only enable it if Lambda is a functor (note: lambdas are functors) + guts::enable_if_t< + guts::is_functor>::value + && !std::is_same>::func_type, KernelFunction::BoxedKernelFunction>::value, + Options&&> catchAllKernel(Lambda&& lambda) && { + static_assert(!std::is_base_of>::value, "The kernel(x) API for registering a kernel is only meant to be used with lambdas. Your kernel is a functor. Please use the kernel() API instead."); + + // We don't support stateful lambdas (i.e. lambdas with a capture), because their + // behavior would be nonobvious. + // A lambda with a capture would be global and share its capture between all kernel lookups. + // This would be a likely source for unexpected race conditions, so we forbid it. + // If a kernel really needs global state, they can just have regular global state + // in their .cpp file next to the kernel lambda. + static_assert(guts::is_stateless_lambda>::value, "The kernel(x) API for registering a kernel only works for stateless lambdas (i.e. lambdas without captures). 
If you need a cache, please use the functor based API kernel() instead."); + + return std::move(*this).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedLambda(std::forward(lambda)), + // TODO Do schema inference without relying on WrapRuntimeKernelFunctor + detail::FunctionSchemaInferer>>()() + ); + } + + Options&& aliasAnalysis(AliasAnalysisKind aliasAnalysisKind) && { + TORCH_CHECK(!aliasAnalysisKind_.has_value(), "You can only call aliasAnalysis() once per operator registration."); + aliasAnalysisKind_ = aliasAnalysisKind; + return std::move(*this); + } + + private: + Options&& kernel(c10::optional&& dispatch_key, KernelFunction&& func, std::unique_ptr&& inferred_function_schema) && { + KernelRegistrationConfig config; + config.dispatch_key = dispatch_key; + config.func = std::move(func); + config.inferred_function_schema = std::move(inferred_function_schema); + kernels.push_back(std::move(config)); + return std::move(*this); + } + + Options() + : schemaOrName_(c10::nullopt) + , kernels() + , aliasAnalysisKind_(c10::nullopt) + {} + + // KernelRegistrationConfig accumulates all information from the config + // parameters passed to a RegisterOperators::op() call into one object. + struct KernelRegistrationConfig final { + KernelRegistrationConfig() + : dispatch_key(c10::nullopt) + , func() + , inferred_function_schema(nullptr) + {} + + c10::optional dispatch_key; + KernelFunction func; + std::unique_ptr inferred_function_schema; + }; + + c10::optional> schemaOrName_; + + std::vector kernels; + optional aliasAnalysisKind_; + friend class RegisterOperators; + }; + + /** + * Call this to get an instance of registration options, which + * can be passed to a call to RegisterOperators::op() to specify + * these options for the operator registration. + * See class doc comment for examples. + */ + static Options options() { + return {}; + } + + /** + * Call this to register an operator. See class doc comment for examples. + */ + RegisterOperators&& op(Options&& options) && { + checkSchemaAndRegisterOp_(std::move(options)); + return std::move(*this); + } + + /** + * This is a shorthand for RegisterOperators::op(Options) where you can + * specify the operator schema outside of the options parameter. + * See class doc comment for examples. + */ + RegisterOperators&& op(const std::string& schemaOrName, Options&& options = RegisterOperators::options()) && { + return std::move(*this).op(std::move(options).schema(schemaOrName)); + } + + // internal only for registering caffe2 ops + RegisterOperators&& op(FunctionSchema schema, Options&& options) && { + return std::move(*this).op(std::move(options).schema(std::move(schema))); + } + + template + explicit RegisterOperators(const std::string& schemaOrName, FuncType&& func, Options&& options = RegisterOperators::options()) + : RegisterOperators() { + std::move(*this).op(schemaOrName, std::forward(func), std::move(options)); + } + + /** + * This API registers an operator based on a kernel function pointer. 
+ * + * Given a kernel + * + * > namespace { Tensor my_kernel_cpu(Tensor a, Tensor b) {...} } + * + * This API looks like: + * + * > static auto registry = c10::RegisterOperators() + * > .op("my_op", &my_kernel_cpu); + * + * If your kernel is small and the overhead of calling it matters, + * then this API might be the wrong choice since the following API + * has a slightly lower overhead for calling into the kernel: + * + * > static auto registry = c10::RegisterOperators() + * > .op("my_op", c10::RegisterOperators::options() + * > .kernel()); + * + * Or, alternatively, write your kernel as a functor: + * + * > namespace { + * > class my_kernel_cpu final : public c10::OperatorKernel { + * > public: + * > Tensor operator()(Tensor a, Tensor b) {...} + * > }; + * > } + * > + * > static auto registry = c10::RegisterOperators() + * > .op("my_op", c10::RegisterOperators::options() + * > .kernel()); + */ + template + // enable_if: only enable it if FuncType is actually a function, but not a stack based BoxedKernelFunction. + guts::enable_if_t::value && !std::is_same::value, RegisterOperators&&> + op(const std::string& schemaOrName, FuncType* func, Options&& options = RegisterOperators::options()) && { + constexpr bool AllowLegacyTypes = true; + return std::move(*this).op(std::move(options).schema(schemaOrName).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedRuntimeFunction(func), + // TODO Do schema inference without relying on WrapRuntimeKernelFunctor + detail::FunctionSchemaInferer>>()() + )); + } + + /** + * This API registers an operator based on a kernel lambda. + * + * This API looks like: + * + * > static auto registry = c10::RegisterOperators() + * > .op("my_op", [] (Tensor a, Tensor b) {...}); + * + * This is equivalent to: + * + * > static auto registry = c10::RegisterOperators() + * > .op("my_op", c10::RegisterOperators::options() + * > .catchAllKernel([] (Tensor a, Tensor b) {...})); + * + */ + template + // enable_if: only enable it if Lambda is actually a stateless lambda + guts::enable_if_t::value && guts::is_stateless_lambda>::value, RegisterOperators&&> + op(const std::string& schemaOrName, Lambda&& lambda, Options&& options = RegisterOperators::options()) && { + static_assert(!std::is_base_of::value, "c10::OperatorKernel is part of the new kernel registration API and shouldn't be used together with the deprecated registration API. Please use the new RegisterOperators::options().kernel() based API instead."); + + constexpr bool AllowLegacyTypes = true; + return std::move(*this).op(std::move(options).schema(schemaOrName).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedLambda(std::forward(lambda)), + // TODO Do schema inference without relying on WrapRuntimeKernelFunctor + detail::FunctionSchemaInferer>>()() + )); + } + + template + C10_DEPRECATED_MESSAGE("Registering operator kernels with stateful lambdas (i.e. lambdas with a capture) has non-obvious behavior. This is deprecated. Please use a lambda without a capture or a functor class instead.") + // enable_if: only enable it if Lambda is actually a functor but not a stateless lambda + guts::enable_if_t::value && !guts::is_stateless_lambda>::value, RegisterOperators&&> + op(const std::string& schemaOrName, Lambda&& lambda, Options&& options = RegisterOperators::options()) && { + static_assert(!std::is_base_of::value, "c10::OperatorKernel is part of the new kernel registration API and shouldn't be used together with the deprecated registration API. 
Please use the new RegisterOperators::options().kernel() based API instead."); + + constexpr bool AllowLegacyTypes = true; + return std::move(*this).op(std::move(options).schema(schemaOrName).kernel( + c10::nullopt, + KernelFunction::makeFromUnboxedLambda(std::forward(lambda)), + // TODO Do schema inference without relying on WrapRuntimeKernelFunctor + detail::FunctionSchemaInferer>>()() + )); + } + +private: + void checkSchemaAndRegisterOp_(Options&& config); + + static c10::FunctionSchema inferSchemaFromKernels_(const OperatorName& opNameStr, const Options& options); + void checkNoDuplicateKernels_(const Options& options); + void registerOp_(Options&& options); + void registerSchemaAndKernel_(FunctionSchema schema, Options::KernelRegistrationConfig&& config, OperatorOptions&& options); + void registerSchemaOnly_(FunctionSchema&& schema, OperatorOptions&& options); + static OperatorOptions makeOperatorOptions_(const Options& options); + + class OperatorRegistrar; + + std::vector registrars_; + + static_assert(std::is_nothrow_move_constructible>::value, ""); + static_assert(std::is_nothrow_move_assignable>::value, ""); +}; + +} + +namespace torch { + using RegisterOperators = c10::RegisterOperators; +} diff --git a/thirdparty/libtorch/include/ATen/core/operator_name.h b/thirdparty/libtorch/include/ATen/core/operator_name.h new file mode 100644 index 0000000000..323108871c --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/operator_name.h @@ -0,0 +1,40 @@ +#pragma once + +#include +#include + +namespace c10 { + +struct OperatorName final { + std::string name; + std::string overload_name; + OperatorName(std::string name, const std::string& overload_name) + : name(std::move(name)), overload_name(overload_name) {} +}; + +inline bool operator==(const OperatorName& lhs, const OperatorName& rhs) { + return lhs.name == rhs.name && lhs.overload_name == rhs.overload_name; +} + +inline bool operator!=(const OperatorName& lhs, const OperatorName& rhs) { + return !operator==(lhs, rhs); +} + +inline std::string toString(const OperatorName& opName) { + std::string result = opName.name; + if (opName.overload_name.size() != 0) { + result += "." + opName.overload_name; + } + return result; +} + +} + +namespace std { + template <> + struct hash<::c10::OperatorName> { + size_t operator()(const ::c10::OperatorName& x) const { + return std::hash()(x.name) ^ (~ std::hash()(x.overload_name)); + } + }; +} diff --git a/thirdparty/libtorch/include/ATen/core/qualified_name.h b/thirdparty/libtorch/include/ATen/core/qualified_name.h new file mode 100644 index 0000000000..629d05f73e --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/qualified_name.h @@ -0,0 +1,160 @@ +#pragma once + +#include +#include +#include +#include + +namespace c10 { + +// Represents a name of the form "foo.bar.baz" +struct QualifiedName { + QualifiedName() {} + + // `name` can be a dotted string, like "foo.bar.baz", or just a bare name. + /* implicit */ QualifiedName(const std::string& name) { + TORCH_CHECK(!name.empty()); + // split the string into its atoms. 
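+    // e.g. "foo.bar.baz" yields atoms_ == {"foo", "bar", "baz"}; a bare name
+    // such as "foo" produces a single atom.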
+ size_t startSearchFrom = 0; + size_t pos = name.find(delimiter_, startSearchFrom); + + while (pos != std::string::npos) { + auto atom = name.substr(startSearchFrom, pos - startSearchFrom); + AT_ASSERTM( + atom.size() > 0, "Invalid name for qualified name: '", name, "'"); + atoms_.push_back(std::move(atom)); + startSearchFrom = pos + 1; + pos = name.find(delimiter_, startSearchFrom); + } + + auto finalAtom = name.substr(startSearchFrom, pos - startSearchFrom); + AT_ASSERTM( + finalAtom.size() > 0, "Invalid name for qualified name: '", name, "'"); + atoms_.push_back(std::move(finalAtom)); + + cacheAccessors(); + } + + explicit QualifiedName(std::vector atoms) { + for (const auto& atom : atoms) { + TORCH_CHECK(!atom.empty(), "Atom cannot be empty"); + TORCH_CHECK( + atom.find(delimiter_) == std::string::npos, + "Delimiter not allowed in atom"); + } + atoms_ = atoms; + cacheAccessors(); + } + // Unnecessary copy. Ideally we'd use somoething like std::string_view. + /* implicit */ QualifiedName(const char* name) + : QualifiedName(std::string(name)) {} + + // `name` must be a bare name (no dots!) + explicit QualifiedName(const QualifiedName& prefix, std::string name) { + TORCH_INTERNAL_ASSERT(!name.empty()); + TORCH_INTERNAL_ASSERT(name.find(delimiter_) == std::string::npos); + atoms_.insert(atoms_.begin(), prefix.atoms_.begin(), prefix.atoms_.end()); + atoms_.push_back(std::move(name)); + + cacheAccessors(); + } + + // Is `this` a prefix of `other`? + // For example, "foo.bar" is a prefix of "foo.bar.baz" + bool isPrefixOf(const QualifiedName& other) const { + const auto& thisAtoms = atoms_; + const auto& otherAtoms = other.atoms_; + + if (thisAtoms.size() > otherAtoms.size()) { + // Can't be a prefix if it's bigger + return false; + } + for (size_t i = 0; i < thisAtoms.size(); i++) { + if (thisAtoms[i] != otherAtoms[i]) { + return false; + } + } + return true; + } + + // The fully qualified name, like "foo.bar.baz" + const std::string& qualifiedName() const { + return qualifiedName_; + } + + // The leading qualifier, like "foo.bar" + const std::string& prefix() const { + return prefix_; + } + + // The base name, like "baz" + const std::string& name() const { + return name_; + } + + const std::vector& atoms() const { + return atoms_; + } + + bool operator==(const QualifiedName& other) const { + return this->qualifiedName_ == other.qualifiedName_; + } + + bool operator!=(const QualifiedName& other) const { + return !(*this == other); + } + + private: + static constexpr char delimiter_ = '.'; + + // Helper for cacheAccessors() below. + template + std::string join(char delimiter, const T& v) { + std::string out; + size_t reserve = 0; + for (const auto& e : v) { + reserve += e.size() + 1; + } + out.reserve(reserve); + for (size_t i = 0; i < v.size(); ++i) { + if (i != 0) { + out.push_back(delimiter); + } + out.append(v[i]); + } + return out; + } + + void cacheAccessors() { + qualifiedName_ = join(delimiter_, atoms_); + if (atoms_.size() > 1) { + ArrayRef view(atoms_); + const auto prefixView = view.slice(0, view.size() - 1); + prefix_ = join(delimiter_, prefixView); + } + + if (atoms_.size() >= 1) { + name_ = atoms_.back(); + } + } + + // The actual list of names, like "{foo, bar, baz}" + std::vector atoms_; + + /* + * Cached accessors, derived from `atoms_`. 
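+   * For example, QualifiedName("foo.bar.baz") caches qualifiedName_ ==
+   * "foo.bar.baz", prefix_ == "foo.bar" and name_ == "baz".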
+ */ + std::string qualifiedName_; + std::string prefix_; + std::string name_; +}; +} // namespace c10 + +namespace std { +template <> +struct hash { + size_t operator()(const c10::QualifiedName& n) const noexcept { + return std::hash()(n.qualifiedName()); + } +}; +} // namespace std diff --git a/thirdparty/libtorch/include/ATen/core/stack.h b/thirdparty/libtorch/include/ATen/core/stack.h new file mode 100644 index 0000000000..3bf8c2101b --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/stack.h @@ -0,0 +1,135 @@ +#pragma once + +#include + +// TODO move this to c10 namespace + +namespace torch { +namespace jit { + +using c10::IValue; +using Stack = std::vector; +using Operation = std::function; + +// An operation with N inputs and M outputs pops the last N inputs off +// the stack and pushes its M inputs onto the stack +// before: I0, I1, ... IN <- stack.back() +// after: O0, O1, ... OM +// operations are defined this way so that ownership of inputs can be +// transferred to the operation and it can incrementally drop ownership of +// tensors when they become unneeded. For large operations, like 'run an entire +// subgraph', this functionality is very important for minimizing gpu memory +// usage return value is the relative 'offset' to jump to for the next +// operation: +// pc += 1 + offset +// so a return value of 0 goes to the next instruction + +// treat the last N elements of the stack as a list, looking up +// element i +static inline IValue& peek(Stack& stack, size_t i, size_t N) { + return *(stack.end() - N + i); +} +static inline const IValue& peek(const Stack& stack, size_t i, size_t N) { + return *(stack.end() - N + i); +} +// treat the last N elements of the stack as a list, looking up the +// slice starting at index i and having length len +static inline at::ArrayRef peekSlice( + const Stack& stack, + size_t i, + size_t len, + size_t N) { + return at::ArrayRef(stack).slice(stack.size() - N + i, len); +} +static inline at::ArrayRef last(const Stack& stack, size_t N) { + return peekSlice(stack, 0, N, N); +} +static inline void drop(Stack& stack, size_t n) { + stack.erase(stack.end() - n, stack.end()); +} +static inline IValue pop(Stack& stack) { + auto r = std::move(stack.back()); + stack.pop_back(); + return r; +} +static inline std::vector pop(Stack& stack, size_t n) { + std::vector result; + result.reserve(n); + for (size_t i = 0; i < n; ++i) { + result.push_back(std::move(peek(stack, i, n))); + } + drop(stack, n); + return result; +} + +// variadic pop: +// int64_t a; at::Tensor b; +// pop(stack, a, b); +// equivalent to: +// b = pop(stack).toTensor(); +// a = pop(stack).toInt(); +template +static inline void pop(Stack& stack, Types&... args) { + size_t i = 0; + constexpr size_t N = sizeof...(args); + int result[N] = { + (args = std::move(peek(stack, i++, N)).template to(), 0)...}; + (void)result; + drop(stack, N); +} +template +static inline void push_one(Stack& stack, Type&& arg) { + stack.emplace_back(std::forward(arg)); +} + +static inline void push_one(Stack& stack, c10::TensorOptions options) { + stack.emplace_back(c10::typeMetaToScalarType(options.dtype())); + stack.emplace_back(options.layout()); + stack.emplace_back(options.device()); + stack.emplace_back(options.pinned_memory()); +} + +template +static inline void push(Stack& stack, Types&&... 
args) { + (void)std::initializer_list{(push_one(stack, args), 0)...}; +} +template +static inline void push_list_elements(Stack& stack, const c10::List& elements) { + for (T elem : elements) { + stack.push_back(std::move(elem)); + } +} + +// The packer here is carefully written not to make any unnecessary +// copies. + +// pack takes the return values of aten functions pushes them onto the stack +template +inline void pack(Stack& stack, T&& v) { + stack.emplace_back(std::forward(v)); +} + +template +struct TuplePacker { + // NB: *Not* a universal reference. + static void execute(Stack& stack, std::tuple&& t) { + // NB: The move here does not "destroy" the entire tuple, that is + // not what std::move does; only the particular tuple index + // processed here gets stolen. + pack(stack, std::get(std::move(t))); + TuplePacker::execute(stack, std::move(t)); + } +}; + +template +struct TuplePacker<0, Args...> { + static void execute(Stack& stack, std::tuple&& t){}; +}; + +template +inline void pack(Stack& stack, std::tuple&& t) { + TuplePacker::execute(stack, std::move(t)); +} + +} // namespace jit +} // namespace torch diff --git a/thirdparty/libtorch/include/ATen/core/typeid.h b/thirdparty/libtorch/include/ATen/core/typeid.h new file mode 100644 index 0000000000..5967c0a165 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/core/typeid.h @@ -0,0 +1 @@ +#include diff --git a/thirdparty/libtorch/include/ATen/cpp_custom_type_hack.h b/thirdparty/libtorch/include/ATen/cpp_custom_type_hack.h new file mode 100644 index 0000000000..e9d7b69973 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cpp_custom_type_hack.h @@ -0,0 +1,47 @@ +// WARNING! WARNING! WARNING! +// This file is a temporary hack to enable development of pytorch quantization +// +// It's a stub for wrapping arbitrary cpp types in TorchScript. Proper +// implementation (under development) is to use TorchScript custom types. +// In the meantime, we abuse ByteTensor with custom deleter for this purpose. +// +// Template argument has to be registered with CAFFE_KNOWN_TYPE mechanism. + +#include + +namespace at { +namespace cpp_custom_type_hack { + +template +T& cast(const Tensor& packed) { + TORCH_CHECK( + packed.scalar_type() == kByte, "Expected temporary cpp type wrapper"); + TORCH_CHECK( + packed.storage().data_ptr().get_deleter() == + caffe2::TypeMeta::Make().deleteFn(), + "Expected temporary cpp type wrapper of type ", + caffe2::TypeMeta::TypeName()); + return *reinterpret_cast(packed.storage().data_ptr().get()); +} + +template +Tensor create(std::unique_ptr ptr, TensorOptions options) { + // None of this should trace, so turn off Variable handling + at::AutoNonVariableTypeMode guard; + + // We store this instance away in a Tensor and register a deleter function + // so that we do not leak memory. On the other side, we pull out the storage's + // data_ptr and get the right typed pointer. 
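+  // Illustrative round trip with this helper pair (PackedState is a
+  // hypothetical caller-defined type registered via CAFFE_KNOWN_TYPE):
+  //
+  //   auto wrapped = at::cpp_custom_type_hack::create(
+  //       std::make_unique<PackedState>(), at::TensorOptions());
+  //   PackedState& state = at::cpp_custom_type_hack::cast<PackedState>(wrapped);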
+ void* raw_ptr = ptr.release(); + at::DataPtr at_ptr( + raw_ptr, raw_ptr, caffe2::TypeMeta::Make().deleteFn(), at::kCPU); + + // size doesn't really matter, but we can align it to the actual size + // returning variables because one likely want to use this hack from python + auto retval = at::empty({sizeof(T)}, options.device(kCPU).dtype(at::kByte)); + retval.storage().set_data_ptr(std::move(at_ptr)); + return retval; +} + +} // namespace cpp_custom_type_hack +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cpu/FlushDenormal.h b/thirdparty/libtorch/include/ATen/cpu/FlushDenormal.h new file mode 100644 index 0000000000..ca6820fc63 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cpu/FlushDenormal.h @@ -0,0 +1,14 @@ +/// Flush-To-Zero and Denormals-Are-Zero mode +/// +/// Flush-To-Zero (FTZ) and Denormals-Are-Zero (DAZ) are modes that bypass +/// IEEE 754 methods of dealing with denormal floating-point numbers on x86-64 +/// and some x86 CPUs. They result in reduced precision for values near zero, +/// but increased performance. +/// +/// See https://software.intel.com/en-us/articles/x87-and-sse-floating-point-assists-in-ia-32-flush-to-zero-ftz-and-denormals-are-zero-daz + +namespace at { namespace cpu { + +bool set_flush_denormal(bool on); + +}} // namespace at::cpu diff --git a/thirdparty/libtorch/include/ATen/cpu/vml.h b/thirdparty/libtorch/include/ATen/cpu/vml.h new file mode 100644 index 0000000000..45523166b9 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cpu/vml.h @@ -0,0 +1,200 @@ +#pragma once + +#include +#include +#include +#include + +// This header implements various unary operations using a MKL VML style +// interface. + +// It implements various functions with a simple interface +// For example it enables the user to call vsin(float* out, const float* in, +// size) This functions takes a pointer to a contious output array of floats and +// a constant input array. It will then apply sin to each value in in the input +// array and write the result into the output array. out and in may point to the +// same memory, i.e. this fully supports in-place operations. These functions +// also implement their own parallelization, so take precautions when calling +// these from threaded functions. + +// When MKL is available it will call into MKL's VML library similar to NumPy +// If MKL is not available it will use SLEEF. + +// This file might be compiled under AVX or AVX2 when called from e.g. +// UnaryOpsKernel.cpp + +#include +#include +#include +#include +#include +#include + +#if AT_MKL_ENABLED() && !defined(__APPLE__) +#include +#endif + +// [Note SSE-AVX transitions] +// There is a bug in Glibc2.23 +// https://bugs.launchpad.net/ubuntu/+source/glibc/+bug/1663280. Calling zeroall +// when using AVX/AVX2 code resolves this. 
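+// Illustrative use of the vsin-style interface described above (a sketch;
+// `in` and `out` are caller-provided float buffers of length `n`):
+//
+//   at::vml::vsin(out, in, n);   // writes sin(in[i]) into out[i] for i < n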
+#if defined(__AVX__) && defined(__GLIBC__) && __GLIBC_MINOR__ == 23 +#define DL_RUNTIME_BUG(op, type) \ + using value_t = typename at::native::ztype::value_t; \ + volatile value_t x = (value_t)(1); \ + x = std::op(x); \ + _mm256_zeroall(); +#else +#define DL_RUNTIME_BUG(op, type) +#endif + +namespace at { +namespace vml { +namespace { + +using namespace vec256; + +template +inline void vrsqrt(scalar_t* out, scalar_t* in, int64_t size) { + parallel_for(0, size, 2048, [out, in](int64_t begin, int64_t end) { + map( + [](const Vec256& x) { + return Vec256((scalar_t)(1)) / x.sqrt(); + }, + out + begin, + in + begin, + end - begin); + }); +} + +// NB: We ignore numerical errors by convention and leave them to the user + +// We unfortunately need to duplicate code here to deal with the SSE-AVX +// transition bug (see [Note SSE-AVX transitions]). As soon as we can expect +// users to use a version of glibc newer than 2.23 we will be able to ditch +// this. This duplication is also necessary since not all functions (e.g. rsqrt) +// might be part of cmath. + +#define IMPLEMENT_VML_BUG(op) \ + template \ + inline void v##op(scalar_t* out, const scalar_t* in, int64_t size) { \ + DL_RUNTIME_BUG(op, scalar_t) \ + parallel_for(0, size, 2048, [out, in](int64_t begin, int64_t end) { \ + map([](const Vec256& x) { return x.op(); }, \ + out + begin, \ + in + begin, \ + end - begin); \ + }); \ + } + +#define IMPLEMENT_VML(op) \ + template \ + inline void v##op(scalar_t* out, const scalar_t* in, int64_t size) { \ + parallel_for(0, size, 2048, [out, in](int64_t begin, int64_t end) { \ + map([](const Vec256& x) { return x.op(); }, \ + out + begin, \ + in + begin, \ + end - begin); \ + }); \ + } + +IMPLEMENT_VML_BUG(abs) +IMPLEMENT_VML_BUG(acos) +IMPLEMENT_VML_BUG(asin) +IMPLEMENT_VML_BUG(atan) +IMPLEMENT_VML_BUG(ceil) +IMPLEMENT_VML_BUG(cos) +// IMPLEMENT_VML_BUG(cosh) +IMPLEMENT_VML_BUG(erf) +IMPLEMENT_VML_BUG(erfc) +IMPLEMENT_VML(erfinv) +IMPLEMENT_VML_BUG(exp) +IMPLEMENT_VML_BUG(expm1) +IMPLEMENT_VML_BUG(floor) +IMPLEMENT_VML(reciprocal) +IMPLEMENT_VML_BUG(log) +IMPLEMENT_VML_BUG(log10) +IMPLEMENT_VML_BUG(log1p) +IMPLEMENT_VML_BUG(log2) +IMPLEMENT_VML(neg) +IMPLEMENT_VML_BUG(sin) +// IMPLEMENT_VML_BUG(sinh) +IMPLEMENT_VML_BUG(sqrt) +IMPLEMENT_VML_BUG(round) +IMPLEMENT_VML(rsqrt) +IMPLEMENT_VML_BUG(tan) +IMPLEMENT_VML_BUG(tanh) +IMPLEMENT_VML_BUG(trunc) +IMPLEMENT_VML_BUG(lgamma) + + +#if AT_MKL_ENABLED() && !defined(__APPLE__) + +// NB: LP64 MKL is the most commonly used and thus we assume it here. That means +// we need to expect MKL_INT to be of type int, which implies int32_t in most +// cases. 
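+// The wrappers below therefore split buffers longer than
+// std::numeric_limits<MKL_INT>::max() elements into chunks and issue one VML
+// call per chunk, so the 32-bit element count never overflows.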
+static_assert( + std::is_same::value, + "MKL_INT is assumed to be int32_t"); +#define IMPLEMENT_VML_MKL_STUB(op, mklop, type, mkltype) \ + template <> \ + inline void v##op(type * out, const type * in, int64_t size) { \ + int64_t max_mkl_ind = std::numeric_limits::max(); \ + if (size <= static_cast(max_mkl_ind)) { \ + vm##mkltype##mklop( \ + size, in, out, VML_HA | VML_FTZDAZ_OFF | VML_ERRMODE_IGNORE); \ + } else { \ + MKL_INT ind = 0; \ + int64_t chunks = size / max_mkl_ind; \ + int64_t rest = size % max_mkl_ind; \ + for (; ind < chunks; ind++) { \ + vm##mkltype##mklop( \ + max_mkl_ind, \ + in + ind * max_mkl_ind, \ + out + ind * max_mkl_ind, \ + VML_HA | VML_FTZDAZ_OFF | VML_ERRMODE_IGNORE); \ + } \ + vm##mkltype##mklop( \ + rest, \ + in + ind * max_mkl_ind, \ + out + ind * max_mkl_ind, \ + VML_HA | VML_FTZDAZ_OFF | VML_ERRMODE_IGNORE); \ + } \ + } + +#define IMPLEMENT_VML_MKL(op, mklop) \ + IMPLEMENT_VML_MKL_STUB(op, mklop, float, s) \ + IMPLEMENT_VML_MKL_STUB(op, mklop, double, d) + +// NB: abs, cosh and sinh were temporarily disabled due to issues with Apple clang + +IMPLEMENT_VML_MKL(abs, Abs) +IMPLEMENT_VML_MKL(acos, Acos) +IMPLEMENT_VML_MKL(asin, Asin) +IMPLEMENT_VML_MKL(atan, Atan) +IMPLEMENT_VML_MKL(cos, Cos) +// IMPLEMENT_VML_MKL(cosh, Cosh) +IMPLEMENT_VML_MKL(erf, Erf) +IMPLEMENT_VML_MKL(erfc, Erfc) +IMPLEMENT_VML_MKL(erfinv, ErfInv) +IMPLEMENT_VML_MKL(exp, Exp) +IMPLEMENT_VML_MKL(expm1, Expm1) +IMPLEMENT_VML_MKL(log, Ln) +IMPLEMENT_VML_MKL(log10, Log10) +IMPLEMENT_VML_MKL(log1p, Log1p) +IMPLEMENT_VML_MKL(sin, Sin) +// IMPLEMENT_VML_MKL(sinh, Sinh) +IMPLEMENT_VML_MKL(sqrt, Sqrt) +IMPLEMENT_VML_MKL(tan, Tan) +IMPLEMENT_VML_MKL(tanh, Tanh) +IMPLEMENT_VML_MKL(trunc, Trunc) + +#if INTEL_MKL_VERSION >= 20180406 +IMPLEMENT_VML_MKL(log2, Log2) +#endif + +#endif + +} // namespace +} // namespace vml +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/ATenCUDAGeneral.h b/thirdparty/libtorch/include/ATen/cuda/ATenCUDAGeneral.h new file mode 100644 index 0000000000..cb45756847 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/ATenCUDAGeneral.h @@ -0,0 +1,9 @@ +#pragma once + +#include +#include +#include + +#include + +// Use TORCH_CUDA_API for exports from this folder diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAApplyUtils.cuh b/thirdparty/libtorch/include/ATen/cuda/CUDAApplyUtils.cuh new file mode 100644 index 0000000000..ffe61b2128 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAApplyUtils.cuh @@ -0,0 +1,1241 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#include + +// +// This file contains pointwise operation functions and kernels that +// work on both contiguous and non-contiguous tensor arguments of +// arbitrary (up to MAX_CUTORCH_DIMS) dimensioned arguments without +// copying or temporary storage. +// + +/* + NOTE [ CUDA_tensor_applyN helpers ] + + The following CUDA_tensor_applyN (where N currently can be 1, 2, 3, or 4) + functions apply a pointwise operator to N tensor(s). + + The calling convention is + + 1. The template arguments should be, sequentially, + - First N typename args specify the scalar types of each of the N tensors. + - (Optional) `int step` arg specifies the number of elements processed + together at the same time. + Default is 1. + - A usually omitted (i.e., inferred) typename arg specifies the type of the + function/functor applied on `N * step` values in each iteration of each + CUDA thread. + 2. 
The arguments should be, sequentially, + - N tensors + - op: a function/functor that processes `N * step` values at the same time. + - If `step == 1`, it must have signature + `void(*)(scalar1_t&, scalar2_t&, ..., scalarN_t&)`, where + `scalar*_t`s are the first N typename template args, and the inputs + are the `N` values from the `N` tensors retrieved at a common index. + - Otherwise, it must must have signature + void(*)(int n, scalar1_t&, scalar1_t&, ..., scalar1_t&, // repeat `step` times + scalar2_t&, scalar2_t&, ..., scalar2_t&, // repeat `step` times + ..., + scalarN_t&, scalarN_t&, ..., scalarN_t&) // repeat `step` times + Different from `step == 1` case, it processes `N * step` values taken + from `step` common indices. Moreover, the first input `n` represents the + number of valid indices (it will always have `0 < n <= step`). It will + almost always be `step`, but at the boundary we may not have full `step` + elements and `n` can be a lesser value. + + E.g., if `step == 4` and `N == 2`, `op` could be + + [](int n, scalar1_t &u1, scalar1_t &u2, scalar1_t &u3, scalar1_t &u4, + scalar2_t &v1, scalar2_t &v2, scalar2_t &v3, scalar2_t &v4) { + // Only process u1, ..., un and v1, ..., vn. + // So if `n == 3`, `u4` and `v4` need not to be considered. + } + + In both cases, the references can actually be const, but at least one of + them should be non-const in order to write the output. + - (Optional, but recommended) N TensorArgType args that specify for each + tensor whether `op` reads AND writes ] (i.e., TensorArgType::ReadWrite), + or only reads (i.e., TensorArgType::ReadOnly). + Default is TensorArgType::ReadWrite for first Tensor, and + TensorArgType::ReadOnly for the rest. + + E.g., + + to compute a = b^2 for a and b of same dtype, we can call + + CUDA_tensor_apply2( + a, b, + [] __device__ (scalar &a_val, const scalar &b_val) { a_val = b_val * b_val; } + ); + + to work on 2 values at the same time, we can call + + CUDA_tensor_apply2( + a, b, + [] __device__ (int n, scalar1 &a_val1, scalar1 &a_val2, + const scalar2 &b_val1, const scalar2 &b_val2) { + // call special vectorized op here, or just do elementwise and enjoy unrolling... + // if n == 1, only process a_val1 and b_val1 + } + ); +*/ + +namespace at { +namespace cuda { + +// TODO: combine with TensorArg? So far that's been for debugging, and this is functional... +enum class TensorArgType { ReadWrite, ReadOnly }; + +namespace { + +// Rearrange dimensions for pointwise operations so that strides are in +// decreasing order as much as possible, so that kernels have better memory +// access patterns. +// +// For example, consider a binary operation on two "transposed" 2-dim tensors: +// sizes: 256 512 +// aInfo->strides: 1 256 +// bInfo->strides: 1 256 +// +// Given this, each concurrent memory access inside kernelPointwiseApply2() is +// exactly 256 elements apart, resulting in poor performance. +// +// This function exchanges dimensions so that memory access is contiguous: +// sizes: 512 256 +// aInfo->strides: 256 1 +// bInfo->strides: 256 1 +// +// (Actually, it becomes even better because now collapseDims() can turn each +// input into one contiguous array.) +// +// In general, given M (<=4) TensorInfo's with N dimensions, we can view each +// strides[i] (0 <= i < N) as an M-tuple. 
Given each pair i < j, we exchange +// strides[i] and [j] if +// (1) strides[i][k] < strides[j][k] for some k (0 <= k < M) +// (exchanging them will benefit input #k), and +// (2) strides[i][k] <= strieds[j][k] for all k +// (exchanging them will not make any input worse). +template +inline void rearrangeDims(detail::TensorInfo* aInfo, + detail::TensorInfo* bInfo = nullptr, + detail::TensorInfo* cInfo = nullptr, + detail::TensorInfo* dInfo = nullptr) { + int numInfos = 1; + int dims = aInfo->dims; + IndexType *sizes[4] = { aInfo->sizes, }; + IndexType *strides[4] = { aInfo->strides, }; + + if (bInfo != nullptr) { + ++numInfos; + if (bInfo->dims != dims) return; + sizes[1] = bInfo->sizes; + strides[1] = bInfo->strides; + } + + if (cInfo != nullptr) { + ++numInfos; + if (cInfo->dims != dims) return; + sizes[2] = cInfo->sizes; + strides[2] = cInfo->strides; + } + + if (dInfo != nullptr) { + ++numInfos; + if (dInfo->dims != dims) return; + sizes[3] = dInfo->sizes; + strides[3] = dInfo->strides; + } + + // Bail out if sizes do not match: we are using "deprecated pointwise + // behavior" among tensors of different shapes but same number of elements. + for (int i = 1; i < numInfos; ++i) { + for (int j = 0; j < dims; ++j) { + if (sizes[i][j] != sizes[0][j]) return; + } + } + + for (int i = 0; i < dims - 1; ++i) { + // No need to consider dimensions of size 1. + if (sizes[0][i] == 1) continue; + + for (int j = i + 1; j < dims; ++j) { + if (sizes[0][j] == 1) continue; + + // Compare the relative sizes of strides between dim #i and dim #j. + bool hasIncreasingStrides = false; + bool hasDecreasingStrides = false; + + for (int k = 0; k < numInfos; k++) { + IndexType stride_i = strides[k][i]; + IndexType stride_j = strides[k][j]; + if (stride_i < stride_j) { + hasIncreasingStrides = true; + } else if (stride_i > stride_j) { + hasDecreasingStrides = true; + } + } + + if (hasIncreasingStrides && !hasDecreasingStrides) { + for (int k = 0; k < numInfos; k++) { + IndexType size = sizes[k][i]; + sizes[k][i] = sizes[k][j]; + sizes[k][j] = size; + + IndexType stride = strides[k][i]; + strides[k][i] = strides[k][j]; + strides[k][j] = stride; + } + } + } + } +} + +// Threads per block for our apply kernel +// FIXME: use occupancy calculator instead +constexpr uint32_t AT_APPLY_THREADS_PER_BLOCK = 512; +constexpr uint32_t AT_APPLY_BLOCKS_PER_SM = 4; + +// The `remaining_steps` argument is used to support Op that operates on +// multiple elements at the same time. Generally, the strategy of ApplyOpN is to +// 1. Initialize `remaining_steps = step`, where `step` is the template arg of +// CUDA_tensor_applyN helpers. The input arg `n` to `apply()` represents the +// number of elements in bound for this call. It will almost always equal to +// `step` except at boundaries. +// 2. If `remaining_steps > 0` convert the current linearIndex to offset (if in +// bound), and recursively call `ApplyOpN` with `remaining_steps - 1`. +// 3. At `remaining_steps = 0`, +// if `step = 1`, call `op(tensor1_val, tensor2_val, ...)`; +// if `step > 1`, call `op(n, tensor1_val1, tensor1_val2, ..., tesor1_valstep, +// tensor2_val1, tensor2_val2, ..., tesor2_valstep, +// ... +// tensorN_val1, tensorN_val2, ..., tesorN_valstep);` +// +// See NOTE [ CUDA_tensor_applyN helpers ] above for how Op may look like. + +template +struct ApplyOp1 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, const Op &op, int n, + IndexType linearIndex, Offsets... 
aOffsets) { + // Convert `linearIndex` into an offset of `a` + const IndexType aOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, a) : 0; + + ApplyOp1::apply( + a, op, n, linearIndex + 1, aOffsets..., aOffset + ); +} +}; + +// Specialize `step=1` case (i.e., `remaining_steps=0` and `len(Offsets)=1`). +// We don't need to pass in how many elements need to processed in this case. +template +struct ApplyOp1 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, const Op &op, + int n, IndexType linearIndex, Offset offset) { + op(a.data[offset]); +} +}; + +template +struct ApplyOp1 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, const Op &op, int n, + IndexType linearIndex, Offsets... offsets) { + op(n, a.data[offsets]...); +} +}; + +template +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(AT_APPLY_THREADS_PER_BLOCK, AT_APPLY_BLOCKS_PER_SM) +#endif +__global__ void kernelPointwiseApply1(detail::TensorInfo a, + IndexType totalElements, const Op op) { + for (IndexType linearIndex = (blockIdx.x * blockDim.x + threadIdx.x) * step; + linearIndex < totalElements; + linearIndex += gridDim.x * blockDim.x * step) { + ApplyOp1::apply( + a, op, ::min(step, static_cast(totalElements - linearIndex)), linearIndex); + } +} + + +template +struct ApplyOp2 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets) { + // Convert `linearIndex` into an offset of `a` + const IndexType aOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, a) : 0; + + // Convert `linearIndex` into an offset of `b` + const IndexType bOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, b) : 0; + + ApplyOp2::apply( + a, b, op, n, linearIndex + 1, aOffsets..., aOffset, bOffsets..., bOffset + ); +} +}; + +// Specialize `step=1` case (i.e., `remaining_steps=0` and `len(Offsets)=1`). +// We don't need to pass in how many elements need to processed in this case. +template +struct ApplyOp2 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + const Op &op, int n, IndexType linearIndex, + Offset aOffset, Offset bOffset) { + op(a.data[aOffset], b.data[bOffset]); +} +}; + +template +struct ApplyOp2 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets) { + op(n, a.data[aOffsets]..., b.data[bOffsets]...); +} +}; + +template +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(AT_APPLY_THREADS_PER_BLOCK, AT_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply2(detail::TensorInfo a, + detail::TensorInfo b, + IndexType totalElements, + const Op op) { + for (IndexType linearIndex = (blockIdx.x * blockDim.x + threadIdx.x) * step; + linearIndex < totalElements; + linearIndex += gridDim.x * blockDim.x * step) { + ApplyOp2::apply( + a, b, op, ::min(step, static_cast(totalElements - linearIndex)), + linearIndex); + } +} + + +template +struct ApplyOp3 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets, + Offsets... 
cOffsets) { + // Convert `linearIndex` into an offset of `a` + const IndexType aOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, a) : 0; + + // Convert `linearIndex` into an offset of `b` + const IndexType bOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, b) : 0; + + // Convert `linearIndex` into an offset of `c` + const IndexType cOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, c) : 0; + + ApplyOp3::apply( + a, b, c, op, n, linearIndex + 1, aOffsets..., aOffset, bOffsets..., bOffset, + cOffsets..., cOffset + ); +} +}; + +// Specialize `step=1` case (i.e., `remaining_steps=0` and `len(Offsets)=1`). +// We don't need to pass in how many elements need to processed in this case. +template +struct ApplyOp3 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + const Op &op, int n, IndexType linearIndex, + Offset aOffset, Offset bOffset, Offset cOffset) { + op(a.data[aOffset], b.data[bOffset], c.data[cOffset]); +} +}; + +template +struct ApplyOp3 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets, + Offsets... cOffsets) { + op(n, a.data[aOffsets]..., b.data[bOffsets]..., c.data[cOffsets]...); +} +}; + + +template +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(AT_APPLY_THREADS_PER_BLOCK, AT_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply3(detail::TensorInfo a, + detail::TensorInfo b, + detail::TensorInfo c, + IndexType totalElements, + const Op op) { + for (IndexType linearIndex = (blockIdx.x * blockDim.x + threadIdx.x) * step; + linearIndex < totalElements; + linearIndex += gridDim.x * blockDim.x * step) { + ApplyOp3::apply( + a, b, c, op, ::min(step, static_cast(totalElements - linearIndex)), linearIndex); + } +} + + +template +struct ApplyOp4 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + detail::TensorInfo &d, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets, + Offsets... cOffsets, Offsets... dOffsets) { + // Convert `linearIndex` into an offset of `a` + const IndexType aOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, a) : 0; + + // Convert `linearIndex` into an offset of `b` + const IndexType bOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, b) : 0; + + // Convert `linearIndex` into an offset of `c` + const IndexType cOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, c) : 0; + + // Convert `linearIndex` into an offset of `d` + const IndexType dOffset = sizeof...(Offsets) < n ? + detail::IndexToOffset::get(linearIndex, d) : 0; + + ApplyOp4::apply( + a, b, c, d, op, n, linearIndex + 1, aOffsets..., aOffset, bOffsets..., bOffset, + cOffsets..., cOffset, dOffsets..., dOffset + ); +} +}; + +// Specialize `step=1` case (i.e., `remaining_steps=0` and `len(Offsets)=1`). +// We don't need to pass in how many elements need to processed in this case. 
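+// (As with ApplyOp1-3 above, once the recursion bottoms out all offsets have
+// been computed and the user functor is invoked directly on the gathered
+// elements.)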
+template +struct ApplyOp4 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + detail::TensorInfo &d, + const Op &op, int n, IndexType linearIndex, + Offset aOffset, Offset bOffset, + Offset cOffset, Offset dOffset) { + op(a.data[aOffset], b.data[bOffset], c.data[cOffset], d.data[dOffset]); +} +}; + +template +struct ApplyOp4 { +__device__ __forceinline__ +static void apply(detail::TensorInfo &a, + detail::TensorInfo &b, + detail::TensorInfo &c, + detail::TensorInfo &d, + const Op &op, int n, IndexType linearIndex, + Offsets... aOffsets, Offsets... bOffsets, + Offsets... cOffsets, Offsets... dOffsets) { + op(n, a.data[aOffsets]..., b.data[bOffsets]..., c.data[cOffsets]..., d.data[dOffsets]...); +} +}; + +template +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(AT_APPLY_THREADS_PER_BLOCK, AT_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply4(detail::TensorInfo a, + detail::TensorInfo b, + detail::TensorInfo c, + detail::TensorInfo d, + IndexType totalElements, + const Op op) { + for (IndexType linearIndex = (blockIdx.x * blockDim.x + threadIdx.x) * step; + linearIndex < totalElements; + linearIndex += gridDim.x * blockDim.x * step) { + ApplyOp4::apply( + a, b, c, d, op, ::min(step, static_cast(totalElements - linearIndex)), linearIndex); + } +} + +} // namespace + +/** + Computes ceil(a / b) +*/ +template +__host__ __device__ __forceinline__ T ATenCeilDiv(T a, T b) { + return (a + b - 1) / b; +} + +template +inline bool getApplyGrid(uint64_t totalElements, dim3& grid, int64_t curDevice) { + if (curDevice == -1) return false; + uint64_t numel_per_thread = static_cast(AT_APPLY_THREADS_PER_BLOCK) * static_cast(step); + uint64_t numBlocks = ATenCeilDiv(totalElements, numel_per_thread); + uint64_t maxGridX = at::cuda::getDeviceProperties(curDevice)->maxGridSize[0]; + if (numBlocks > maxGridX) + numBlocks = maxGridX; + grid = dim3(numBlocks); + return true; +} + +inline dim3 getApplyBlock() { + return dim3(AT_APPLY_THREADS_PER_BLOCK); +} + +template +inline bool CUDA_tensor_apply2(at::Tensor a, + at::Tensor b, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly) { + checkBackend("CUDA_tensor_apply2", {a, b}, Backend::CUDA); + int64_t totalElements = a.numel(); + + if (totalElements != b.numel()) { + return false; + } + + if (a.dim() > MAX_TENSORINFO_DIMS || + b.dim() > MAX_TENSORINFO_DIMS) { + return false; + } + + if (a.numel() == 0) { + // Empty tensor; do nothing + return true; + } + const dim3 block = getApplyBlock(); + + dim3 grid; + int64_t curDevice = current_device(); + if (curDevice == -1) return false; + if (!getApplyGrid(totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. 
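+  (Writable tensors with overlapping indices are made contiguous for the kernel
+  and the results are copied back into the original storage afterwards.)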
+ */ + Tensor oldA; + Tensor oldB; + + if (aType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(a)) { + // Must perform in contiguous space + oldA = a; + a = a.contiguous(); + } + if (bType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(b)) { + // Must perform in contiguous space + oldB = b; + b = b.contiguous(); + } + + // It is possible that the tensor dimensions are able to be collapsed, + // and thus we can reduce the actual code complexity of the copy by + // exploiting this knowledge statically, since the div/mod is the + // most expensive part of the operation, more so than memory accesses. + // For instance, when copying a non-contiguous to a contiguous tensor + // (or vice versa), the contiguous tensor can be collapsed to one + // dimension, and the loop to translate the linear index to the array + // index can be similarly collapsed. That is what this unrolling is for. + +#define HANDLE_CASE(TYPE, A, B) \ + kernelPointwiseApply2 \ + <<>>( \ + aInfo, bInfo, static_cast(totalElements), op); + +#define HANDLE_B_CASE(TYPE, A, B) { \ + switch (B) { \ + case 1: \ + HANDLE_CASE(TYPE, A, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, A, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, A, -1); \ + break; \ + } \ +} + +#define HANDLE_A_CASE(TYPE, A, B) { \ + switch (A) { \ + case 1: \ + HANDLE_B_CASE(TYPE, 1, B); \ + break; \ + case 2: \ + HANDLE_B_CASE(TYPE, 2, B); \ + break; \ + default: \ + HANDLE_B_CASE(TYPE, -1, B); \ + break; \ + } \ +} + + if (detail::canUse32BitIndexMath(a) && + detail::canUse32BitIndexMath(b)) { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + rearrangeDims(&aInfo, &bInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + + HANDLE_A_CASE(unsigned int, aInfo.dims, bInfo.dims); + } else { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + rearrangeDims(&aInfo, &bInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (aInfo.dims == 1 && bInfo.dims == 1) { + HANDLE_CASE(uint64_t, 1, 1); + } else { + HANDLE_CASE(uint64_t, -1, -1); + } + } +#undef HANDLE_CASE +#undef HANDLE_B_CASE +#undef HANDLE_A_CASE + + if (oldA.defined()) { + // Ignore overlaps when copying back; if we use copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldA, a); + } + + if (oldB.defined()) { + // Ignore overlaps when copying back; if we use copy + // instead, it will recursively try and invoke ourselves to make + // oldB contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldB, b); + } + + return true; +} + +/* Provides default step = 1 to CUDA_tensor_apply2. 
*/ +template +inline bool CUDA_tensor_apply2(at::Tensor a, + at::Tensor b, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly) { + return CUDA_tensor_apply2(a, b, op, aType, bType); +} + + +template +inline bool CUDA_tensor_apply3(at::Tensor a, + at::Tensor b, + at::Tensor c, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly, + TensorArgType cType = TensorArgType::ReadOnly) { + checkBackend("CUDA_tensor_apply3", {a, b, c}, Backend::CUDA); + int64_t totalElements = a.numel(); + + if (totalElements != b.numel() || + totalElements != c.numel()) { + return false; + } + + if (a.dim() > MAX_TENSORINFO_DIMS || + b.dim() > MAX_TENSORINFO_DIMS || + c.dim() > MAX_TENSORINFO_DIMS) { + return false; + } + + if (a.numel() == 0) { + // Empty tensor; do nothing + return true; + } + + const dim3 block = getApplyBlock(); + + dim3 grid; + int64_t curDevice = current_device(); + if (curDevice == -1) return false; + if (!getApplyGrid(totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. + */ + Tensor oldA; + Tensor oldB; + Tensor oldC; + + if (aType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(a)) { + // Must perform in contiguous space + oldA = a; + a = a.contiguous(); + } + if (bType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(b)) { + // Must perform in contiguous space + oldB = b; + b = b.contiguous(); + } + if (cType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(c)) { + // Must perform in contiguous space + oldC = c; + c = c.contiguous(); + } + +#define HANDLE_CASE(TYPE, A, B, C) \ + kernelPointwiseApply3 \ + <<>>( \ + aInfo, bInfo, cInfo, static_cast(totalElements), op); + +#define HANDLE_C_CASE(TYPE, A, B, C) { \ + switch (C) { \ + case 1: \ + HANDLE_CASE(TYPE, A, B, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, A, B, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, A, B, -1); \ + break; \ + } \ +} + +#define HANDLE_B_CASE(TYPE, A, B, C) { \ + switch (B) { \ + case 1: \ + HANDLE_C_CASE(TYPE, A, 1, C); \ + break; \ + case 2: \ + HANDLE_C_CASE(TYPE, A, 2, C); \ + break; \ + default: \ + HANDLE_C_CASE(TYPE, A, -1, C); \ + break; \ + } \ +} + +#define HANDLE_A_CASE(TYPE, A, B, C) { \ + switch (A) { \ + case 1: \ + HANDLE_B_CASE(TYPE, 1, B, C); \ + break; \ + case 2: \ + HANDLE_B_CASE(TYPE, 2, B, C); \ + break; \ + default: \ + HANDLE_B_CASE(TYPE, -1, B, C); \ + break; \ + } \ +} + + if (detail::canUse32BitIndexMath(a) && + detail::canUse32BitIndexMath(b) && + detail::canUse32BitIndexMath(c)) { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + + detail::TensorInfo cInfo = + detail::getTensorInfo(c); + + rearrangeDims(&aInfo, &bInfo, &cInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + + HANDLE_A_CASE(unsigned int, aInfo.dims, bInfo.dims, cInfo.dims); + } else { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + + detail::TensorInfo cInfo = + detail::getTensorInfo(c); + + rearrangeDims(&aInfo, &bInfo, &cInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce 
compilation time. + */ + if (aInfo.dims == 1 && bInfo.dims == 1 && cInfo.dims == 1) { + HANDLE_CASE(uint64_t, 1, 1, 1); + } else { + HANDLE_CASE(uint64_t, -1, -1, -1); + } + } +#undef HANDLE_CASE +#undef HANDLE_C_CASE +#undef HANDLE_B_CASE +#undef HANDLE_A_CASE + + if (oldA.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldA, a); + a = oldA; + } + + if (oldB.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldB contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldB, b); + b = oldB; + } + + if (oldC.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldC contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldC, c); + c = oldC; + } + + return true; +} + +/* Provides default step = 1 to CUDA_tensor_apply3. */ +template +inline bool CUDA_tensor_apply3(at::Tensor a, + at::Tensor b, + at::Tensor c, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly, + TensorArgType cType = TensorArgType::ReadOnly) { + return CUDA_tensor_apply3( + a, b, c, op, aType, bType, cType); +} + + +template +inline bool CUDA_tensor_apply4(at::Tensor a, + at::Tensor b, + at::Tensor c, + at::Tensor d, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly, + TensorArgType cType = TensorArgType::ReadOnly, + TensorArgType dType = TensorArgType::ReadOnly) { + checkBackend("CUDA_tensor_apply4", {a, b, c, d}, Backend::CUDA); + int64_t totalElements = a.numel(); + + if (totalElements != b.numel() || + totalElements != c.numel() || + totalElements != d.numel()) { + return false; + } + + if (a.dim() > MAX_TENSORINFO_DIMS || + b.dim() > MAX_TENSORINFO_DIMS || + c.dim() > MAX_TENSORINFO_DIMS || + d.dim() > MAX_TENSORINFO_DIMS) { + return false; + } + + if (a.numel() == 0) { + // Empty tensor; do nothing + return true; + } + + const dim3 block = getApplyBlock(); + + dim3 grid; + int64_t curDevice = current_device(); + if (curDevice == -1) return false; + if (!getApplyGrid(totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. 
+ */ + Tensor oldA; + Tensor oldB; + Tensor oldC; + Tensor oldD; + + if (aType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(a)) { + // Must perform in contiguous space + oldA = a; + a = a.contiguous(); + } + if (bType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(b)) { + // Must perform in contiguous space + oldB = b; + b = b.contiguous(); + } + if (cType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(c)) { + // Must perform in contiguous space + oldC = c; + c = c.contiguous(); + } + if (dType == TensorArgType::ReadWrite && detail::maybeOverlappingIndices(c)) { + // Must perform in contiguous space + oldD = d; + d = d.contiguous(); + } + +#define HANDLE_CASE(TYPE, A, B, C, D) \ + kernelPointwiseApply4 \ + <<>>( \ + aInfo, bInfo, cInfo, dInfo, static_cast(totalElements), op); + +#define HANDLE_D_CASE(TYPE, A, B, C, D) { \ + switch (D) { \ + case 1: \ + HANDLE_CASE(TYPE, A, B, C, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, A, B, C, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, A, B, C, -1); \ + break; \ + } \ +} + +#define HANDLE_C_CASE(TYPE, A, B, C, D) { \ + switch (C) { \ + case 1: \ + HANDLE_D_CASE(TYPE, A, B, 1, D); \ + break; \ + case 2: \ + HANDLE_D_CASE(TYPE, A, B, 2, D); \ + break; \ + default: \ + HANDLE_D_CASE(TYPE, A, B, -1, D); \ + break; \ + } \ +} + +#define HANDLE_B_CASE(TYPE, A, B, C, D) { \ + switch (B) { \ + case 1: \ + HANDLE_C_CASE(TYPE, A, 1, C, D); \ + break; \ + case 2: \ + HANDLE_C_CASE(TYPE, A, 2, C, D); \ + break; \ + default: \ + HANDLE_C_CASE(TYPE, A, -1, C, D); \ + break; \ + } \ +} + +#define HANDLE_A_CASE(TYPE, A, B, C, D) { \ + switch (A) { \ + case 1: \ + HANDLE_B_CASE(TYPE, 1, B, C, D); \ + break; \ + case 2: \ + HANDLE_B_CASE(TYPE, 2, B, C, D); \ + break; \ + default: \ + HANDLE_B_CASE(TYPE, -1, B, C, D); \ + break; \ + } \ +} + + if (detail::canUse32BitIndexMath(a) && + detail::canUse32BitIndexMath(b) && + detail::canUse32BitIndexMath(c) && + detail::canUse32BitIndexMath(d)) { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + + detail::TensorInfo cInfo = + detail::getTensorInfo(c); + + detail::TensorInfo dInfo = + detail::getTensorInfo(d); + + rearrangeDims(&aInfo, &bInfo, &cInfo, &dInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + dInfo.collapseDims(); + + HANDLE_A_CASE(unsigned int, aInfo.dims, bInfo.dims, cInfo.dims, dInfo.dims); + } else { + detail::TensorInfo aInfo = + detail::getTensorInfo(a); + + detail::TensorInfo bInfo = + detail::getTensorInfo(b); + + detail::TensorInfo cInfo = + detail::getTensorInfo(c); + + detail::TensorInfo dInfo = + detail::getTensorInfo(d); + + rearrangeDims(&aInfo, &bInfo, &cInfo, &dInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + dInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (aInfo.dims == 1 && bInfo.dims == 1 && cInfo.dims == 1 && dInfo.dims == 1) { + HANDLE_CASE(uint64_t, 1, 1, 1, 1); + } else { + HANDLE_CASE(uint64_t, -1, -1, -1, -1); + } + } +#undef HANDLE_CASE +#undef HANDLE_D_CASE +#undef HANDLE_C_CASE +#undef HANDLE_B_CASE +#undef HANDLE_A_CASE + + if (oldA.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. 
+ at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldA, a); + } + + if (oldB.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldB contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldB, b); + } + + if (oldC.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldC contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldC, c); + } + + if (oldD.defined()) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldC contiguous. + at::native::legacy::cuda::_th_copy_ignoring_overlaps_(oldD, c); + } + + return true; +} + +/* Provides default step = 1 to CUDA_tensor_apply4. */ +template +inline bool CUDA_tensor_apply4(at::Tensor a, + at::Tensor b, + at::Tensor c, + at::Tensor d, + const Op op, + TensorArgType aType = TensorArgType::ReadWrite, + TensorArgType bType = TensorArgType::ReadOnly, + TensorArgType cType = TensorArgType::ReadOnly, + TensorArgType dType = TensorArgType::ReadOnly) { + return CUDA_tensor_apply4( + a, b, c, d, op, aType, bType, cType); +} + +} // cuda +} // at diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDABlas.h b/thirdparty/libtorch/include/ATen/cuda/CUDABlas.h new file mode 100644 index 0000000000..ae9f3ff93b --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDABlas.h @@ -0,0 +1,68 @@ +#pragma once +/* + Provides a subset of CUDA BLAS functions as templates: + + gemm(stream, transa, transb, m, n, k, alpha, a, lda, b, ldb, beta, c, + ldc) + + gemv(stream, transa, m, n, alpha, a, lda, x, incx, beta, y, incy) + + where Dtype is double, float, at::Half or at::BFloat16(ROCm). The functions are + available in at::cuda::blas namespace. 
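+
+  Illustrative call (a sketch; A, B and C are device pointers to column-major
+  float matrices with leading dimensions m, k and m respectively):
+
+    at::cuda::blas::gemm<float>(stream, 'n', 'n', m, n, k,
+                                1.0f, A, m, B, k, 0.0f, C, m);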
+ */ + +#include + +namespace at { +namespace cuda { +namespace blas { + +/* LEVEL 3 BLAS FUNCTIONS */ + +#define CUDABLAS_GEMM_ARGTYPES(Dtype) \ + cudaStream_t stream, char transa, char transb, int64_t m, int64_t n, \ + int64_t k, Dtype alpha, const Dtype *a, int64_t lda, const Dtype *b, \ + int64_t ldb, Dtype beta, Dtype *c, int64_t ldc + +template +inline void gemm(CUDABLAS_GEMM_ARGTYPES(Dtype)) { + AT_ERROR("at::cuda::blas::gemm: not implemented for ", typeid(Dtype).name()); +} + +template <> +void gemm(CUDABLAS_GEMM_ARGTYPES(double)); +template <> +void gemm(CUDABLAS_GEMM_ARGTYPES(float)); +template <> +void gemm(CUDABLAS_GEMM_ARGTYPES(at::Half)); +#ifdef __HIP_PLATFORM_HCC__ +template <> +void gemm(CUDABLAS_GEMM_ARGTYPES(at::BFloat16)); +#endif + +/* LEVEL 2 BLAS FUNCTIONS */ + +#define CUDABLAS_GEMV_ARGTYPES(Dtype) \ + cudaStream_t stream, char trans, int64_t m, int64_t n, Dtype alpha, \ + const Dtype *a, int64_t lda, const Dtype *x, int64_t incx, Dtype beta, \ + Dtype *y, int64_t incy + +template +inline void gemv(CUDABLAS_GEMV_ARGTYPES(Dtype)) { + AT_ERROR("at::cuda::blas::gemv: not implemented for ", typeid(Dtype).name()); +} + +template <> +void gemv(CUDABLAS_GEMV_ARGTYPES(double)); +template <> +void gemv(CUDABLAS_GEMV_ARGTYPES(float)); +template <> +void gemv(CUDABLAS_GEMV_ARGTYPES(at::Half)); +#ifdef __HIP_PLATFORM_HCC__ +template <> +void gemv(CUDABLAS_GEMV_ARGTYPES(at::BFloat16)); +#endif + +} // namespace blas +} // namespace cuda +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAConfig.h b/thirdparty/libtorch/include/ATen/cuda/CUDAConfig.h new file mode 100644 index 0000000000..ca23913603 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAConfig.h @@ -0,0 +1,13 @@ +#pragma once + +// Test these using #if AT_CUDNN_ENABLED(), not #ifdef, so that it's +// obvious if you forgot to include Config.h +// c.f. https://stackoverflow.com/questions/33759787/generating-an-error-if-checked-boolean-macro-is-not-defined +// +// NB: This header MUST NOT be included from other headers; it should +// only be included from C++ files. + +#define AT_CUDNN_ENABLED() 0 +#define AT_ROCM_ENABLED() 0 + +#define NVCC_FLAGS_EXTRA "" diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAContext.h b/thirdparty/libtorch/include/ATen/cuda/CUDAContext.h new file mode 100644 index 0000000000..fea822d0d1 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAContext.h @@ -0,0 +1,69 @@ +#pragma once + +#include + +#include +#include +#include + +#include +#include +#include +#include +#include + +namespace at { +namespace cuda { + +/* +A common CUDA interface for ATen. + +This interface is distinct from CUDAHooks, which defines an interface that links +to both CPU-only and CUDA builds. That interface is intended for runtime +dispatch and should be used from files that are included in both CPU-only and +CUDA builds. + +CUDAContext, on the other hand, should be preferred by files only included in +CUDA builds. It is intended to expose CUDA functionality in a consistent +manner. + +This means there is some overlap between the CUDAContext and CUDAHooks, but +the choice of which to use is simple: use CUDAContext when in a CUDA-only file, +use CUDAHooks otherwise. + +Note that CUDAContext simply defines an interface with no associated class. +It is expected that the modules whose functions compose this interface will +manage their own state. There is only a single CUDA context/state. 
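+
+For example, a CUDA-only translation unit might query the current device
+directly through this interface (illustrative):
+
+  cudaDeviceProp* prop = at::cuda::getCurrentDeviceProperties();
+  bool is_volta_or_newer = prop->major >= 7;  // hypothetical capability check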
+*/ + +/** + * DEPRECATED: use device_count() instead + */ +inline int64_t getNumGPUs() { + return c10::cuda::device_count(); +} + +/** + * CUDA is available if we compiled with CUDA, and there are one or more + * devices. If we compiled with CUDA but there is a driver problem, etc., + * this function will report CUDA is not available (rather than raise an error.) + */ +inline bool is_available() { + return c10::cuda::device_count() > 0; +} + +TORCH_CUDA_API cudaDeviceProp* getCurrentDeviceProperties(); + +TORCH_CUDA_API int warp_size(); + +TORCH_CUDA_API cudaDeviceProp* getDeviceProperties(int64_t device); + +TORCH_CUDA_API Allocator* getCUDADeviceAllocator(); + +/* Handles */ +TORCH_CUDA_API cusparseHandle_t getCurrentCUDASparseHandle(); +TORCH_CUDA_API cublasHandle_t getCurrentCUDABlasHandle(); + + +} // namespace cuda +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDADevice.h b/thirdparty/libtorch/include/ATen/cuda/CUDADevice.h new file mode 100644 index 0000000000..9d14ab1627 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDADevice.h @@ -0,0 +1,17 @@ +#pragma once + +#include + +#include +#include + +namespace at { +namespace cuda { + +inline Device getDeviceFromPtr(void* ptr) { + cudaPointerAttributes attr; + AT_CUDA_CHECK(cudaPointerGetAttributes(&attr, ptr)); + return {DeviceType::CUDA, static_cast(attr.device)}; +} + +}} // namespace at::cuda diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAEvent.h b/thirdparty/libtorch/include/ATen/cuda/CUDAEvent.h new file mode 100644 index 0000000000..bd1e645b27 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAEvent.h @@ -0,0 +1,185 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +namespace at { namespace cuda { + +/* +* CUDAEvents are movable not copyable wrappers around CUDA's events. +* +* CUDAEvents are constructed lazily when first recorded unless it is +* reconstructed from a cudaIpcEventHandle_t. The event has a device, and this +* device is acquired from the first recording stream. However, if reconstructed +* from a handle, the device should be explicitly specified; or if ipc_handle() is +* called before the event is ever recorded, it will use the current device. +* Later streams that record the event must match this device. +*/ +struct TORCH_CUDA_API CUDAEvent { + // Constructors + // Default value for `flags` is specified below - it's cudaEventDisableTiming + CUDAEvent() {} + CUDAEvent(unsigned int flags) : flags_{flags} {} + + CUDAEvent( + DeviceIndex device_index, const cudaIpcEventHandle_t* handle) { + #ifndef __HIP_PLATFORM_HCC__ + device_index_ = device_index; + CUDAGuard guard(device_index_); + + AT_CUDA_CHECK(cudaIpcOpenEventHandle(&event_, *handle)); + is_created_ = true; + #else + AT_ERROR("cuIpcOpenEventHandle with HIP is not supported"); + #endif + } + + // Note: event destruction done on creating device to avoid creating a + // CUDA context on other devices. + ~CUDAEvent() { + try { + if (is_created_) { + CUDAGuard guard(device_index_); + cudaEventDestroy(event_); + } + } catch (...) 
{ /* No throw */ } + } + + CUDAEvent(const CUDAEvent&) = delete; + CUDAEvent& operator=(const CUDAEvent&) = delete; + + CUDAEvent(CUDAEvent&& other) { moveHelper(std::move(other)); } + CUDAEvent& operator=(CUDAEvent&& other) { + moveHelper(std::move(other)); + return *this; + } + + operator cudaEvent_t() const { return event(); } + + // Less than operator (to allow use in sets) + friend bool operator<(const CUDAEvent& left, const CUDAEvent& right) { + return left.event_ < right.event_; + } + + optional device() const { + if (is_created_) { + return at::Device(at::kCUDA, device_index_); + } else { + return {}; + } + } + + bool isCreated() const { return is_created_; } + DeviceIndex device_index() const {return device_index_;} + cudaEvent_t event() const { return event_; } + + // Note: cudaEventQuery can be safely called from any device + bool query() const { + if (!is_created_) { + return true; + } + + cudaError_t err = cudaEventQuery(event_); + if (err == cudaSuccess) { + return true; + } else if (err != cudaErrorNotReady) { + C10_CUDA_CHECK(err); + } + + return false; + } + + void record() { record(getCurrentCUDAStream()); } + + void recordOnce(const CUDAStream& stream) { + if (!was_recorded_) record(stream); + } + + // Note: cudaEventRecord must be called on the same device as the event. + void record(const CUDAStream& stream) { + if (!is_created_) { + createEvent(stream.device_index()); + } + + TORCH_CHECK(device_index_ == stream.device_index(), "Event device ", device_index_, + " does not match recording stream's device ", stream.device_index(), "."); + CUDAGuard guard(device_index_); + AT_CUDA_CHECK(cudaEventRecord(event_, stream)); + was_recorded_ = true; + } + + // Note: cudaStreamWaitEvent must be called on the same device as the stream. + // The event has no actual GPU resources associated with it. + void block(const CUDAStream& stream) { + if (is_created_) { + CUDAGuard guard(stream.device_index()); + AT_CUDA_CHECK(cudaStreamWaitEvent(stream, event_, 0)); + } + } + + // Note: cudaEventElapsedTime can be safely called from any device + float elapsed_time(const CUDAEvent& other) const { + TORCH_CHECK(is_created_ && other.isCreated(), + "Both events must be recorded before calculating elapsed time."); + float time_ms = 0; + // raise cudaErrorNotReady if either event is recorded but not yet completed + AT_CUDA_CHECK(cudaEventElapsedTime(&time_ms, event_, other.event_)); + return time_ms; + } + + // Note: cudaEventSynchronize can be safely called from any device + void synchronize() const { + if (is_created_) { + AT_CUDA_CHECK(cudaEventSynchronize(event_)); + } + } + + // Note: cudaIpcGetEventHandle must be called on the same device as the event + void ipc_handle(cudaIpcEventHandle_t * handle) { + #ifndef __HIP_PLATFORM_HCC__ + if (!is_created_) { + // this CUDAEvent object was initially constructed from flags but event_ + // is not created yet. 
+ createEvent(getCurrentCUDAStream().device_index()); + } + CUDAGuard guard(device_index_); + AT_CUDA_CHECK(cudaIpcGetEventHandle(handle, event_)); + #else + AT_ERROR("cuIpcGetEventHandle with HIP is not supported"); + #endif + } + +private: + unsigned int flags_ = cudaEventDisableTiming; + bool is_created_ = false; + bool was_recorded_ = false; + DeviceIndex device_index_ = -1; + cudaEvent_t event_; + + void createEvent(DeviceIndex device_index) { + device_index_ = device_index; + CUDAGuard guard(device_index_); + AT_CUDA_CHECK(cudaEventCreateWithFlags(&event_, flags_)); + is_created_ = true; + } + + void moveHelper(CUDAEvent&& other) { + std::swap(flags_, other.flags_); + std::swap(is_created_, other.is_created_); + std::swap(was_recorded_, other.was_recorded_); + std::swap(device_index_, other.device_index_); + std::swap(event_, other.event_); + } +}; + +} // namespace cuda +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAMultiStreamGuard.h b/thirdparty/libtorch/include/ATen/cuda/CUDAMultiStreamGuard.h new file mode 100644 index 0000000000..c03b1550a3 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAMultiStreamGuard.h @@ -0,0 +1,59 @@ +#pragma once + +#include +#include +#include +#include + +#include + +namespace at { namespace cuda { + +// TODO: Implement this generically in c10. You'll need some way to get +// the number of GPUs from the GuardImpl, in that case. +class CUDAMultiStreamGuard final { +public: + /// Calls `set_stream` on each of the streams in the list. + /// This may be useful if you need to set different streams + /// for different devices. + explicit CUDAMultiStreamGuard(ArrayRef streams) : CUDAMultiStreamGuard() { + for (const auto& s : streams) { + setCurrentCUDAStream(s); + } + } + + CUDAMultiStreamGuard() { + const size_t device_count = getNumGPUs(); + original_streams_.reserve(device_count); + for (size_t device = 0; device < device_count; ++device) { + original_streams_.push_back(getCurrentCUDAStream(device)); + } + } + + CUDAMultiStreamGuard(const CUDAGuard&) = delete; + CUDAMultiStreamGuard& operator=(const CUDAGuard&) = delete; + + // See Note [Move construction for RAII guards is tricky] + CUDAMultiStreamGuard(CUDAGuard&& other) = delete; + + // See Note [Move assignment for RAII guards is tricky] + CUDAMultiStreamGuard& operator=(CUDAGuard&& other) = delete; + + ArrayRef original_streams() const { + return original_streams_; + } + + /// Resets the CUDA stream on each device to the one that was active upon + /// construction. + ~CUDAMultiStreamGuard() { + for (const auto& s : original_streams_) { + setCurrentCUDAStream(s); + } + } + +private: + /// The original streams that were active on all devices. 
+ std::vector original_streams_; +}; + +}} // namespace at::cuda diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDATensorMethods.cuh b/thirdparty/libtorch/include/ATen/cuda/CUDATensorMethods.cuh new file mode 100644 index 0000000000..e4e89ea1cd --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDATensorMethods.cuh @@ -0,0 +1,15 @@ +#pragma once + +#include +#include + +#include +#include +#include + +namespace at { +template <> +inline __half* Tensor::data() const { + return reinterpret_cast<__half*>(data()); +} +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/CUDAUtils.h b/thirdparty/libtorch/include/ATen/cuda/CUDAUtils.h new file mode 100644 index 0000000000..15b5e02664 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/CUDAUtils.h @@ -0,0 +1,20 @@ +#pragma once + +#include + +namespace at { namespace cuda { + +// Check if every tensor in a list of tensors matches the current +// device. +inline bool check_device(ArrayRef ts) { + if (ts.empty()) { + return true; + } + Device curDevice = Device(kCUDA, current_device()); + for (const Tensor& t : ts) { + if (t.device() != curDevice) return false; + } + return true; +} + +}} // namespace at::cuda diff --git a/thirdparty/libtorch/include/ATen/cuda/Exceptions.h b/thirdparty/libtorch/include/ATen/cuda/Exceptions.h new file mode 100644 index 0000000000..8ffa6ffce2 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/Exceptions.h @@ -0,0 +1,103 @@ +#pragma once + +#include +#include +#include +#include +#include + +// See Note [CHECK macro] +#define AT_CUDNN_CHECK(EXPR) \ + do { \ + cudnnStatus_t status = EXPR; \ + if (status != CUDNN_STATUS_SUCCESS) { \ + if (status == CUDNN_STATUS_NOT_SUPPORTED) { \ + AT_ERROR( \ + "cuDNN error: ", \ + cudnnGetErrorString(status), \ + ". This error may appear if you passed in a non-contiguous input."); \ + } else { \ + AT_ERROR("cuDNN error: ", cudnnGetErrorString(status)); \ + } \ + } \ + } while (0) + +namespace at { namespace cuda { namespace blas { +const char* _cublasGetErrorEnum(cublasStatus_t error); +}}} // namespace at::cuda::blas + +#define TORCH_CUDABLAS_CHECK(EXPR) \ + do { \ + cublasStatus_t __err = EXPR; \ + TORCH_CHECK(__err == CUBLAS_STATUS_SUCCESS, \ + "CUDA error: ", \ + at::cuda::blas::_cublasGetErrorEnum(__err), \ + " when calling `" #EXPR "`"); \ + } while (0) + +const char *cusparseGetErrorString(cusparseStatus_t status); + +#define TORCH_CUDASPARSE_CHECK(EXPR) \ + do { \ + cusparseStatus_t __err = EXPR; \ + TORCH_CHECK(__err == CUSPARSE_STATUS_SUCCESS, \ + "CUDA error: ", \ + cusparseGetErrorString(__err), \ + " when calling `" #EXPR "`"); \ + } while (0) + +#define AT_CUDA_CHECK(EXPR) C10_CUDA_CHECK(EXPR) + +// For CUDA Driver API +// +// This is here instead of in c10 because NVRTC is loaded dynamically via a stub +// in ATen, and we need to use its nvrtcGetErrorString. +// See NOTE [ USE OF NVRTC AND DRIVER API ]. 
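Stepping back from the error-checking macros for a moment: a sketch of the typical way the CUDAEvent wrapper from earlier in this patch is used to time GPU work. The launch_work() call is a placeholder for real kernel launches, and cudaEventDefault is passed explicitly because elapsed_time() needs timing-enabled events, whereas the class defaults to cudaEventDisableTiming.

#include <ATen/cuda/CUDAEvent.h>
#include <ATen/cuda/CUDAContext.h>

float time_gpu_work() {
  at::cuda::CUDAStream stream = at::cuda::getCurrentCUDAStream();
  at::cuda::CUDAEvent start(cudaEventDefault);
  at::cuda::CUDAEvent stop(cudaEventDefault);

  start.record(stream);        // lazily creates the event on the stream's device
  // launch_work(stream);      // placeholder for the kernels being timed
  stop.record(stream);

  stop.synchronize();          // wait until the recorded work has finished
  return start.elapsed_time(stop);   // milliseconds between the two events
}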
+#ifndef __HIP_PLATFORM_HCC__ + +#define AT_CUDA_DRIVER_CHECK(EXPR) \ + do { \ + CUresult __err = EXPR; \ + if (__err != CUDA_SUCCESS) { \ + const char* err_str; \ + CUresult get_error_str_err C10_UNUSED = at::globalContext().getNVRTC().cuGetErrorString(__err, &err_str); \ + if (get_error_str_err != CUDA_SUCCESS) { \ + AT_ERROR("CUDA driver error: unknown error"); \ + } else { \ + AT_ERROR("CUDA driver error: ", err_str); \ + } \ + } \ + } while (0) + +#else + +#define AT_CUDA_DRIVER_CHECK(EXPR) \ + do { \ + CUresult __err = EXPR; \ + if (__err != CUDA_SUCCESS) { \ + AT_ERROR("CUDA driver error: ", static_cast(__err)); \ + } \ + } while (0) + +#endif + +// For CUDA NVRTC +// +// Note: As of CUDA 10, nvrtc error code 7, NVRTC_ERROR_BUILTIN_OPERATION_FAILURE, +// incorrectly produces the error string "NVRTC unknown error." +// The following maps it correctly. +// +// This is here instead of in c10 because NVRTC is loaded dynamically via a stub +// in ATen, and we need to use its nvrtcGetErrorString. +// See NOTE [ USE OF NVRTC AND DRIVER API ]. +#define AT_CUDA_NVRTC_CHECK(EXPR) \ + do { \ + nvrtcResult __err = EXPR; \ + if (__err != NVRTC_SUCCESS) { \ + if (static_cast(__err) != 7) { \ + AT_ERROR("CUDA NVRTC error: ", at::globalContext().getNVRTC().nvrtcGetErrorString(__err)); \ + } else { \ + AT_ERROR("CUDA NVRTC error: NVRTC_ERROR_BUILTIN_OPERATION_FAILURE"); \ + } \ + } \ + } while (0) diff --git a/thirdparty/libtorch/include/ATen/cuda/NumericLimits.cuh b/thirdparty/libtorch/include/ATen/cuda/NumericLimits.cuh new file mode 100644 index 0000000000..7081e94837 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/NumericLimits.cuh @@ -0,0 +1,121 @@ +#pragma once + +#include +#include +#include +#include + +// NumericLimits.cuh is a holder for numeric limits definitions of commonly used +// types. This header is very specific to ROCm HIP and may be removed in the future. +// This header is derived from the legacy THCNumerics.cuh. + +// The lower_bound and upper_bound constants are same as lowest and max for +// integral types, but are -inf and +inf for floating point types. They are +// useful in implementing min, max, etc. + +namespace at { + +template +struct numeric_limits { +}; + +// WARNING: the following at::numeric_limits definitions are there only to support +// HIP compilation for the moment. Use std::numeric_limits if you are not +// compiling for ROCm. +// from @colesbury: "The functions on numeric_limits aren't marked with +// __device__ which is why they don't work with ROCm. CUDA allows them +// because they're constexpr." + +namespace { + // ROCm doesn't like INFINITY too. 
+ constexpr double inf = INFINITY; +} + +template <> +struct numeric_limits { + static inline __host__ __device__ bool lowest() { return false; } + static inline __host__ __device__ bool max() { return true; } + static inline __host__ __device__ bool lower_bound() { return false; } + static inline __host__ __device__ bool upper_bound() { return true; } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ uint8_t lowest() { return 0; } + static inline __host__ __device__ uint8_t max() { return UINT8_MAX; } + static inline __host__ __device__ uint8_t lower_bound() { return 0; } + static inline __host__ __device__ uint8_t upper_bound() { return UINT8_MAX; } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ int8_t lowest() { return INT8_MIN; } + static inline __host__ __device__ int8_t max() { return INT8_MAX; } + static inline __host__ __device__ int8_t lower_bound() { return INT8_MIN; } + static inline __host__ __device__ int8_t upper_bound() { return INT8_MAX; } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ int16_t lowest() { return INT16_MIN; } + static inline __host__ __device__ int16_t max() { return INT16_MAX; } + static inline __host__ __device__ int16_t lower_bound() { return INT16_MIN; } + static inline __host__ __device__ int16_t upper_bound() { return INT16_MAX; } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ int32_t lowest() { return INT32_MIN; } + static inline __host__ __device__ int32_t max() { return INT32_MAX; } + static inline __host__ __device__ int32_t lower_bound() { return INT32_MIN; } + static inline __host__ __device__ int32_t upper_bound() { return INT32_MAX; } +}; + +template <> +struct numeric_limits { +#ifdef _MSC_VER + static inline __host__ __device__ int64_t lowest() { return _I64_MIN; } + static inline __host__ __device__ int64_t max() { return _I64_MAX; } + static inline __host__ __device__ int64_t lower_bound() { return _I64_MIN; } + static inline __host__ __device__ int64_t upper_bound() { return _I64_MAX; } +#else + static inline __host__ __device__ int64_t lowest() { return INT64_MIN; } + static inline __host__ __device__ int64_t max() { return INT64_MAX; } + static inline __host__ __device__ int64_t lower_bound() { return INT64_MIN; } + static inline __host__ __device__ int64_t upper_bound() { return INT64_MAX; } +#endif +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ at::Half lowest() { return at::Half(0xFBFF, at::Half::from_bits()); } + static inline __host__ __device__ at::Half max() { return at::Half(0x7BFF, at::Half::from_bits()); } + static inline __host__ __device__ at::Half lower_bound() { return at::Half(0xFC00, at::Half::from_bits()); } + static inline __host__ __device__ at::Half upper_bound() { return at::Half(0x7C00, at::Half::from_bits()); } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ at::BFloat16 lowest() { return at::BFloat16(0xFF7F, at::BFloat16::from_bits()); } + static inline __host__ __device__ at::BFloat16 max() { return at::BFloat16(0x7F7F, at::BFloat16::from_bits()); } + static inline __host__ __device__ at::BFloat16 lower_bound() { return at::BFloat16(0xFF80, at::BFloat16::from_bits()); } + static inline __host__ __device__ at::BFloat16 upper_bound() { return at::BFloat16(0x7F80, at::BFloat16::from_bits()); } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ float lowest() { return -FLT_MAX; } + static inline 
__host__ __device__ float max() { return FLT_MAX; } + static inline __host__ __device__ float lower_bound() { return -static_cast(inf); } + static inline __host__ __device__ float upper_bound() { return static_cast(inf); } +}; + +template <> +struct numeric_limits { + static inline __host__ __device__ double lowest() { return -DBL_MAX; } + static inline __host__ __device__ double max() { return DBL_MAX; } + static inline __host__ __device__ double lower_bound() { return -inf; } + static inline __host__ __device__ double upper_bound() { return inf; } +}; + +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/cuda/PinnedMemoryAllocator.h b/thirdparty/libtorch/include/ATen/cuda/PinnedMemoryAllocator.h new file mode 100644 index 0000000000..e980908857 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/PinnedMemoryAllocator.h @@ -0,0 +1,9 @@ +#pragma once + +#include + +namespace at { namespace cuda { + +TORCH_CUDA_API at::Allocator* getPinnedMemoryAllocator(); + +}} // namespace at::cuda diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/CUDAHooks.h b/thirdparty/libtorch/include/ATen/cuda/detail/CUDAHooks.h new file mode 100644 index 0000000000..afd2394f16 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/CUDAHooks.h @@ -0,0 +1,40 @@ +#include + +#include +#include + +// TODO: No need to have this whole header, we can just put it all in +// the cpp file + +namespace at { namespace cuda { namespace detail { + +// The real implementation of CUDAHooksInterface +struct CUDAHooks : public at::CUDAHooksInterface { + CUDAHooks(at::CUDAHooksArgs) {} + std::unique_ptr initCUDA() const override; + Device getDeviceFromPtr(void* data) const override; + bool isPinnedPtr(void* data) const override; + Generator* getDefaultCUDAGenerator(DeviceIndex device_index = -1) const override; + bool hasCUDA() const override; + bool hasMAGMA() const override; + bool hasCuDNN() const override; + const at::cuda::NVRTC& nvrtc() const override; + int64_t current_device() const override; + bool hasPrimaryContext(int64_t device_index) const override; + c10::optional getDevceIndexWithPrimaryContext() const override; + Allocator* getPinnedMemoryAllocator() const override; + bool compiledWithCuDNN() const override; + bool compiledWithMIOpen() const override; + bool supportsDilatedConvolutionWithCuDNN() const override; + bool supportsDepthwiseConvolutionWithCuDNN() const override; + long versionCuDNN() const override; + std::string showConfig() const override; + double batchnormMinEpsilonCuDNN() const override; + int64_t cuFFTGetPlanCacheMaxSize(int64_t device_index) const override; + void cuFFTSetPlanCacheMaxSize(int64_t device_index, int64_t max_size) const override; + int64_t cuFFTGetPlanCacheSize(int64_t device_index) const override; + void cuFFTClearPlanCache(int64_t device_index) const override; + int getNumGPUs() const override; +}; + +}}} // at::cuda::detail diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/DeviceThreadHandles.h b/thirdparty/libtorch/include/ATen/cuda/detail/DeviceThreadHandles.h new file mode 100644 index 0000000000..8ef1112533 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/DeviceThreadHandles.h @@ -0,0 +1,141 @@ +// Some stateful GPU libraries, such as cuDNN, cuBLAS, use handles to store states. +// These handles are tied to device, and these libraries requires/recommends not to +// share handles across host threads. +// +// These libraries recommend using one handle per host thread. 
We may not want to do +// this because threads are relatively light-weight, but creating and destroying +// handles is expensive (destroying the handle causes synchronizations). DataParallel, +// for example, creates new threads for each forward pass. +// +// This file implements a handle pool mechanism. The handle pool returns handles on +// demand as threads request them. If all existing handles in the pool are in use, +// it creates a new one. As threads terminate, they release handles back into the pool. +// In this way, the handle pool never creates more handles than the high-water mark of +// active threads, so it's efficient with DataParallel. + +#pragma once + +#include +#include +#include +#include +#include + +#include + +namespace at { namespace cuda { namespace { + +template +struct DeviceThreadHandlePool { + + struct Handle { + Handle_t handle; + Handle(bool create = false) : handle(nullptr) + { + if(create) Create(&handle); + } + // std::vector.emplace() and push_back() may route through temporaries and call + // copy/move constructors along the way. If this is the case, we don't want + // the destructors of temporaries to call cudnnDestroy on the handle. + // We can achieve safety (for the narrow case of stashing within std::vectors) + // by making Handle moveable but not copyable, and transferring handle ownership + // to the latest constructed object. This is not a substitute for full-blown + // reference counting, but reference counting may be overkill here. + // Another alternative is to wrap the saved Handles in unique_ptrs, i.e., + // unordered_map>> created_handles; + Handle(const Handle& rhs) = delete; + // Following https://stackoverflow.com/questions/3279543/what-is-the-copy-and-swap-idiom + Handle(Handle&& rhs) : Handle() { std::swap(handle, rhs.handle); } + // operator= takes argument by value + Handle& operator=(Handle rhs) { std::swap(handle, rhs.handle); return *this; } + ~Handle() { + if(handle) Destroy(handle); + } + }; + + std::mutex mutex; + + // Handles are lazily created as different threads request them, + // but are never destroyed until the end of the process. + // The maximum number of handles this process will create for each device is equal + // to the high-water mark of the number of concurrently active threads that request + // handles for that device. + // When threads terminate, they release their handles back into the pool for reuse. + // Otherwise, new handles would be created every time new threads were spawned, + // resulting in poor performance for Python modules that repeatedly or frequently + // spawned new sets of threads (like DataParallel, which creates a new set of threads + // for each forward pass). + // + // To prevent potential deadlocks, we explicitly choose not to cap the number + // of handles that are created per device. + // Example of danger: If we cap the max handles at 4, and 5 threads are sharing a device, + // only 4 can make forward progress at any time. The other 4 will not release their + // handles until they exit, so the fifth cannot make progress until then. This is + // not a problem...UNLESS all 5 threads attempt some sort of synchronization at an + // intermediate point (ie, before any of them have exited). We have no way to anticipate + // or enforce that user threads will not attempt such intermediate synchronization. + // The only way to ensure safety is to avoid imposing a cap on the number of handles. 
+ std::unordered_map> created_handles; + std::unordered_map> available_handles; + + // PoolWindow lazily creates and caches the handles that a particular thread is using, + // so in the common case handle access doesn't incur either handle creation or a mutex lock. + class PoolWindow + { + DeviceThreadHandlePool &parent; + public: + PoolWindow(DeviceThreadHandlePool &parent): parent(parent) {} + ~PoolWindow(){ release(); } + + Handle_t reserve(int device) + { + // If this thread already has a handle for this device, return it + if(my_handles.find(device) != my_handles.end()) + return my_handles[device]; + + // otherwise, either grab a handle from the pool if one is available, + // or if not, create a new one. + std::lock_guard guard(parent.mutex); + + if(parent.available_handles[device].size() > 0) + { + my_handles[device] = parent.available_handles[device].back(); + parent.available_handles[device].pop_back(); + } + else + { + // In local testing, I do observe that emplace_back sometimes routes through temporaries + // that incur move-constructor and destructor calls. See comments in Handle above. + parent.created_handles[device].emplace_back(true /*create*/); + my_handles[device] = parent.created_handles[device].back().handle; + } + + return my_handles[device]; + } + + private: + // Stores the per-device handles currently owned by this thread + std::unordered_map my_handles; + + // Called by the destructor. Releases this thread's handles back into the pool. + void release() { + if(my_handles.size() > 0) { + std::lock_guard guard(parent.mutex); + for(auto d_h : my_handles) + parent.available_handles[d_h.first].push_back(d_h.second); + } + } + }; + + // Warning: + // If you want to change this function, be aware that this function will be called + // by multiple threads and there is no mutex guarding the call of this function, so + // make sure your implementation is thread-safe. + PoolWindow *newPoolWindow() { + // The returned pointer will be owned by a thread local variable + // so that different threads does not share the same PoolWindow. + return new PoolWindow(*this); + } +}; + +}}} // namespace at::cuda::detail:: diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/IndexUtils.cuh b/thirdparty/libtorch/include/ATen/cuda/detail/IndexUtils.cuh new file mode 100644 index 0000000000..315897ae65 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/IndexUtils.cuh @@ -0,0 +1,32 @@ +#pragma once + +#include +#include +#include + +namespace at { +namespace cuda { +namespace detail { + +TORCH_CUDA_API bool maybeOverlappingIndices(const at::Tensor& t); +TORCH_CUDA_API bool canUse32BitIndexMath(const at::Tensor &t, int64_t max_elem=std::numeric_limits::max()); + +template +TensorInfo +getTensorInfo(const at::Tensor& t) { + IndexType sz[MAX_TENSORINFO_DIMS]; + IndexType st[MAX_TENSORINFO_DIMS]; + + int dims = t.dim(); + for (int i = 0; i < dims; ++i) { + sz[i] = t.size(i); + st[i] = t.stride(i); + } + + return TensorInfo( + t.data_ptr(), dims, sz, st); +} + +} // detail +} // cuda +} // at diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/KernelUtils.h b/thirdparty/libtorch/include/ATen/cuda/detail/KernelUtils.h new file mode 100644 index 0000000000..af788ff8f8 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/KernelUtils.h @@ -0,0 +1,31 @@ +#pragma once + +#include + +// Contents of this file are copied from THCUNN/common.h for the ease of porting +// THCUNN functions into ATen. 
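For orientation, a sketch of how a DeviceThreadHandlePool like the one defined above is usually consumed, modeled loosely on ATen's cuBLAS handle code: one process-wide pool, one thread_local PoolWindow per thread. The create/destroy wrappers are illustrative, and the pool is assumed to be parameterized on the handle type plus its create and destroy functions.

#include <cublas_v2.h>
#include <memory>

// Wrappers matching the pool's expected create/destroy signatures.
void createCublasHandle(cublasHandle_t* handle) { TORCH_CUDABLAS_CHECK(cublasCreate(handle)); }
void destroyCublasHandle(cublasHandle_t handle) { cublasDestroy(handle); }

using CuBlasPoolType =
    DeviceThreadHandlePool<cublasHandle_t, createCublasHandle, destroyCublasHandle>;

cublasHandle_t reserveCublasHandle(int device) {
  // One pool for the whole process; it hands out handles on demand.
  static auto pool = std::make_shared<CuBlasPoolType>();
  // One window per thread: repeated calls from the same thread reuse the
  // same handle without taking the pool's mutex.
  thread_local std::unique_ptr<CuBlasPoolType::PoolWindow> window(pool->newPoolWindow());
  return window->reserve(device);
}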
+ +namespace at { namespace cuda { namespace detail { + +// CUDA: grid stride looping +// +// int64_t _i_n_d_e_x specifically prevents overflow in the loop increment. +// If input.numel() < INT_MAX, _i_n_d_e_x < INT_MAX, except after the final +// iteration of the loop where _i_n_d_e_x += blockDim.x * gridDim.x can be +// greater than INT_MAX. But in that case _i_n_d_e_x >= n, so there are no +// further iterations and the overflowed value in i=_i_n_d_e_x is not used. +#define CUDA_KERNEL_LOOP(i, n) \ + int64_t _i_n_d_e_x = blockIdx.x * blockDim.x + threadIdx.x; \ + for (int i=_i_n_d_e_x; _i_n_d_e_x < (n); _i_n_d_e_x+=blockDim.x * gridDim.x, i=_i_n_d_e_x) + +// Use 1024 threads per block, which requires cuda sm_2x or above +constexpr int CUDA_NUM_THREADS = 1024; + +// CUDA: number of blocks for threads. +inline int GET_BLOCKS(const int N) +{ + AT_ASSERTM(N > 0, "CUDA kernel launch blocks must be positive, but got N=", N); + return (N + CUDA_NUM_THREADS - 1) / CUDA_NUM_THREADS; +} + +}}} // namespace at::cuda::detail diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/OffsetCalculator.cuh b/thirdparty/libtorch/include/ATen/cuda/detail/OffsetCalculator.cuh new file mode 100644 index 0000000000..e43967db4d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/OffsetCalculator.cuh @@ -0,0 +1,64 @@ +#pragma once + +#include +#include +#include +#include +#include + +/// OffsetCalculator calculates the offset in bytes of a linear index for NARGS +/// operands that share the same shape, but may have different strides. + +#ifdef __HIP_PLATFORM_HCC__ +constexpr int MAX_DIMS = 16; +#else +constexpr int MAX_DIMS = 25; +#endif + +template +struct OffsetCalculator { + // The offset for each argument (in bytes). Wrapper around fixed-size array. + using offset_type = at::detail::Array; + + OffsetCalculator(int dims, const int64_t* sizes, const int64_t* const* strides) : dims(dims) { + TORCH_CHECK(dims <= MAX_DIMS, "tensor has too many (>", MAX_DIMS, ") dims"); + for (int i = 0; i < MAX_DIMS; ++i) { + if (i < dims) { + sizes_[i] = IntDivider(sizes[i]); + } else { + sizes_[i] = IntDivider(1); + } + for (int arg = 0; arg < NARGS; arg++) { + strides_[i][arg] = i < dims ? 
strides[arg][i] : 0; + } + } + } + + C10_HOST_DEVICE offset_type get(index_t linear_idx) const { + offset_type offsets; + #pragma unroll + for (int arg = 0; arg < NARGS; arg++) { + offsets[arg] = 0; + } + + #pragma unroll + for (int dim = 0; dim < MAX_DIMS; ++dim) { + if (dim == dims) { + break; + } + auto divmod = sizes_[dim].divmod(linear_idx); + linear_idx = divmod.div; + + #pragma unroll + for (int arg = 0; arg < NARGS; arg++) { + offsets[arg] += divmod.mod * strides_[dim][arg]; + } + + } + return offsets; + } + + int dims; + IntDivider sizes_[MAX_DIMS]; + index_t strides_[MAX_DIMS][NARGS]; +}; diff --git a/thirdparty/libtorch/include/ATen/cuda/detail/TensorInfo.cuh b/thirdparty/libtorch/include/ATen/cuda/detail/TensorInfo.cuh new file mode 100644 index 0000000000..b5fcbe2223 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cuda/detail/TensorInfo.cuh @@ -0,0 +1,121 @@ +#pragma once + +#include +#include + +namespace at { +namespace cuda { +namespace detail { + +#define MAX_TENSORINFO_DIMS 25 + +// CUDA kernel argument that defines tensor layout +template +struct TensorInfo { + TensorInfo(); + TensorInfo(T* p, + int dim, + IndexType sz[MAX_TENSORINFO_DIMS], + IndexType st[MAX_TENSORINFO_DIMS]); + + // Set the size of the given dimension to 1, as if it were a + // reduction dim (allows you to calculate offsets of the reduction + // slice) + void reduceDim(int dim); + + // See note on [collapse dims]. + int collapseDims(const int excludeDim = -1); + + // Contiguous tensors of more than one dimension are collapsed down + // to one tensor + __host__ __device__ inline bool isContiguous() const { + return (dims == 1 && strides[0] == 1); + } + + T* data; + IndexType sizes[MAX_TENSORINFO_DIMS]; + IndexType strides[MAX_TENSORINFO_DIMS]; + int dims; +}; + +template +TensorInfo::TensorInfo() { + data = nullptr; + dims = 0; +} + +template +TensorInfo::TensorInfo(T* p, + int dim, + IndexType sz[MAX_TENSORINFO_DIMS], + IndexType st[MAX_TENSORINFO_DIMS]) { + data = p; + dims = dim; + AT_ASSERT(dims < MAX_TENSORINFO_DIMS); + + for (int i = 0; i < dim; ++i) { + sizes[i] = sz[i]; + strides[i] = st[i]; + } +} + +template +void +TensorInfo::reduceDim(int dim) { + TORCH_CHECK(dim < dims && dim >= 0, "expected dim between 0 and dims - 1"); + sizes[dim] = 1; +} + +template +int +TensorInfo::collapseDims(const int excludeDim) { + auto result = at::collapse_dims(sizes, strides, dims, excludeDim); + dims = std::get<1>(result); + return std::get<0>(result); +} + +// Translate a linear index for the apply to a T* offset; +// specialized on `Dims` to reduce nvcc compilation time +template +struct IndexToOffset { + static __host__ __device__ IndexType get( + IndexType linearId, + const TensorInfo& info) { + + IndexType offset = 0; + + // Uses static dims + for (int i = Dims - 1; i > 0; --i) { + IndexType curDimIndex = linearId % info.sizes[i]; + IndexType curDimOffset = curDimIndex * info.strides[i]; + offset += curDimOffset; + linearId /= info.sizes[i]; + } + + return offset + linearId * info.strides[0]; + } +}; + +// Uses dynamic (runtime) instead of static (compiletime) dims +template +struct IndexToOffset { + static inline __host__ __device__ IndexType get( + IndexType linearId, + const TensorInfo& info) { + + IndexType offset = 0; + + for (int i = info.dims - 1; i > 0; --i) { + IndexType curDimIndex = linearId % info.sizes[i]; + IndexType curDimOffset = curDimIndex * info.strides[i]; + offset += curDimOffset; + linearId /= info.sizes[i]; + } + + return offset + linearId * info.strides[0]; + } +}; + +} // 
detail +} // cuda +} // at diff --git a/thirdparty/libtorch/include/ATen/cudnn/Descriptors.h b/thirdparty/libtorch/include/ATen/cudnn/Descriptors.h new file mode 100644 index 0000000000..6853017cde --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/Descriptors.h @@ -0,0 +1,287 @@ +#pragma once + +#include +#include + +#include +#include +#include +#include +#include +#include + +namespace at { namespace native { + +// TODO: Add constructors for all of the descriptors + +inline int dataSize(cudnnDataType_t dataType) +{ + switch (dataType) { + case CUDNN_DATA_HALF: return 2; + case CUDNN_DATA_FLOAT: return 4; + default: return 8; + } +} + +// The stride for a size-1 dimensions is not uniquely determined; in +// fact, it can be anything you want, because the fact that the +// tensor is size 1 at this dimension means that you will never actually +// try advancing your pointer by this stride. +// +// However, CuDNN has a much more stringent requirement on strides: +// if you are passing a contiguous input, it better be the case +// that the stride for dim i is the product of the sizes of dims +// i+1 to the end. This stride is indeed uniquely determined. This +// function modifies 'stride' in place so this invariant holds. +static inline void fixSizeOneDimStride(int dim, const int *size, int *stride) { + int64_t z = 1; + for(int d = dim-1; d >= 0; d--) + { + if (size[d] == 1) { + stride[d] = z; + } else { + z *= size[d]; + } + } +} + +template +struct DescriptorDeleter { + void operator()(T* x) { + if (x != nullptr) { + AT_CUDNN_CHECK(dtor(x)); + } + } +}; + +// A generic class for wrapping cuDNN descriptor types. All you need +// is to give the underlying type the Descriptor_t points to (usually, +// if it's cudnnTensorDescriptor_t it points to cudnnTensorStruct), +// the constructor and the destructor. Subclasses are responsible +// for defining a set() function to actually set the descriptor. +// +// Descriptors default construct to a nullptr, and have a descriptor +// initialized the first time you call set() or any other initializing +// function. +template +class TORCH_CUDA_API Descriptor +{ +public: + // TODO: Figure out why const-correctness doesn't work here + + // Use desc() to access the underlying descriptor pointer in + // a read-only fashion. Most client code should use this. + // If the descriptor was never initialized, this will return + // nullptr. + T* desc() const { return desc_.get(); } + T* desc() { return desc_.get(); } + + // Use mut_desc() to access the underlying desciptor pointer + // if you intend to modify what it points to (e.g., using + // cudnnSetFooDescriptor). This will ensure that the descriptor + // is initialized. Code in this file will use this function. + T* mut_desc() { init(); return desc_.get(); } +protected: + void init() { + if (desc_ == nullptr) { + T* raw_desc; + AT_CUDNN_CHECK(ctor(&raw_desc)); + desc_.reset(raw_desc); + } + } +private: + std::unique_ptr> desc_; +}; + +class TORCH_CUDA_API TensorDescriptor + : public Descriptor +{ +public: + TensorDescriptor() {} + explicit TensorDescriptor(const at::Tensor &t, size_t pad = 0) { + set(t, pad); + } + + // Note [CuDNN broadcast padding] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // pad specifies the minimum dimensionality of the tensor descriptor + // we produce (it doesn't have anything to do with, e.g., convolution + // padding). If 't' is lower-dimensional than 'pad', the remaining + // dimensions (on the right) are padded with ones. This doesn't + // affect the underlying data layout. 
This is particularly useful for + // dealing with a pecularity of the CuDNN API, which is that broadcasting in CuDNN is + // done in two steps: first, the client code is expected to pad out + // (the dimensions) input tensors to be the same dimension as the + // target broadcast, and then second, CuDNN takes of actually + // broadcasting size 1 dimensions. + + void set(const at::Tensor &t, size_t pad = 0); + void set(cudnnDataType_t dataType, IntArrayRef sizes, IntArrayRef strides, size_t pad = 0); + + void print(); + +private: + void set(cudnnDataType_t dataType, int dim, int* size, int* stride) { + fixSizeOneDimStride(dim, size, stride); + AT_CUDNN_CHECK(cudnnSetTensorNdDescriptor(mut_desc(), dataType, dim, size, stride)); + } +}; + +std::ostream& operator<<(std::ostream & out, const TensorDescriptor& d); + +class FilterDescriptor + : public Descriptor +{ +public: + void set(const at::Tensor &t, int64_t pad = 0); + +private: + void set(cudnnDataType_t dataType, int dim, int* size, cudnnTensorFormat_t filter_format) { + AT_CUDNN_CHECK(cudnnSetFilterNdDescriptor(mut_desc(), dataType, filter_format, dim, size)); + } +}; + +struct TORCH_CUDA_API ConvolutionDescriptor + : public Descriptor +{ + void set(cudnnDataType_t dataType, int dim, int* pad, int* stride, int * upscale /* aka dilation */, int groups) { + cudnnDataType_t mathType = dataType; + if (dataType == CUDNN_DATA_HALF) mathType = CUDNN_DATA_FLOAT; + AT_CUDNN_CHECK(cudnnSetConvolutionNdDescriptor(mut_desc(), dim, pad, stride, upscale, + CUDNN_CROSS_CORRELATION, mathType)); + AT_CUDNN_CHECK(cudnnSetConvolutionGroupCount(mut_desc(), groups)); + // See Note [behavior of cudnnFind and cudnnGet] + AT_CUDNN_CHECK(cudnnSetConvolutionMathType(mut_desc(), CUDNN_DEFAULT_MATH)); + if(dataType == CUDNN_DATA_HALF) + AT_CUDNN_CHECK(cudnnSetConvolutionMathType(mut_desc(), CUDNN_TENSOR_OP_MATH)); + + } +}; + +struct TORCH_CUDA_API SpatialTransformerDescriptor + : public Descriptor +{ + void set(cudnnDataType_t dataType, int dim, int* size) { + AT_CUDNN_CHECK(cudnnSetSpatialTransformerNdDescriptor(mut_desc(), CUDNN_SAMPLER_BILINEAR, dataType, dim, size)); + } +}; + +struct TORCH_CUDA_API DropoutDescriptor + : public Descriptor +{ + at::Tensor state; + + // Initialize a dropout descriptor's RNG state. + // WARNING: This function is very expensive, avoid calling this function! + // NB: it takes a Type so that we can generate a Variable if necessary. + void initialize_rng(cudnnHandle_t handle, float dropout, long long int seed, const TensorOptions& options) { + AT_ASSERTM(dropout > 0, "dropout must be nonzero; otherwise call set_no_dropout"); + size_t state_size; + AT_CUDNN_CHECK(cudnnDropoutGetStatesSize(handle, &state_size)); + AT_ASSERT(options.device().type() == kCUDA); + AT_ASSERT(options.dtype() == kByte); + state = at::empty({static_cast(state_size)}, options); + setCuDNNStreamToCurrent(); + AT_CUDNN_CHECK(cudnnSetDropoutDescriptor(mut_desc(), handle, dropout, state.data_ptr(), state_size, seed)); + } + + // Restore a dropout descriptor given a dropout probability and existing RNG state. 
+ void set(cudnnHandle_t handle, float dropout, at::Tensor state_) { + AT_ASSERTM(dropout > 0, "dropout must be nonzero; otherwise call set_no_dropout"); + state = state_; + void *state_ptr = state.data_ptr(); + size_t state_size = state.size(0); + // NB: The seed doesn't actually matter, so we give a dummy value + setCuDNNStreamToCurrent(); + AT_CUDNN_CHECK(cudnnRestoreDropoutDescriptor(mut_desc(), handle, dropout, state_ptr, state_size, 0 /* seed */)); + } + + // Restore a dropout descriptor corresponding to no dropout + void set_no_dropout(cudnnHandle_t handle) { + // NB: seed doesn't matter when dropout = 0, because no random number + // initialization actually takes place when there is no dropout. + // NB: Empirically, cudnnSetDropoutDescriptor is cheap when + // dropoot == 0 + AT_CUDNN_CHECK(cudnnSetDropoutDescriptor(mut_desc(), handle, 0 /* dropout */, nullptr, 0 /* state_size */, 0 /* seed */)); + } +}; + +struct TORCH_CUDA_API RNNDescriptor + : public Descriptor +{ + DropoutDescriptor dropout_desc_; + void set(cudnnHandle_t handle, int hidden_size, int num_layers, DropoutDescriptor&& dropout_desc, + cudnnRNNInputMode_t input_mode, cudnnDirectionMode_t bidirectional, + cudnnRNNMode_t mode, cudnnDataType_t datatype, cudnnDataType_t input_type, cudnnRNNAlgo_t algo) { + dropout_desc_ = std::move(dropout_desc); + AT_CUDNN_CHECK(cudnnSetRNNDescriptor_v6( + handle, + mut_desc(), + hidden_size, + num_layers, + dropout_desc_.desc(), + input_mode, + bidirectional, + mode, + algo, + datatype)); +#if CUDA_VERSION >= 9000 + cudaDeviceProp* prop = at::cuda::getCurrentDeviceProperties(); + if (prop->major >= 7) { + if (input_type == CUDNN_DATA_HALF) { + cudnnSetRNNMatrixMathType(mut_desc(), CUDNN_TENSOR_OP_MATH); + } else { + // Technically, as the default it's not necessary to explicitly + // set this. 
+ cudnnSetRNNMatrixMathType(mut_desc(), CUDNN_DEFAULT_MATH); + } + } +#endif + } +}; + +struct TORCH_CUDA_API CTCLossDescriptor + : public Descriptor +{ + void set(cudnnDataType_t datatype) { + AT_CUDNN_CHECK(cudnnSetCTCLossDescriptor(mut_desc(), datatype)); + } +#if CUDNN_VERSION >= 7600 + void setEx( + cudnnDataType_t datatype, + cudnnLossNormalizationMode_t normMode, + cudnnNanPropagation_t gradMode) { + AT_CUDNN_CHECK( + cudnnSetCTCLossDescriptorEx(mut_desc(), datatype, normMode, gradMode)); + } +#endif +}; + +union Constant +{ + float f; + double d; + Constant(cudnnDataType_t dataType, double value) { + if (dataType == CUDNN_DATA_HALF || dataType == CUDNN_DATA_FLOAT) { + f = static_cast(value); + } else { + d = value; + } + } +}; + +}} // namespace diff --git a/pyrevolve/examples/__init__.py b/thirdparty/libtorch/include/ATen/cudnn/Exceptions.h similarity index 100% rename from pyrevolve/examples/__init__.py rename to thirdparty/libtorch/include/ATen/cudnn/Exceptions.h diff --git a/thirdparty/libtorch/include/ATen/cudnn/Handle.h b/thirdparty/libtorch/include/ATen/cudnn/Handle.h new file mode 100644 index 0000000000..408d2edf99 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/Handle.h @@ -0,0 +1,10 @@ +#pragma once + +#include +#include + +namespace at { namespace native { + +TORCH_CUDA_API cudnnHandle_t getCudnnHandle(); + +}} // namespace at::native diff --git a/thirdparty/libtorch/include/ATen/cudnn/Handles.h b/thirdparty/libtorch/include/ATen/cudnn/Handles.h new file mode 100644 index 0000000000..5b9a081f0c --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/Handles.h @@ -0,0 +1,2 @@ +#pragma once +#include diff --git a/thirdparty/libtorch/include/ATen/cudnn/Types.h b/thirdparty/libtorch/include/ATen/cudnn/Types.h new file mode 100644 index 0000000000..70e5dd4ab7 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/Types.h @@ -0,0 +1,12 @@ +#pragma once + +#include +#include + +namespace at { namespace native { + +cudnnDataType_t getCudnnDataType(const at::Tensor& tensor); + +int64_t cudnn_version(); + +}} // namespace at::cudnn diff --git a/thirdparty/libtorch/include/ATen/cudnn/Utils.h b/thirdparty/libtorch/include/ATen/cudnn/Utils.h new file mode 100644 index 0000000000..7d6405798d --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/Utils.h @@ -0,0 +1,27 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace at { namespace native { + +inline void setCuDNNStreamToCurrent() { + // TODO: Should getCurrentStream be a method on Context? + AT_CUDNN_CHECK(cudnnSetStream(getCudnnHandle(), at::cuda::getCurrentCUDAStream())); +} + +// cuDNN has a buggy check for tensor being contiguous (that is, it does +// not ignore stride for dimension that is equal to 0). This function +// makes tensors which have zero stride contiguous, by setting the +// strides to 1 as cuDNN likes. +inline Tensor contiguousIfZeroInStrides(const Tensor& t) { + for (auto s : t.strides()) { + if (s == 0) return t.contiguous(); + } + return t; +} + +}} diff --git a/thirdparty/libtorch/include/ATen/cudnn/cudnn-wrapper.h b/thirdparty/libtorch/include/ATen/cudnn/cudnn-wrapper.h new file mode 100644 index 0000000000..66a926fe57 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/cudnn/cudnn-wrapper.h @@ -0,0 +1,16 @@ +#pragma once + +#include + +#define STRINGIFY(x) #x +#define STRING(x) STRINGIFY(x) + +#if CUDNN_MAJOR < 6 +#pragma message ("CuDNN v" STRING(CUDNN_MAJOR) " found, but need at least CuDNN v6. 
You can get the latest version of CuDNN from https://developer.nvidia.com/cudnn or disable CuDNN with USE_CUDNN=0") +#pragma message "We strongly encourage you to move to 6.0 and above." +#pragma message "This message is intended to annoy you enough to update." +#endif + +#undef STRINGIFY +#undef STRING + diff --git a/thirdparty/libtorch/include/ATen/detail/CPUGuardImpl.h b/thirdparty/libtorch/include/ATen/detail/CPUGuardImpl.h new file mode 100644 index 0000000000..65751c15f5 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/detail/CPUGuardImpl.h @@ -0,0 +1,61 @@ +#pragma once + +#include +#include + +namespace at { +namespace detail { + +struct CPUGuardImpl final : public c10::impl::DeviceGuardImplInterface { + CPUGuardImpl() {} + DeviceType type() const override { + return DeviceType::CPU; + } + Device exchangeDevice(Device) const override { + // no-op + return Device(DeviceType::CPU, -1); + + } + Device getDevice() const override { + return Device(DeviceType::CPU, -1); + } + void setDevice(Device) const override { + // no-op + } + void uncheckedSetDevice(Device d) const noexcept override { + // no-op + } + Stream getStream(Device d) const noexcept override { + // no-op + return Stream(Stream::DEFAULT, Device(DeviceType::CPU, -1)); + } + // NB: These do NOT set the current device + Stream exchangeStream(Stream s) const noexcept override { + // no-op + return Stream(Stream::DEFAULT, Device(DeviceType::CPU, -1)); + } + DeviceIndex deviceCount() const noexcept override { + return 1; + } + + // Event-related functions + void record(void** event, + const Stream& stream, + const DeviceIndex device_index, + const EventFlag flag) const override { + TORCH_CHECK(false, "CPU backend doesn't support events."); + } + void block( + void* event, + const Stream& stream) const override { + TORCH_CHECK(false, "CPU backend doesn't support events.") + } + bool queryEvent(void* event) const override { + TORCH_CHECK(false, "CPU backend doesn't support events.") + } + void destroyEvent( + void* event, + const DeviceIndex device_index) const noexcept override { } +}; + +}} // namespace at::detail diff --git a/thirdparty/libtorch/include/ATen/detail/CUDAHooksInterface.h b/thirdparty/libtorch/include/ATen/detail/CUDAHooksInterface.h new file mode 100644 index 0000000000..46cf5b4367 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/detail/CUDAHooksInterface.h @@ -0,0 +1,171 @@ +#pragma once + +#include +#include +#include +#include +#include + +#include +#include +#include + +// Forward-declares THCState +struct THCState; + +// Forward-declares at::cuda::NVRTC +namespace at { namespace cuda { +struct NVRTC; +}} // at::cuda + +namespace at { +class Context; +} + +// NB: Class must live in `at` due to limitations of Registry.h. +namespace at { + +constexpr const char* CUDA_HELP = + "PyTorch splits its backend into two shared libraries: a CPU library " + "and a CUDA library; this error has occurred because you are trying " + "to use some CUDA functionality, but the CUDA library has not been " + "loaded by the dynamic linker for some reason. The CUDA library MUST " + "be loaded, EVEN IF you don't directly use any symbols from the CUDA library! " + "One common culprit is a lack of -Wl,--no-as-needed in your link arguments; many " + "dynamic linkers will delete dynamic library dependencies if you don't " + "depend on any of their symbols. 
You can check if this has occurred by " + "using ldd on your binary to see if there is a dependency on *_cuda.so " + "library."; + +// The CUDAHooksInterface is an omnibus interface for any CUDA functionality +// which we may want to call into from CPU code (and thus must be dynamically +// dispatched, to allow for separate compilation of CUDA code). How do I +// decide if a function should live in this class? There are two tests: +// +// 1. Does the *implementation* of this function require linking against +// CUDA libraries? +// +// 2. Is this function *called* from non-CUDA ATen code? +// +// (2) should filter out many ostensible use-cases, since many times a CUDA +// function provided by ATen is only really ever used by actual CUDA code. +// +// TODO: Consider putting the stub definitions in another class, so that one +// never forgets to implement each virtual function in the real implementation +// in CUDAHooks. This probably doesn't buy us much though. +struct CAFFE2_API CUDAHooksInterface { + // This should never actually be implemented, but it is used to + // squelch -Werror=non-virtual-dtor + virtual ~CUDAHooksInterface() {} + + // Initialize THCState and, transitively, the CUDA state + virtual std::unique_ptr initCUDA() const { + TORCH_CHECK(false, "Cannot initialize CUDA without ATen_cuda library. ", CUDA_HELP); + } + + virtual Generator* getDefaultCUDAGenerator(DeviceIndex device_index = -1) const { + TORCH_CHECK(false, "Cannot get default CUDA generator without ATen_cuda library. ", CUDA_HELP); + } + + virtual Device getDeviceFromPtr(void* data) const { + TORCH_CHECK(false, "Cannot get device of pointer on CUDA without ATen_cuda library. ", CUDA_HELP); + } + + virtual bool isPinnedPtr(void* data) const { + return false; + } + + virtual bool hasCUDA() const { + return false; + } + + virtual bool hasMAGMA() const { + return false; + } + + virtual bool hasCuDNN() const { + return false; + } + + virtual const at::cuda::NVRTC& nvrtc() const { + TORCH_CHECK(false, "NVRTC requires CUDA. ", CUDA_HELP); + } + + virtual int64_t current_device() const { + return -1; + } + + virtual bool hasPrimaryContext(int64_t device_index) const { + TORCH_CHECK(false, "Cannot call hasPrimaryContext(", device_index, ") without ATen_cuda library. ", CUDA_HELP); + } + + virtual c10::optional getDevceIndexWithPrimaryContext() const { + return c10::nullopt; + } + + virtual Allocator* getPinnedMemoryAllocator() const { + TORCH_CHECK(false, "Pinned memory requires CUDA. ", CUDA_HELP); + } + + virtual bool compiledWithCuDNN() const { + return false; + } + + virtual bool compiledWithMIOpen() const { + return false; + } + + virtual bool supportsDilatedConvolutionWithCuDNN() const { + return false; + } + + virtual bool supportsDepthwiseConvolutionWithCuDNN() const { + return false; + } + + virtual long versionCuDNN() const { + TORCH_CHECK(false, "Cannot query cuDNN version without ATen_cuda library. ", CUDA_HELP); + } + + virtual std::string showConfig() const { + TORCH_CHECK(false, "Cannot query detailed CUDA version without ATen_cuda library. ", CUDA_HELP); + } + + virtual double batchnormMinEpsilonCuDNN() const { + TORCH_CHECK(false, + "Cannot query batchnormMinEpsilonCuDNN() without ATen_cuda library. ", CUDA_HELP); + } + + virtual int64_t cuFFTGetPlanCacheMaxSize(int64_t device_index) const { + TORCH_CHECK(false, "Cannot access cuFFT plan cache without ATen_cuda library. 
", CUDA_HELP); + } + + virtual void cuFFTSetPlanCacheMaxSize(int64_t device_index, int64_t max_size) const { + TORCH_CHECK(false, "Cannot access cuFFT plan cache without ATen_cuda library. ", CUDA_HELP); + } + + virtual int64_t cuFFTGetPlanCacheSize(int64_t device_index) const { + TORCH_CHECK(false, "Cannot access cuFFT plan cache without ATen_cuda library. ", CUDA_HELP); + } + + virtual void cuFFTClearPlanCache(int64_t device_index) const { + TORCH_CHECK(false, "Cannot access cuFFT plan cache without ATen_cuda library. ", CUDA_HELP); + } + + virtual int getNumGPUs() const { + return 0; + } +}; + +// NB: dummy argument to suppress "ISO C++11 requires at least one argument +// for the "..." in a variadic macro" +struct CAFFE2_API CUDAHooksArgs {}; + +C10_DECLARE_REGISTRY(CUDAHooksRegistry, CUDAHooksInterface, CUDAHooksArgs); +#define REGISTER_CUDA_HOOKS(clsname) \ + C10_REGISTER_CLASS(CUDAHooksRegistry, clsname, clsname) + +namespace detail { +CAFFE2_API const CUDAHooksInterface& getCUDAHooks(); +} // namespace detail +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/detail/FunctionTraits.h b/thirdparty/libtorch/include/ATen/detail/FunctionTraits.h new file mode 100644 index 0000000000..aab7300b58 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/detail/FunctionTraits.h @@ -0,0 +1,78 @@ +#pragma once + +#include + +// Modified from https://stackoverflow.com/questions/7943525/is-it-possible-to-figure-out-the-parameter-type-and-return-type-of-a-lambda + +// Fallback, anything with an operator() +template +struct function_traits : public function_traits { +}; + +// Pointers to class members that are themselves functors. +// For example, in the following code: +// template +// struct S { +// func_t f; +// }; +// template +// S make_s(func_t f) { +// return S { .f = f }; +// } +// +// auto s = make_s([] (int, float) -> double { /* ... */ }); +// +// function_traits traits; +template +struct function_traits : public function_traits { +}; + +// Const class member functions +template +struct function_traits : public function_traits { +}; + +// Reference types +template +struct function_traits : public function_traits {}; +template +struct function_traits : public function_traits {}; + +// Free functions +template +struct function_traits { + // arity is the number of arguments. + enum { arity = sizeof...(Args) }; + + typedef std::tuple ArgsTuple; + typedef ReturnType result_type; + + template + struct arg + { + typedef typename std::tuple_element>::type type; + // the i-th argument is equivalent to the i-th tuple element of a tuple + // composed of those arguments. 
+ }; +}; + +template +struct nullary_function_traits { + using traits = function_traits; + using result_type = typename traits::result_type; +}; + +template +struct unary_function_traits { + using traits = function_traits; + using result_type = typename traits::result_type; + using arg1_t = typename traits::template arg<0>::type; +}; + +template +struct binary_function_traits { + using traits = function_traits; + using result_type = typename traits::result_type; + using arg1_t = typename traits::template arg<0>::type; + using arg2_t = typename traits::template arg<1>::type; +}; diff --git a/thirdparty/libtorch/include/ATen/detail/HIPHooksInterface.h b/thirdparty/libtorch/include/ATen/detail/HIPHooksInterface.h new file mode 100644 index 0000000000..4c53eb4e92 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/detail/HIPHooksInterface.h @@ -0,0 +1,74 @@ +#pragma once + +#include +#include +#include + +#include + +#include +#include +#include + +// Forward-declares THHState +struct THHState; + +namespace at { +class Context; +} + +// NB: Class must live in `at` due to limitations of Registry.h. +namespace at { + +// The HIPHooksInterface is an omnibus interface for any HIP functionality +// which we may want to call into from CPU code (and thus must be dynamically +// dispatched, to allow for separate compilation of HIP code). See +// CUDAHooksInterface for more detailed motivation. +struct CAFFE2_API HIPHooksInterface { + // This should never actually be implemented, but it is used to + // squelch -Werror=non-virtual-dtor + virtual ~HIPHooksInterface() {} + + // Initialize THHState and, transitively, the HIP state + virtual std::unique_ptr initHIP() const { + AT_ERROR("Cannot initialize HIP without ATen_hip library."); + } + + virtual std::unique_ptr initHIPGenerator(Context*) const { + AT_ERROR("Cannot initialize HIP generator without ATen_hip library."); + } + + virtual bool hasHIP() const { + return false; + } + + virtual int64_t current_device() const { + return -1; + } + + virtual Allocator* getPinnedMemoryAllocator() const { + AT_ERROR("Pinned memory requires HIP."); + } + + virtual void registerHIPTypes(Context*) const { + AT_ERROR("Cannot registerHIPTypes() without ATen_hip library."); + } + + virtual int getNumGPUs() const { + return 0; + } +}; + +// NB: dummy argument to suppress "ISO C++11 requires at least one argument +// for the "..." 
in a variadic macro" +struct CAFFE2_API HIPHooksArgs {}; + +C10_DECLARE_REGISTRY(HIPHooksRegistry, HIPHooksInterface, HIPHooksArgs); +#define REGISTER_HIP_HOOKS(clsname) \ + C10_REGISTER_CLASS(HIPHooksRegistry, clsname, clsname) + +namespace detail { +CAFFE2_API const HIPHooksInterface& getHIPHooks(); + +} // namespace detail +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/detail/ScalarTypeConversions.h b/thirdparty/libtorch/include/ATen/detail/ScalarTypeConversions.h new file mode 100644 index 0000000000..5c197b2d44 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/detail/ScalarTypeConversions.h @@ -0,0 +1,24 @@ +#pragma once + +#include +#include +#include +#include + +namespace at { namespace detail { + +template +inline T load(const void* data, ScalarType src_type) { + return AT_DISPATCH_ALL_TYPES_AND2(at::ScalarType::Half, at::ScalarType::Bool, src_type, "load", [&]() { + return at::convert(*(scalar_t*)data); + }); +} + +template +inline void store(T value, void* dst, ScalarType dst_type) { + AT_DISPATCH_ALL_TYPES_AND2(at::ScalarType::Half, at::ScalarType::Bool, dst_type, "store", [&]() { + *(scalar_t*)dst = at::convert(value); + }); +} + +}} // namespace at::detail diff --git a/thirdparty/libtorch/include/ATen/div_rtn.h b/thirdparty/libtorch/include/ATen/div_rtn.h new file mode 100644 index 0000000000..7b6beec4b0 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/div_rtn.h @@ -0,0 +1,11 @@ +#pragma once + +// Integer division rounding to -Infinity +template +static inline T div_rtn(T x, T y) { + int q = x/y; + int r = x%y; + if ((r!=0) && ((r<0) != (y<0))) --q; + return q; +} + diff --git a/thirdparty/libtorch/include/ATen/dlpack.h b/thirdparty/libtorch/include/ATen/dlpack.h new file mode 100644 index 0000000000..f8dc8fcd2c --- /dev/null +++ b/thirdparty/libtorch/include/ATen/dlpack.h @@ -0,0 +1,141 @@ +/*! + * Copyright (c) 2017 by Contributors + * \file dlpack.h + * \brief The common header of DLPack. + */ +#ifndef DLPACK_DLPACK_H_ +#define DLPACK_DLPACK_H_ + +#ifdef __cplusplus +#define DLPACK_EXTERN_C extern "C" +#else +#define DLPACK_EXTERN_C +#endif + +/*! \brief The current version of dlpack */ +#define DLPACK_VERSION 010 + +/*! \brief DLPACK_DLL prefix for windows */ +#ifdef _WIN32 +#ifdef DLPACK_EXPORTS +#define DLPACK_DLL __declspec(dllexport) +#else +#define DLPACK_DLL __declspec(dllimport) +#endif +#else +#define DLPACK_DLL +#endif + +#include +#include + +#ifdef __cplusplus +extern "C" { +#endif +/*! + * \brief The device type in DLContext. + */ +typedef enum { + kDLCPU = 1, + kDLGPU = 2, + // kDLCPUPinned = kDLCPU | kDLGPU + kDLCPUPinned = 3, + kDLOpenCL = 4, + kDLMetal = 8, + kDLVPI = 9, + kDLROCM = 10, +} DLDeviceType; + +/*! + * \brief A Device context for Tensor and operator. + */ +typedef struct { + /*! \brief The device type used in the device. */ + DLDeviceType device_type; + /*! \brief The device index */ + int device_id; +} DLContext; + +/*! + * \brief The type code options DLDataType. + */ +typedef enum { + kDLInt = 0U, + kDLUInt = 1U, + kDLFloat = 2U, +} DLDataTypeCode; + +/*! + * \brief The data type the tensor can hold. + * + * Examples + * - float: type_code = 2, bits = 32, lanes=1 + * - float4(vectorized 4 float): type_code = 2, bits = 32, lanes=4 + * - int8: type_code = 0, bits = 8, lanes=1 + */ +typedef struct { + /*! + * \brief Type code of base types. + * We keep it uint8_t instead of DLDataTypeCode for minimal memory + * footprint, but the value should be one of DLDataTypeCode enum values. + * */ + uint8_t code; + /*! 
+ * \brief Number of bits, common choices are 8, 16, 32. + */ + uint8_t bits; + /*! \brief Number of lanes in the type, used for vector types. */ + uint16_t lanes; +} DLDataType; + +/*! + * \brief Plain C Tensor object, does not manage memory. + */ +typedef struct { + /*! + * \brief The opaque data pointer points to the allocated data. + * This will be CUDA device pointer or cl_mem handle in OpenCL. + * This pointer is always aligns to 256 bytes as in CUDA. + */ + void* data; + /*! \brief The device context of the tensor */ + DLContext ctx; + /*! \brief Number of dimensions */ + int ndim; + /*! \brief The data type of the pointer*/ + DLDataType dtype; + /*! \brief The shape of the tensor */ + int64_t* shape; + /*! + * \brief strides of the tensor, + * can be NULL, indicating tensor is compact. + */ + int64_t* strides; + /*! \brief The offset in bytes to the beginning pointer to data */ + uint64_t byte_offset; +} DLTensor; + +/*! + * \brief C Tensor object, manage memory of DLTensor. This data structure is + * intended to faciliate the borrowing of DLTensor by another framework. It is + * not meant to transfer the tensor. When the borrowing framework doesn't need + * the tensor, it should call the deleter to notify the host that the resource + * is no longer needed. + */ +typedef struct DLManagedTensor { + /*! \brief DLTensor which is being memory managed */ + DLTensor dl_tensor; + /*! \brief the context of the original host framework of DLManagedTensor in + * which DLManagedTensor is used in the framework. It can also be NULL. + */ + void * manager_ctx; + /*! \brief Destructor signature void (*)(void*) - this should be called + * to destruct manager_ctx which holds the DLManagedTensor. It can be NULL + * if there is no way for the caller to provide a reasonable destructor. 
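 *
 * A borrowing sketch, assuming a hypothetical producer function that is not
 * part of DLPack itself:
 *
 *   DLManagedTensor* m = producer_export();   // hand-off from the producer
 *   consume(m->dl_tensor);                    // read data/shape/strides only
 *   if (m->deleter) m->deleter(m);            // signal that the borrow is over
 *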
+ */ + void (*deleter)(struct DLManagedTensor * self); +} DLManagedTensor; +#ifdef __cplusplus +} // DLPACK_EXTERN_C +#endif +#endif // DLPACK_DLPACK_H_ diff --git a/thirdparty/libtorch/include/ATen/native/quantized/Copy.h b/thirdparty/libtorch/include/ATen/native/quantized/Copy.h new file mode 100644 index 0000000000..a1bd290b3a --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/Copy.h @@ -0,0 +1,11 @@ +#pragma once + +#include + +namespace at { +namespace native { + +Tensor& quantized_copy_from_float_(Tensor& self, const Tensor& src); + +} +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/native/quantized/cpu/fake_quantize_core.h b/thirdparty/libtorch/include/ATen/native/quantized/cpu/fake_quantize_core.h new file mode 100644 index 0000000000..b2c5ff3c14 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/cpu/fake_quantize_core.h @@ -0,0 +1,27 @@ +#include +#include +#include +#include + +/* FakeQuantize Op for PerChannelAffine quantization scheme */ +namespace at { +namespace native { +void fake_quantize_slice( + Tensor& output, + const Tensor& input, + float sc, + int64_t z_point, + int64_t quant_min, + int64_t quant_max); + +void fake_quantize_grad_slice( + Tensor& input_grad, + const Tensor& output_grad, + const Tensor& input, + float sc, + int64_t z_point, + int64_t quant_min, + int64_t quant_max); + +} // namespace native +} // namespace at diff --git a/thirdparty/libtorch/include/ATen/native/quantized/cpu/fbgemm_utils.h b/thirdparty/libtorch/include/ATen/native/quantized/cpu/fbgemm_utils.h new file mode 100644 index 0000000000..fe590b8449 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/cpu/fbgemm_utils.h @@ -0,0 +1,109 @@ +#pragma once + +#ifdef USE_FBGEMM +#include "fbgemm/Fbgemm.h" +#include "fbgemm/QuantUtils.h" + +#include +#include + +// The struct for the packed weight matrix (PackBMatrix) and the corresponding +// column offsets used for the fully connect layer, which are both prepared in +// the prepacking step to save the computations in the inference. Note the +// column offsets include the sum of the B columns as well as the scalar term +// B_zero_point * K, whereas the row offsets created by +// PackAWithQuantRowOffset/PackAWithIm2Col/PackAWithRowOffset are only the sum +// of the A rows. The column offsets are needed for the asymmetric quantization +// (affine quantization) of input matrix. +// Note that in JIT mode we can think of a way to fuse col_offsets with bias. +struct FBGEMM_API PackedLinearWeight { + std::unique_ptr> w; + c10::optional bias; + std::vector col_offsets; + std::vector w_scale; + std::vector w_zp; + c10::QScheme q_scheme; +}; + +template +struct FBGEMM_API PackedConvWeight { + std::unique_ptr> w; + c10::optional bias; + std::vector col_offsets; + std::vector kernel; + std::vector w_scale; + std::vector w_zp; + c10::QScheme q_scheme; +}; + +// PackWeight: Convert the weight from uint8 to int8. +inline void convert_uint8_int8( + int len, + const uint8_t* src_uint8, + int8_t* dst_int8) { + for (int i = 0; i < len; ++i) { + dst_int8[i] = static_cast(static_cast(src_uint8[i]) - 128); + } +} + +// UnpackWeight: Convert the weight from int8 to uint8. 
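// The two helpers are exact inverses of each other: the fixed offset of 128
// moves values between the uint8 encoding and the int8 encoding that fbgemm
// packs. Worked through the conversions defined here (values illustrative):
//   uint8_t u = 200; int8_t s; uint8_t back;
//   convert_uint8_int8(1, &u, &s);      // s    == 72  (200 - 128)
//   convert_int8_uint8(1, &s, &back);   // back == 200 (72 + 128)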
+inline void convert_int8_uint8( + int len, + const int8_t* src_int8, + uint8_t* dst_uint8) { + for (int i = 0; i < len; ++i) { + dst_uint8[i] = + static_cast(static_cast(src_int8[i]) + 128); + } +} + +namespace at { +namespace native { +namespace fbgemm_utils { + +template +fbgemm::conv_param_t MakeFbgemmConvParam( + int N, + int C, + int M, + const std::vector& image_shape, + int groups, + const std::vector& kernels, + const std::vector& strides, + const std::vector& pads, + const std::vector& dilations); + +// TODO: Remove functions below when ChannelsLast3d is ready. +Tensor MakeStridedQTensorCPU( + const IntArrayRef& sizes, + const IntArrayRef& strides, + const TensorOptions& options, + QuantizerPtr quantizer); + +Tensor MakeEmptyAffineQuantizedChannelsLast3dTensor( + int64_t N, + int64_t C, + int64_t D, + int64_t H, + int64_t W, + const TensorOptions& options, + double scale, + int64_t zero_point); + +Tensor MakeEmptyPerChannelAffineQuantizedChannelsLast3dTensor( + int64_t N, + int64_t C, + int64_t D, + int64_t H, + int64_t W, + const TensorOptions& options, + const Tensor& scales, + const Tensor& zero_points); + +Tensor ConvertToChannelsLast3dTensor(const Tensor& src); + +} // namespace fbgemm_utils +} // namespace native +} // namespace at + +#endif // USE_FBGEMM diff --git a/thirdparty/libtorch/include/ATen/native/quantized/cpu/init_qnnpack.h b/thirdparty/libtorch/include/ATen/native/quantized/cpu/init_qnnpack.h new file mode 100644 index 0000000000..dbfb406ea5 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/cpu/init_qnnpack.h @@ -0,0 +1,13 @@ +#pragma once + +#ifdef USE_PYTORCH_QNNPACK + +namespace at { +namespace native { + +void initQNNPACK(); + +} // namespace native +} // namespace at + +#endif diff --git a/thirdparty/libtorch/include/ATen/native/quantized/cpu/qnnpack_utils.h b/thirdparty/libtorch/include/ATen/native/quantized/cpu/qnnpack_utils.h new file mode 100644 index 0000000000..89e1ee112f --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/cpu/qnnpack_utils.h @@ -0,0 +1,86 @@ +#pragma once + +#ifdef USE_PYTORCH_QNNPACK +#include +#include + +struct QnnpackOperatorDeleter { + void operator()(pytorch_qnnp_operator_t op) { + pytorch_qnnp_delete_operator(op); + } +}; + +// PackedWeight struct for QNNPACK stores the original Weight and Bias as +// QNNPACK currently does not support an unpack function. Possible optimiation - +// For PyTorch Mobile, once the model is scripted and serialized we don't need +// to call unpack, so we can save some memory by checking for this case. +// Input scale is set to null in pre-pack step. QNNPACK needs bias quantized with +// input scale which is available at runtime in pytorch. During runtime if input +// scale value changes then we requantize bias with the updated scale. +// For inference we expect the graph to be static so the input scale should +// not change across consecutive inference calls. 
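// A rough sketch of the bias handling described above (the helper name and the
// rounding mode are illustrative, not QNNPACK's actual API): once the runtime
// input scale is known, the float bias is mapped to int32 with the combined
// scale input_scale * weight_scale, so it can be added straight into the int32
// accumulator of the quantized matmul; if input_scale changes, this step is
// simply repeated with the new value.
//
//   int32_t q_bias = static_cast<int32_t>(
//       std::nearbyint(fp_bias / (input_scale * w_scale)));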
+struct PackedLinearWeightsQnnp { + std::unique_ptr w; + at::Tensor orig_weight; + at::Tensor bias; + c10::optional input_scale; + double w_scale; + int64_t w_zp; +}; + +struct PackedConvWeightsQnnp { + std::unique_ptr w; + at::Tensor orig_weight; + at::Tensor bias; + c10::optional input_scale; + std::vector kernel; + double w_scale; + int64_t w_zp; +}; + +enum class Activation : uint8_t { NONE = 0, RELU = 1 }; + +#if defined(__ANDROID__) && !defined(__NDK_MAJOR__) +template +inline float Round(const float x) { + return ::nearbyintf(x); +} +inline double Round(const double x) { + return ::nearbyint(x); +} +#else +template +inline T Round(const T x) { + return std::nearbyint(x); +} +#endif + +inline uint8_t QuantizeUint8(float scale, int32_t zero_point, float value) { + const int32_t qmin = std::numeric_limits::min(); + const int32_t qmax = std::numeric_limits::max(); + auto r = zero_point + static_cast(Round(value / scale)); + r = std::max(r, qmin); + r = std::min(r, qmax); + return static_cast(r); +} + +inline std::pair activationLimits( + float scale, + int32_t zero_point, + Activation Ac) { + switch (Ac) { + case Activation::NONE: + return {std::numeric_limits::min(), + std::numeric_limits::max()}; + case Activation::RELU: + return {QuantizeUint8(scale, zero_point, 0.0), + std::numeric_limits::max()}; + default: +#ifdef _MSC_VER + __assume(0); +#else + __builtin_unreachable(); +#endif + } +} +#endif diff --git a/thirdparty/libtorch/include/ATen/native/quantized/cpu/quantized_ops.h b/thirdparty/libtorch/include/ATen/native/quantized/cpu/quantized_ops.h new file mode 100644 index 0000000000..2b742d8483 --- /dev/null +++ b/thirdparty/libtorch/include/ATen/native/quantized/cpu/quantized_ops.h @@ -0,0 +1,91 @@ +#include +#include +#include + +namespace at { +namespace native { + +using qrelu_fn = void (*)(const at::Tensor& /*qx*/, at::Tensor& /*qy*/); +using qadd_fn = + void (*)(Tensor& /*out*/, const Tensor& /*self*/, const Tensor& /*other*/); +using qmaxpool_2d_fn = void (*)( + const Tensor& qx, + int64_t iC, // input/output channels + int64_t iH, + int64_t iW, // input sizes + int64_t oH, + int64_t oW, // output sizes + int64_t kH, + int64_t kW, // kernel size + int64_t sH, + int64_t sW, // strides + int64_t pH, + int64_t pW, // padding + int64_t dH, + int64_t dW, // dilation + Tensor& qy); +using qadaptive_avg_pool2d_fn = void (*)( + const Tensor& qx, + Tensor& qy, + int64_t b, + int64_t sizeD, + int64_t isizeH, + int64_t isizeW, + int64_t osizeH, + int64_t osizeW, + int64_t istrideB, + int64_t istrideD, + int64_t istrideH, + int64_t istrideW); + +using qavg_pool2d_fn = void (*)( + const Tensor& qx, + Tensor& qy, + int64_t b, + int64_t nInputPlane, + int64_t inputWidth, + int64_t inputHeight, + int64_t outputWidth, + int64_t outputHeight, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + bool count_include_pad, + c10::optional divisor_override); + +using qupsample_bilinear2d_fn = void (*)( + Tensor& output, + const Tensor& input, + int64_t input_height, + int64_t input_width, + int64_t output_height, + int64_t output_width, + int64_t nbatch, + int64_t channels, + bool align_corners); + +using qcat_nhwc_fn = Tensor (*)( + const c10::List& qxs, + int64_t dim, + double scale, + int64_t zero_point); +using qtopk_fn = void(*)(Tensor&, Tensor&, const Tensor&, int64_t, int64_t, bool, bool); + +// using qavg_pool2d_fn +DECLARE_DISPATCH(qrelu_fn, qrelu_stub); +DECLARE_DISPATCH(qrelu_fn, qrelu6_stub); +DECLARE_DISPATCH(qadd_fn, qadd_stub); +DECLARE_DISPATCH(qadd_fn, 
qadd_relu_stub); +DECLARE_DISPATCH(qmaxpool_2d_fn, qmaxpool_2d_nhwc_stub); +DECLARE_DISPATCH(qadaptive_avg_pool2d_fn, qadaptive_avg_pool2d_nhwc_stub); +DECLARE_DISPATCH(qavg_pool2d_fn, qavg_pool2d_nhwc_stub); +DECLARE_DISPATCH(qupsample_bilinear2d_fn, qupsample_bilinear2d_nhwc_stub); +DECLARE_DISPATCH(qcat_nhwc_fn, qcat_nhwc_stub); +DECLARE_DISPATCH(qcat_nhwc_fn, qcat_relu_nhwc_stub); +DECLARE_DISPATCH(qtopk_fn, qtopk_stub); + +} // namespace native +} // namespace at diff --git a/thirdparty/libtorch/include/TH/TH.h b/thirdparty/libtorch/include/TH/TH.h new file mode 100644 index 0000000000..5d863b58d5 --- /dev/null +++ b/thirdparty/libtorch/include/TH/TH.h @@ -0,0 +1,23 @@ +#ifndef TH_INC +#define TH_INC + +#include + +#include +#ifdef USE_LAPACK +#include +#endif + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/TH/THAllocator.h b/thirdparty/libtorch/include/TH/THAllocator.h new file mode 100644 index 0000000000..b9acf0a754 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THAllocator.h @@ -0,0 +1,100 @@ +#pragma once + +#include + +#include + +#define TH_ALLOCATOR_MAPPED_SHARED 1 +#define TH_ALLOCATOR_MAPPED_SHAREDMEM 2 +#define TH_ALLOCATOR_MAPPED_EXCLUSIVE 4 +#define TH_ALLOCATOR_MAPPED_NOCREATE 8 +#define TH_ALLOCATOR_MAPPED_KEEPFD 16 +#define TH_ALLOCATOR_MAPPED_FROMFD 32 +#define TH_ALLOCATOR_MAPPED_UNLINK 64 + +/* default malloc/free allocator. malloc and realloc raise an error (using + * THError) on allocation failure. + */ +TH_API c10::Allocator* getTHDefaultAllocator(void); + +// Sentinel value/type to help distinguish the file descriptor constructor from +// the non-file descriptor constructor +enum WithFd { WITH_FD }; + +class CAFFE2_API THMapAllocator { + public: + THMapAllocator(const char *filename, int flags, size_t size); + THMapAllocator(WithFd, const char *filename, int fd, int flags, size_t size); + THMapAllocator(const THMapAllocator&) = delete; + THMapAllocator& operator=(const THMapAllocator&) = delete; + THMapAllocator(THMapAllocator&&) = delete; + THMapAllocator& operator=(THMapAllocator&&) = delete; + + const char* filename() const { return filename_.c_str(); } + int fd() const { +#ifdef _WIN32 + AT_ERROR("THMapAllocator::fd() is unsupported on Windows"); +#else + return fd_; +#endif + } + ptrdiff_t size() const { return size_; } + // Return a pointer to the actual data for this allocator + // (in the case of the refcounted allocator, this is offset + // from the base pointer.) + virtual void* data() const { return base_ptr_; } + + static THMapAllocator* fromDataPtr(const at::DataPtr&); + static at::DataPtr makeDataPtr(const char *filename, int flags, size_t size, size_t* actual_size_out); + static at::DataPtr makeDataPtr(WithFd, const char *filename, int fd, int flags, size_t size, size_t* actual_size_out); + + // Closes the data. Helps us avoid destructor shenanigans + virtual void close(); + + // This is very dangerous. 
You have to redefine this destructor for each + // subclass + virtual ~THMapAllocator() { close(); } + +protected: + bool closed_ = false; + std::string filename_; + int flags_ = 0; + ptrdiff_t size_; /* mapped size */ +#ifdef _WIN32 + void* handle_; + void* event_; + std::string eventname_; +#else + int fd_ = -1; +#endif + void *base_ptr_ = nullptr; +}; + +// Base-from-member idiom +struct CAFFE2_API THRefcountedMapAllocatorArgCheck { + THRefcountedMapAllocatorArgCheck(int flags); +}; + +class CAFFE2_API THRefcountedMapAllocator + : private THRefcountedMapAllocatorArgCheck, + public THMapAllocator { + public: + THRefcountedMapAllocator(const char *filename, int flags, size_t size); + THRefcountedMapAllocator(WithFd, const char *filename, int fd, int flags, size_t size); + + static THRefcountedMapAllocator* fromDataPtr(const at::DataPtr&); + static at::DataPtr makeDataPtr(const char *filename, int flags, size_t size, size_t* actual_size_out); + static at::DataPtr makeDataPtr(WithFd, const char *filename, int fd, int flags, size_t size, size_t* actual_size_out); + + void* data() const override; + + void incref(); + int decref(); + void close() override; + + virtual ~THRefcountedMapAllocator() { close(); } + +protected: + void checkFlags(); + void initializeAlloc(); +}; diff --git a/thirdparty/libtorch/include/TH/THBlas.h b/thirdparty/libtorch/include/TH/THBlas.h new file mode 100644 index 0000000000..ea06c307c1 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THBlas.h @@ -0,0 +1,14 @@ +#ifndef TH_BLAS_INC +#define TH_BLAS_INC + +#include + +#define THBlas_(NAME) TH_CONCAT_4(TH,Real,Blas_,NAME) + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/TH/THDiskFile.h b/thirdparty/libtorch/include/TH/THDiskFile.h new file mode 100644 index 0000000000..d6bab2168c --- /dev/null +++ b/thirdparty/libtorch/include/TH/THDiskFile.h @@ -0,0 +1,19 @@ +#ifndef TH_DISK_FILE_INC +#define TH_DISK_FILE_INC + +#include + +TH_API THFile *THDiskFile_new(const char *name, const char *mode, int isQuiet); +TH_API THFile *THPipeFile_new(const char *name, const char *mode, int isQuiet); + +TH_API const char *THDiskFile_name(THFile *self); + +TH_API int THDiskFile_isLittleEndianCPU(void); +TH_API int THDiskFile_isBigEndianCPU(void); +TH_API void THDiskFile_nativeEndianEncoding(THFile *self); +TH_API void THDiskFile_littleEndianEncoding(THFile *self); +TH_API void THDiskFile_bigEndianEncoding(THFile *self); +TH_API void THDiskFile_longSize(THFile *self, int size); +TH_API void THDiskFile_noBuffer(THFile *self); + +#endif diff --git a/thirdparty/libtorch/include/TH/THFile.h b/thirdparty/libtorch/include/TH/THFile.h new file mode 100644 index 0000000000..144cec47ef --- /dev/null +++ b/thirdparty/libtorch/include/TH/THFile.h @@ -0,0 +1,93 @@ +#ifndef TH_FILE_INC +#define TH_FILE_INC + +#include + +typedef struct THFile__ THFile; + +TH_API int THFile_isOpened(THFile *self); +TH_API int THFile_isQuiet(THFile *self); +TH_API int THFile_isReadable(THFile *self); +TH_API int THFile_isWritable(THFile *self); +TH_API int THFile_isBinary(THFile *self); +TH_API int THFile_isAutoSpacing(THFile *self); +TH_API int THFile_hasError(THFile *self); + +TH_API void THFile_binary(THFile *self); +TH_API void THFile_ascii(THFile *self); +TH_API void THFile_autoSpacing(THFile *self); +TH_API void THFile_noAutoSpacing(THFile *self); +TH_API void THFile_quiet(THFile *self); +TH_API void THFile_pedantic(THFile *self); +TH_API void THFile_clearError(THFile *self); + +/* scalar */ +TH_API uint8_t 
THFile_readByteScalar(THFile *self); +TH_API int8_t THFile_readCharScalar(THFile *self); +TH_API int16_t THFile_readShortScalar(THFile *self); +TH_API int32_t THFile_readIntScalar(THFile *self); +TH_API int64_t THFile_readLongScalar(THFile *self); +TH_API float THFile_readFloatScalar(THFile *self); +TH_API double THFile_readDoubleScalar(THFile *self); + +TH_API void THFile_writeByteScalar(THFile *self, uint8_t scalar); +TH_API void THFile_writeCharScalar(THFile *self, int8_t scalar); +TH_API void THFile_writeShortScalar(THFile *self, int16_t scalar); +TH_API void THFile_writeIntScalar(THFile *self, int32_t scalar); +TH_API void THFile_writeLongScalar(THFile *self, int64_t scalar); +TH_API void THFile_writeFloatScalar(THFile *self, float scalar); +TH_API void THFile_writeDoubleScalar(THFile *self, double scalar); + +/* storage */ +TH_API size_t THFile_readByte(THFile *self, THByteStorage *storage); +TH_API size_t THFile_readChar(THFile *self, THCharStorage *storage); +TH_API size_t THFile_readShort(THFile *self, THShortStorage *storage); +TH_API size_t THFile_readInt(THFile *self, THIntStorage *storage); +TH_API size_t THFile_readLong(THFile *self, THLongStorage *storage); +TH_API size_t THFile_readFloat(THFile *self, THFloatStorage *storage); +TH_API size_t THFile_readDouble(THFile *self, THDoubleStorage *storage); +TH_API size_t THFile_readBool(THFile *self, THBoolStorage *storage); + +TH_API size_t THFile_writeByte(THFile *self, THByteStorage *storage); +TH_API size_t THFile_writeChar(THFile *self, THCharStorage *storage); +TH_API size_t THFile_writeShort(THFile *self, THShortStorage *storage); +TH_API size_t THFile_writeInt(THFile *self, THIntStorage *storage); +TH_API size_t THFile_writeLong(THFile *self, THLongStorage *storage); +TH_API size_t THFile_writeFloat(THFile *self, THFloatStorage *storage); +TH_API size_t THFile_writeDouble(THFile *self, THDoubleStorage *storage); +TH_API size_t THFile_writeBool(THFile *self, THBoolStorage *storage); + +/* raw */ +TH_API size_t THFile_readByteRaw(THFile *self, uint8_t *data, size_t n); +TH_API size_t THFile_readCharRaw(THFile *self, int8_t *data, size_t n); +TH_API size_t THFile_readShortRaw(THFile *self, int16_t *data, size_t n); +TH_API size_t THFile_readIntRaw(THFile *self, int32_t *data, size_t n); +TH_API size_t THFile_readLongRaw(THFile *self, int64_t *data, size_t n); +TH_API size_t THFile_readFloatRaw(THFile *self, float *data, size_t n); +TH_API size_t THFile_readDoubleRaw(THFile *self, double *data, size_t n); +TH_API size_t THFile_readStringRaw(THFile *self, const char *format, char **str_); /* you must deallocate str_ */ + +TH_API size_t THFile_writeByteRaw(THFile *self, uint8_t *data, size_t n); +TH_API size_t THFile_writeCharRaw(THFile *self, int8_t *data, size_t n); +TH_API size_t THFile_writeShortRaw(THFile *self, int16_t *data, size_t n); +TH_API size_t THFile_writeIntRaw(THFile *self, int32_t *data, size_t n); +TH_API size_t THFile_writeLongRaw(THFile *self, int64_t *data, size_t n); +TH_API size_t THFile_writeFloatRaw(THFile *self, float *data, size_t n); +TH_API size_t THFile_writeDoubleRaw(THFile *self, double *data, size_t n); +TH_API size_t THFile_writeStringRaw(THFile *self, const char *str, size_t size); + +TH_API THHalf THFile_readHalfScalar(THFile *self); +TH_API void THFile_writeHalfScalar(THFile *self, THHalf scalar); +TH_API size_t THFile_readHalf(THFile *self, THHalfStorage *storage); +TH_API size_t THFile_writeHalf(THFile *self, THHalfStorage *storage); +TH_API size_t THFile_readHalfRaw(THFile *self, THHalf* 
data, size_t size); +TH_API size_t THFile_writeHalfRaw(THFile *self, THHalf* data, size_t size); + +TH_API void THFile_synchronize(THFile *self); +TH_API void THFile_seek(THFile *self, size_t position); +TH_API void THFile_seekEnd(THFile *self); +TH_API size_t THFile_position(THFile *self); +TH_API void THFile_close(THFile *self); +TH_API void THFile_free(THFile *self); + +#endif diff --git a/thirdparty/libtorch/include/TH/THFilePrivate.h b/thirdparty/libtorch/include/TH/THFilePrivate.h new file mode 100644 index 0000000000..354d769c70 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THFilePrivate.h @@ -0,0 +1,50 @@ +#include + +#include + + +struct THFile__ +{ + struct THFileVTable *vtable; + + int isQuiet; + int isReadable; + int isWritable; + int isBinary; + int isAutoSpacing; + int hasError; +}; + +/* virtual table definition */ + +struct THFileVTable +{ + int (*isOpened)(THFile *self); + + ssize_t (*readByte)(THFile *self, uint8_t *data, ssize_t n); + ssize_t (*readChar)(THFile *self, int8_t *data, ssize_t n); + ssize_t (*readShort)(THFile *self, int16_t *data, ssize_t n); + ssize_t (*readInt)(THFile *self, int32_t *data, ssize_t n); + ssize_t (*readLong)(THFile *self, int64_t *data, ssize_t n); + ssize_t (*readFloat)(THFile *self, float *data, ssize_t n); + ssize_t (*readDouble)(THFile *self, double *data, ssize_t n); + ssize_t (*readHalf)(THFile *self, THHalf *data, ssize_t n); + ssize_t (*readString)(THFile *self, const char *format, char **str_); + + ssize_t (*writeByte)(THFile *self, uint8_t *data, ssize_t n); + ssize_t (*writeChar)(THFile *self, int8_t *data, ssize_t n); + ssize_t (*writeShort)(THFile *self, int16_t *data, ssize_t n); + ssize_t (*writeInt)(THFile *self, int32_t *data, ssize_t n); + ssize_t (*writeLong)(THFile *self, int64_t *data, ssize_t n); + ssize_t (*writeFloat)(THFile *self, float *data, ssize_t n); + ssize_t (*writeDouble)(THFile *self, double *data, ssize_t n); + ssize_t (*writeHalf)(THFile *self, THHalf *data, ssize_t n); + ssize_t (*writeString)(THFile *self, const char *str, ssize_t size); + + void (*synchronize)(THFile *self); + void (*seek)(THFile *self, ssize_t position); + void (*seekEnd)(THFile *self); + ssize_t (*position)(THFile *self); + void (*close)(THFile *self); + void (*free)(THFile *self); +}; diff --git a/thirdparty/libtorch/include/TH/THGeneral.h b/thirdparty/libtorch/include/TH/THGeneral.h new file mode 100644 index 0000000000..a3b99e6e8e --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGeneral.h @@ -0,0 +1,172 @@ +#ifndef TH_GENERAL_INC +#define TH_GENERAL_INC + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef TH_BLAS_MKL +#include +#endif + +#define USE_BLAS +#define USE_LAPACK +/* #undef BLAS_F2C */ +#define BLAS_USE_CBLAS_DOT + +# define TH_EXTERNC extern "C" + +// Note(jiayq): copied from ATen/core/Macros.h. Because internal build of TH +// and ATen are not unified yet, we need to duplicate code for now. Long term +// we should merge macros. +#ifdef _WIN32 +#if !defined(AT_CORE_STATIC_WINDOWS) +// TODO: unfiy the controlling macros. 
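// How the block below resolves, in short: when the library itself is being
// built on Windows (CAFFE2_BUILD_MAIN_LIBS or one of the *_EXPORTS macros is
// defined) TH_CPP_API becomes __declspec(dllexport); when a client links
// against the prebuilt DLL it becomes __declspec(dllimport); static Windows
// builds leave it empty, GCC/Clang builds use default visibility, and
// NO_EXPORT can force it back to empty further down.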
+#if defined(CAFFE2_BUILD_MAIN_LIBS) || defined(ATen_cpu_EXPORTS) || defined(caffe2_EXPORTS) +#define TH_CPP_API __declspec(dllexport) +#else // defined(CAFFE2_BUILD_MAIN_LIBS) || defined(ATen_cpu_EXPORTS) || defined(caffe2_EXPORTS) +#define TH_CPP_API __declspec(dllimport) +#endif // defined(CAFFE2_BUILD_MAIN_LIBS) || defined(ATen_cpu_EXPORTS) || defined(caffe2_EXPORTS) +#else // !defined(AT_CORE_STATIC_WINDOWS) +#define TH_CPP_API +#endif // !defined(AT_CORE_STATIC_WINDOWS) +#else // _WIN32 +#if defined(__GNUC__) +#define TH_CPP_API __attribute__((__visibility__("default"))) +#endif // defined(__GNUC__) +#endif // _WIN32 + +#ifdef NO_EXPORT +#undef TH_CPP_API +#define TH_CPP_API +#endif + +#define TH_API TH_EXTERNC TH_CPP_API + +#ifdef _WIN32 +# define TH_NO_RETURN __declspec(noreturn) +# define TH_UNREACHABLE +#else +# define TH_NO_RETURN __attribute__((noreturn)) +# define TH_UNREACHABLE __builtin_unreachable(); +#endif + +#if defined(__GNUC__) && ((__GNUC__ > 2) || (__GNUC__ == 2 && __GNUC_MINOR__ > 4)) +# define TH_UNUSED __attribute__((unused)) +#else +# define TH_UNUSED +#endif + +#if defined(__clang__) +#define __ubsan_ignore_float_divide_by_zero__ __attribute__((no_sanitize("float-divide-by-zero"))) +#else +#define __ubsan_ignore_float_divide_by_zero__ +#endif + +#ifndef M_PI +# define M_PI 3.14159265358979323846 +#endif + +typedef void (*THErrorHandlerFunction)(const char *msg, void *data); +typedef void (*THArgErrorHandlerFunction)(int argNumber, const char *msg, void *data); + +#define TH_DESC_BUFF_LEN 64 +typedef struct { + char str[TH_DESC_BUFF_LEN]; +} THDescBuff; + + +TH_API THDescBuff _THSizeDesc(const int64_t *size, const int64_t ndim); +TH_API TH_NO_RETURN void _THError(const char *file, const int line, const char *fmt, ...); +TH_API void _THAssertionFailed(const char *file, const int line, const char *exp, const char *fmt, ...); +TH_API void THSetErrorHandler(THErrorHandlerFunction new_handler, void *data); +TH_API void THSetDefaultErrorHandler(THErrorHandlerFunction new_handler, void *data); +TH_API void _THArgCheck(const char *file, int line, int condition, int argNumber, const char *fmt, ...); +TH_API void THSetArgErrorHandler(THArgErrorHandlerFunction new_handler, void *data); +TH_API void THSetDefaultArgErrorHandler(THArgErrorHandlerFunction new_handler, void *data); +TH_API void* THAlloc(ptrdiff_t size); +TH_API void* THRealloc(void *ptr, ptrdiff_t size); +TH_API void THFree(void *ptr); +TH_API void THSetGCHandler( void (*torchGCHandlerFunction)(void *data), void *data ); +// this hook should only be called by custom allocator functions +TH_API void THHeapUpdate(ptrdiff_t size); + +#define THError(...) _THError(__FILE__, __LINE__, __VA_ARGS__) + +#define THCleanup(...) __VA_ARGS__ + +#define THArgCheck(...) \ +do { \ + _THArgCheck(__FILE__, __LINE__, __VA_ARGS__); \ +} while(0) + +#define THArgError(...) \ +do { \ + _THArgCheck(__FILE__, __LINE__, false, __VA_ARGS__); \ + TH_UNREACHABLE \ +} while(0) + +#define THArgCheckWithCleanup(condition, cleanup, ...) \ +do if (!(condition)) { \ + cleanup \ + _THArgCheck(__FILE__, __LINE__, 0, __VA_ARGS__); \ +} while(0) + +#define THAssert(exp) \ +do { \ + if (!(exp)) { \ + _THAssertionFailed(__FILE__, __LINE__, #exp, ""); \ + } \ +} while(0) + +#define THAssertMsg(exp, ...) 
\ +do { \ + if (!(exp)) { \ + _THAssertionFailed(__FILE__, __LINE__, #exp, __VA_ARGS__); \ + } \ +} while(0) + +#define TH_CONCAT_STRING_2(x,y) TH_CONCAT_STRING_2_EXPAND(x,y) +#define TH_CONCAT_STRING_2_EXPAND(x,y) #x #y + +#define TH_CONCAT_STRING_3(x,y,z) TH_CONCAT_STRING_3_EXPAND(x,y,z) +#define TH_CONCAT_STRING_3_EXPAND(x,y,z) #x #y #z + +#define TH_CONCAT_STRING_4(x,y,z,w) TH_CONCAT_STRING_4_EXPAND(x,y,z,w) +#define TH_CONCAT_STRING_4_EXPAND(x,y,z,w) #x #y #z #w + +#define TH_CONCAT_2(x,y) TH_CONCAT_2_EXPAND(x,y) +#define TH_CONCAT_2_EXPAND(x,y) x ## y + +#define TH_CONCAT_3(x,y,z) TH_CONCAT_3_EXPAND(x,y,z) +#define TH_CONCAT_3_EXPAND(x,y,z) x ## y ## z + +#define TH_CONCAT_4_EXPAND(x,y,z,w) x ## y ## z ## w +#define TH_CONCAT_4(x,y,z,w) TH_CONCAT_4_EXPAND(x,y,z,w) + +#define THMin(X, Y) ((X) < (Y) ? (X) : (Y)) +#define THMax(X, Y) ((X) > (Y) ? (X) : (Y)) + +#if (defined(_MSC_VER) || defined(__MINGW32__)) +#define snprintf _snprintf +#define popen _popen +#define pclose _pclose +#include +#if !defined(HAVE_SSIZE_T) +typedef SSIZE_T ssize_t; +#endif +#endif + +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateAllTypes.h b/thirdparty/libtorch/include/TH/THGenerateAllTypes.h new file mode 100644 index 0000000000..009ca9b60b --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateAllTypes.h @@ -0,0 +1,17 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateAllTypes.h" +#endif + +#ifndef THGenerateManyTypes +#define THAllLocalGenerateManyTypes +#define THGenerateManyTypes +#endif + +#include +#include + +#ifdef THAllLocalGenerateManyTypes +#undef THAllLocalGenerateManyTypes +#undef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateBFloat16Type.h b/thirdparty/libtorch/include/TH/THGenerateBFloat16Type.h new file mode 100644 index 0000000000..40f0a8c570 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateBFloat16Type.h @@ -0,0 +1,21 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateBFloat16Type.h" +#endif + +#include +#define scalar_t at::BFloat16 +#define accreal double +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real BFloat16 +#define TH_REAL_IS_BFLOAT16 +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef accreal +#undef scalar_t +#undef Real +#undef TH_REAL_IS_BFLOAT16 +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateBoolType.h b/thirdparty/libtorch/include/TH/THGenerateBoolType.h new file mode 100644 index 0000000000..12d22b1946 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateBoolType.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateBoolType.h" +#endif + +#define scalar_t bool +#define ureal bool +#define accreal int64_t +#define Real Bool +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define TH_REAL_IS_BOOL +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef TH_REAL_IS_BOOL +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateByteType.h b/thirdparty/libtorch/include/TH/THGenerateByteType.h new file mode 100644 index 0000000000..3b03842080 --- 
/dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateByteType.h @@ -0,0 +1,26 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateByteType.h" +#endif + +#define scalar_t uint8_t +#define ureal uint8_t +#define accreal int64_t +#define Real Byte +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define THInf UCHAR_MAX +#define TH_REAL_IS_BYTE +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_BYTE +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateCharType.h b/thirdparty/libtorch/include/TH/THGenerateCharType.h new file mode 100644 index 0000000000..57265b0665 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateCharType.h @@ -0,0 +1,26 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateCharType.h" +#endif + +#define scalar_t int8_t +#define ureal uint8_t +#define accreal int64_t +#define Real Char +#define THInf SCHAR_MAX +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define TH_REAL_IS_CHAR +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_CHAR +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateDoubleType.h b/thirdparty/libtorch/include/TH/THGenerateDoubleType.h new file mode 100644 index 0000000000..fb67a52b3c --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateDoubleType.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateDoubleType.h" +#endif + +#define scalar_t double +#define accreal double +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real Double +#define THInf DBL_MAX +#define TH_REAL_IS_DOUBLE +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef accreal +#undef scalar_t +#undef Real +#undef THInf +#undef TH_REAL_IS_DOUBLE +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateFloatType.h b/thirdparty/libtorch/include/TH/THGenerateFloatType.h new file mode 100644 index 0000000000..c4b97b5236 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateFloatType.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateFloatType.h" +#endif + +#define scalar_t float +#define accreal double +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real Float +#define THInf FLT_MAX +#define TH_REAL_IS_FLOAT +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef accreal +#undef scalar_t +#undef Real +#undef THInf +#undef TH_REAL_IS_FLOAT +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateFloatTypes.h b/thirdparty/libtorch/include/TH/THGenerateFloatTypes.h new 
file mode 100644 index 0000000000..f51b658c22 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateFloatTypes.h @@ -0,0 +1,17 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateFloatTypes.h" +#endif + +#ifndef THGenerateManyTypes +#define THFloatLocalGenerateManyTypes +#define THGenerateManyTypes +#endif + +#include +#include + +#ifdef THFloatLocalGenerateManyTypes +#undef THFloatLocalGenerateManyTypes +#undef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateHalfType.h b/thirdparty/libtorch/include/TH/THGenerateHalfType.h new file mode 100644 index 0000000000..b075c683e0 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateHalfType.h @@ -0,0 +1,25 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateHalfType.h" +#endif + +#include +#define scalar_t THHalf +#define accreal float +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real Half +#define THInf TH_HALF_BITS_TO_LITERAL(TH_HALF_INF) +#define TH_REAL_IS_HALF +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_HALF +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateIntType.h b/thirdparty/libtorch/include/TH/THGenerateIntType.h new file mode 100644 index 0000000000..2d31760ad9 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateIntType.h @@ -0,0 +1,26 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateIntType.h" +#endif + +#define scalar_t int32_t +#define ureal uint32_t +#define accreal int64_t +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real Int +#define THInf INT_MAX +#define TH_REAL_IS_INT +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_INT +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateIntTypes.h b/thirdparty/libtorch/include/TH/THGenerateIntTypes.h new file mode 100644 index 0000000000..07445573dc --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateIntTypes.h @@ -0,0 +1,20 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateIntTypes.h" +#endif + +#ifndef THGenerateManyTypes +#define THIntLocalGenerateManyTypes +#define THGenerateManyTypes +#endif + +#include +#include +#include +#include +#include + +#ifdef THIntLocalGenerateManyTypes +#undef THIntLocalGenerateManyTypes +#undef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateLongType.h b/thirdparty/libtorch/include/TH/THGenerateLongType.h new file mode 100644 index 0000000000..636cf94584 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateLongType.h @@ -0,0 +1,26 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateLongType.h" +#endif + +#define scalar_t int64_t +#define ureal uint64_t +#define accreal int64_t +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) 
(scalar_t)(_val) +#define Real Long +#define THInf LONG_MAX +#define TH_REAL_IS_LONG +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_LONG +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateQInt32Type.h b/thirdparty/libtorch/include/TH/THGenerateQInt32Type.h new file mode 100644 index 0000000000..b0b45d0b14 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateQInt32Type.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateQInt32Type.h" +#endif + +#define quantized_t c10::qint32 +#define scalar_t int32_t +#define Real QInt32 +#define RealUnderlying Int +#define THQUANTIZED +#define THQINT32 +#define TH_REAL_IS_BYTE +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef quantized_t +#undef Real +#undef RealUnderlying +#undef TH_REAL_IS_BYTE +#undef THQINT32 +#undef THQUANTIZED + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateQInt8Type.h b/thirdparty/libtorch/include/TH/THGenerateQInt8Type.h new file mode 100644 index 0000000000..4126fadd90 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateQInt8Type.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateQInt8Type.h" +#endif + +#define quantized_t c10::qint8 +#define scalar_t int8_t +#define Real QInt8 +#define RealUnderlying Char +#define THQUANTIZED +#define THQINT8 +#define TH_REAL_IS_BYTE +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef quantized_t +#undef Real +#undef RealUnderlying +#undef TH_REAL_IS_BYTE +#undef THQINT8 +#undef THQUANTIZED + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateQTypes.h b/thirdparty/libtorch/include/TH/THGenerateQTypes.h new file mode 100644 index 0000000000..ee958b3a32 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateQTypes.h @@ -0,0 +1,18 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateQTypes.h" +#endif + +#ifndef THGenerateManyTypes +#define THQLocalGenerateManyTypes +#define THGenerateManyTypes +#endif + +#include +#include +#include + +#ifdef THQLocalGenerateManyTypes +#undef THQLocalGenerateManyTypes +#undef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateQUInt8Type.h b/thirdparty/libtorch/include/TH/THGenerateQUInt8Type.h new file mode 100644 index 0000000000..3051bbc35e --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateQUInt8Type.h @@ -0,0 +1,24 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateQUInt8Type.h" +#endif + +#define quantized_t c10::quint8 +#define scalar_t uint8_t +#define Real QUInt8 +#define RealUnderlying Byte +#define THQUANTIZED +#define THQUINT8 +#define TH_REAL_IS_BYTE +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef quantized_t +#undef Real +#undef RealUnderlying +#undef TH_REAL_IS_BYTE +#undef THQUINT8 +#undef THQUANTIZED + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerateShortType.h b/thirdparty/libtorch/include/TH/THGenerateShortType.h new file mode 100644 index 
0000000000..afda4dce8e --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerateShortType.h @@ -0,0 +1,26 @@ +#ifndef TH_GENERIC_FILE +#error "You must define TH_GENERIC_FILE before including THGenerateShortType.h" +#endif + +#define scalar_t int16_t +#define ureal uint16_t +#define accreal int64_t +#define TH_CONVERT_REAL_TO_ACCREAL(_val) (accreal)(_val) +#define TH_CONVERT_ACCREAL_TO_REAL(_val) (scalar_t)(_val) +#define Real Short +#define THInf SHRT_MAX +#define TH_REAL_IS_SHORT +#line 1 TH_GENERIC_FILE +#include TH_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef THInf +#undef TH_REAL_IS_SHORT +#undef TH_CONVERT_REAL_TO_ACCREAL +#undef TH_CONVERT_ACCREAL_TO_REAL + +#ifndef THGenerateManyTypes +#undef TH_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/TH/THGenerator.hpp b/thirdparty/libtorch/include/TH/THGenerator.hpp new file mode 100644 index 0000000000..970f12ac6b --- /dev/null +++ b/thirdparty/libtorch/include/TH/THGenerator.hpp @@ -0,0 +1,39 @@ +#pragma once + +#include + +/** + * THGeneratorState is a POD class needed for memcpys + * in torch.get_rng_state() and torch.set_rng_state(). + * It is a legacy class and even though it is replaced with + * at::CPUGenerator, we need this class and some of its fields + * to support backward compatibility on loading checkpoints. + */ +struct THGeneratorState { + /* The initial seed. */ + uint64_t the_initial_seed; + int left; /* = 1; */ + int seeded; /* = 0; */ + uint64_t next; + uint64_t state[at::MERSENNE_STATE_N]; /* the array for the state vector */ + + /********************************/ + + /* For normal distribution */ + double normal_x; + double normal_y; + double normal_rho; + int normal_is_valid; /* = 0; */ +}; + +/** + * THGeneratorStateNew is a POD class containing + * new data introduced in at::CPUGenerator and the legacy state. It is used + * as a helper for torch.get_rng_state() and torch.set_rng_state() + * functions. + */ +struct THGeneratorStateNew { + THGeneratorState legacy_pod; + float next_float_normal_sample; + bool is_next_float_normal_sample_valid; +}; diff --git a/thirdparty/libtorch/include/TH/THHalf.h b/thirdparty/libtorch/include/TH/THHalf.h new file mode 100644 index 0000000000..ce41080ca7 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THHalf.h @@ -0,0 +1,8 @@ +#ifndef TH_HALF_H +#define TH_HALF_H + +#include + +#define THHalf at::Half + +#endif diff --git a/thirdparty/libtorch/include/TH/THLapack.h b/thirdparty/libtorch/include/TH/THLapack.h new file mode 100644 index 0000000000..e50faa8f45 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THLapack.h @@ -0,0 +1,27 @@ +#ifndef TH_LAPACK_INC +#define TH_LAPACK_INC + +#include + +#define THLapack_(NAME) TH_CONCAT_4(TH,Real,Lapack_,NAME) + +#define THLapackCheck(fmt, func, info , ...) \ +if (info < 0) { \ + THError("Lapack Error in %s : Illegal Argument %d", func, -info); \ +} else if(info > 0) { \ + THError(fmt, func, info, ##__VA_ARGS__); \ +} \ + +#define THLapackCheckWithCleanup(fmt, cleanup, func, info , ...) 
\ +if (info < 0) { \ + cleanup \ + THError("Lapack Error in %s : Illegal Argument %d", func, -info); \ +} else if(info > 0) { \ + cleanup \ + THError(fmt, func, info, ##__VA_ARGS__); \ +} + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/TH/THLogAdd.h b/thirdparty/libtorch/include/TH/THLogAdd.h new file mode 100644 index 0000000000..60acaedc14 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THLogAdd.h @@ -0,0 +1,14 @@ +#ifndef TH_LOG_ADD_INC +#define TH_LOG_ADD_INC + +#include + +TH_API const double THLog2Pi; +TH_API const double THLogZero; +TH_API const double THLogOne; + +TH_API double THLogAdd(double log_a, double log_b); +TH_API double THLogSub(double log_a, double log_b); +TH_API double THExpMinusApprox(const double x); + +#endif diff --git a/thirdparty/libtorch/include/TH/THMemoryFile.h b/thirdparty/libtorch/include/TH/THMemoryFile.h new file mode 100644 index 0000000000..d572b6ddca --- /dev/null +++ b/thirdparty/libtorch/include/TH/THMemoryFile.h @@ -0,0 +1,13 @@ +#ifndef TH_MEMORY_FILE_INC +#define TH_MEMORY_FILE_INC + +#include +#include + +TH_API THFile *THMemoryFile_newWithStorage(THCharStorage *storage, const char *mode); +TH_API THFile *THMemoryFile_new(const char *mode); + +TH_API THCharStorage *THMemoryFile_storage(THFile *self); +TH_API void THMemoryFile_longSize(THFile *self, int size); + +#endif diff --git a/thirdparty/libtorch/include/TH/THSize.h b/thirdparty/libtorch/include/TH/THSize.h new file mode 100644 index 0000000000..c190a31077 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THSize.h @@ -0,0 +1,13 @@ +#ifndef TH_SIZE_INC +#define TH_SIZE_INC + +#include +#include + +// THTensor functions that would work on a THSize if we had such a class in C++, +// i.e. THTensor functions that depend only on the shape of the tensor, not the type. + +TH_API int THSize_isSameSizeAs(const int64_t *sizeA, int64_t dimsA, const int64_t *sizeB, int64_t dimsB); +TH_API ptrdiff_t THSize_nElement(int64_t dims, int64_t *size); + +#endif diff --git a/thirdparty/libtorch/include/TH/THStorage.h b/thirdparty/libtorch/include/TH/THStorage.h new file mode 100644 index 0000000000..404d587529 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THStorage.h @@ -0,0 +1,4 @@ +#pragma once +#include + +// Compatability header. Use THStorageFunctions.h instead if you need this. diff --git a/thirdparty/libtorch/include/TH/THStorageFunctions.h b/thirdparty/libtorch/include/TH/THStorageFunctions.h new file mode 100644 index 0000000000..adaccb435a --- /dev/null +++ b/thirdparty/libtorch/include/TH/THStorageFunctions.h @@ -0,0 +1,39 @@ +#pragma once + +#include +#include + +#define THStorage_(NAME) TH_CONCAT_4(TH,Real,Storage_,NAME) + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +// This exists to have a data-type independent way of freeing (necessary for THPPointer). +TH_API void THStorage_free(THStorage *storage); diff --git a/thirdparty/libtorch/include/TH/THStorageFunctions.hpp b/thirdparty/libtorch/include/TH/THStorageFunctions.hpp new file mode 100644 index 0000000000..f26ee62b36 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THStorageFunctions.hpp @@ -0,0 +1,38 @@ +#pragma once + +// STOP!!! Thinking of including this header directly? 
Please +// read Note [TH abstraction violation] + +#include +#include +#include + +#include + +// Note [Weak references for intrusive refcounting] +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Here's the scheme: +// +// - refcount == number of strong references to the object +// weakcount == number of weak references to the object, +// plus one more if refcount > 0 +// +// - THStorage stays live as long as there are any strong +// or weak pointers to it (weakcount > 0, since strong +// references count as a +1 to weakcount) +// +// - finalizers are called and data_ptr is deallocated when refcount == 0 +// +// - Once refcount == 0, it can never again be > 0 (the transition +// from > 0 to == 0 is monotonic) +// +// - When you access THStorage via a weak pointer, you must +// atomically increment the use count, if it is greater than 0. +// If it is not, you must report that the storage is dead. +// + +TH_CPP_API THStorage* THStorage_new(caffe2::TypeMeta data_type); +TH_API ptrdiff_t THStorage_size(const THStorage *self); + +TH_API void THStorage_retain(THStorage *storage); +TH_API void THStorage_resize(THStorage *storage, ptrdiff_t size); diff --git a/thirdparty/libtorch/include/TH/THTensor.h b/thirdparty/libtorch/include/TH/THTensor.h new file mode 100644 index 0000000000..335000fbef --- /dev/null +++ b/thirdparty/libtorch/include/TH/THTensor.h @@ -0,0 +1,62 @@ +#ifndef TH_TENSOR_INC +#define TH_TENSOR_INC + +#include +#include + +#define THTensor_(NAME) TH_CONCAT_4(TH,Real,Tensor_,NAME) + +/* basics */ +#include +#include + +#include +#include + +#include +#include + +#include +#include + +/* random numbers */ +#include +#include + +#include +#include + +/* maths */ +#include +#include + +#include +#include + +#include +#include + +#include +#include + +/* fill and zero*/ +#include +#include + +#include +#include + +#include +#include + +#include +#include + +/* convolutions */ +#include +#include + +/* lapack support */ +#include +#include +#endif diff --git a/thirdparty/libtorch/include/TH/THTensor.hpp b/thirdparty/libtorch/include/TH/THTensor.hpp new file mode 100644 index 0000000000..699ebaa0ce --- /dev/null +++ b/thirdparty/libtorch/include/TH/THTensor.hpp @@ -0,0 +1,138 @@ +#pragma once + +// STOP!!! Thinking of including this header directly? Please +// read Note [TH abstraction violation] + +#include +#include + +#include +#include + +// Returns a Tensor given a TensorImpl. The TensorImpl remains valid after the +// the Tensor is destroyed. +inline at::Tensor THTensor_wrap(THTensor* tensor) { + c10::raw::intrusive_ptr::incref(tensor); + return at::Tensor(c10::intrusive_ptr::reclaim(tensor)); +} + +inline const int64_t* THTensor_getSizePtr(THTensor* tensor) { + return tensor->sizes().data(); +} + +inline const int64_t* THTensor_getStridePtr(THTensor* tensor) { + return tensor->strides().data(); +} + +// NB: Non-retaining +inline THStorage* THTensor_getStoragePtr(const THTensor* tensor) { + // Within PyTorch, the invariant is that storage_ is always + // initialized; we never have tensors that don't have any storage. + // However, for Caffe2, this is not true, because they have permitted + // tensors to be allocated without specifying what scalar type + // they should be, only to be filled when GetMutableData is called + // for the first time (providing the necessary type). It is an ERROR to + // invoke any PyTorch operations on such a half-constructed storage, + // and this check tests for that case. 
+ TORCH_CHECK(tensor->storage(), "Cannot use PyTorch operations on a half-constructed " + "tensor. If this tensor came from Caffe2, please call GetMutableData on " + "it first; otherwise, this is a bug, please report it."); + return tensor->storage().unsafeGetStorageImpl(); +} + +inline void THTensor_maybe_zero_dim(THTensor *tensor, bool condition_when_zero_dim) { + bool set_zero_dim = condition_when_zero_dim && tensor->sizes().size() == 1 && tensor->size(0) == 1; + if (set_zero_dim) { + tensor->set_sizes_and_strides({}, {}); + } +} + +// [NOTE: nDimension vs nDimensionLegacyNoScalars vs nDimensionLegacyAll] +// nDimension corresponds to the "true" ATen dimension. +// nDimensionLegacyNoScalars correpsonds to the ATen dimension, except scalars are viewed as 1-dimensional tensors. +// nDimensionLegacyAll corresponds to the ATen dimension, except scalars are viewed as 1-dimensional tensors +// and tensors with a dimension of size zero are collapsed to 0-dimensional tensors. +// +// Eventually, everything should go through nDimension or tensor->dim(). +inline int THTensor_nDimension(const THTensor* tensor) { + return tensor->dim(); +} + +inline int THTensor_nDimensionLegacyNoScalars(const THTensor* tensor) { + if (tensor->dim() == 0) { + return 1; + } else { + return tensor->dim(); + } +} + +inline int THTensor_nDimensionLegacyAll(const THTensor* tensor) { + if (tensor->is_empty()) { + return 0; + } else if (tensor->dim() == 0) { + return 1; + } else { + return tensor->dim(); + } +} + +inline int64_t THTensor_strideLegacyNoScalars(const THTensor *self, int dim) { + THArgCheck((dim >= 0) && (dim < THTensor_nDimensionLegacyNoScalars(self)), 2, "dimension %d out of range of %dD tensor", + dim, THTensor_nDimensionLegacyNoScalars(self)); + return self->dim() == 0 ? 1 : self->stride(dim); +} + +inline int64_t THTensor_sizeLegacyNoScalars(const THTensor *self, int dim) +{ + THArgCheck((dim >= 0) && (dim < THTensor_nDimensionLegacyNoScalars(self)), 2, "dimension %d out of range of %dD tensor", + dim, THTensor_nDimensionLegacyNoScalars(self)); + return self->dim() == 0 ? 
1 : self->size(dim); +} + +#include +#include + +#include +#include + +inline std::vector THTensor_sizesLegacyNoScalars(const THTensor *self) { + if (self->dim() == 0) { + return {1}; + } else { + return self->sizes().vec(); + } +} + +inline std::vector THTensor_stridesLegacyNoScalars(const THTensor *self) { + if (self->dim() == 0) { + return {1}; + } else { + return self->strides().vec(); + } +} + +// NB: Steals ownership of storage +TH_API void THTensor_stealAndSetStoragePtr(THTensor* tensor, THStorage* storage); + +TH_API void THTensor_free(THTensor *self); +TH_API void THTensor_setStorageNd(THTensor *self, THStorage *storage, ptrdiff_t storageOffset, int nDimension, const int64_t *size, const int64_t *stride); +TH_API void THTensor_resizeNd(THTensor *self, int nDimension, const int64_t *size, const int64_t *stride); + +TH_CPP_API void THTensor_resize(THTensor *self, at::IntArrayRef size, at::IntArrayRef stride); +TH_CPP_API void THTensor_setStorage(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, at::IntArrayRef size_, at::IntArrayRef stride_); +TH_CPP_API c10::optional> THTensor_compute_stride( + at::IntArrayRef oldshape, + at::IntArrayRef oldstride, + at::IntArrayRef newshape); + +#include +#include + +#include +#include + +#include +#include + +#include +#include diff --git a/thirdparty/libtorch/include/TH/THTensorApply.h b/thirdparty/libtorch/include/TH/THTensorApply.h new file mode 100644 index 0000000000..230e6d2462 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THTensorApply.h @@ -0,0 +1,309 @@ +#ifndef TH_TENSOR_APPLY_INC +#define TH_TENSOR_APPLY_INC + +#include + +/* + * The basic strategy for apply is as follows: + * + * 1. Starting with the outermost index, loop until we reach a dimension where the + * data is no longer contiguous, i.e. the stride at that dimension is not equal to + * the size of the tensor defined by the outer dimensions. Let's call this outer + * (contiguous) tensor A. Note that if the Tensor is contiguous, then A is equal + * to the entire Tensor. Let's call the inner tensor B. + * + * 2. We loop through the indices in B, starting at its outermost dimension. For + * example, if B is a 2x2 matrix, then we do: + * + * B[0][0] + * B[0][1] + * B[1][0] + * B[1][1] + * + * We set the offset into the underlying storage as (storageOffset + stride_B * index_B), + * i.e. basically we compute the offset into the storage as we would normally for a + * Tensor. But because we are guaranteed the subsequent data is contiguous in memory, we + * can simply loop for sizeof(A) iterations and perform the operation, without having to + * follow the order described by the strides of A. + * + * 3. As an optimization, we merge dimensions of A that are contiguous in memory. For + * example, if A is a 3x3x3x3 tensor narrowed from a 3x3x4x3 tensor, then the first two + * dimensions can be merged for the purposes of APPLY, reducing the number of nested + * loops. 
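 *
 * To make the split concrete, here is a rough standalone sketch (not the macros
 * below, and ignoring the dimension-merging optimization of step 3) of visiting a
 * 2-d tensor with sizes {4, 2} and strides {3, 1}, e.g. a 4x3 buffer narrowed to
 * its first two columns; `data`, `storage_offset` and `visit` are placeholder names:
 *
 *   for (int64_t row = 0; row < 4; ++row) {         // indices of B (strided part)
 *     float *p = data + storage_offset + row * 3;   // jump by the row stride
 *     for (int64_t i = 0; i < 2; ++i)               // sizeof(A) contiguous elements
 *       visit(p[i]);                                 // adjacent in memory, stride 1
 *   }
 *
 * The APPLY macros below generate exactly this shape of loop nest: counters over
 * the non-contiguous outer dimensions plus a tight inner loop of TENSOR##_size
 * elements advancing by TENSOR##_stride.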
+ */ + +#define __TH_TENSOR_APPLYX_PREAMBLE(TYPE, TENSOR, DIM, ALLOW_CONTIGUOUS) \ + TYPE *TENSOR##_data = NULL; \ + int64_t *TENSOR##_counter = NULL, *TENSOR##_sizes = NULL, *TENSOR##_strides = NULL, *TENSOR##_dimOffset = NULL; \ + int64_t TENSOR##_stride = 0, TENSOR##_size = 0, TENSOR##_dim = 0, TENSOR##_i, TENSOR##_n; \ + int TENSOR##_contiguous = ALLOW_CONTIGUOUS && DIM < 0; \ + TENSOR##_n = 1; \ + for(TENSOR##_i = 0; TENSOR##_i < TENSOR->dim(); TENSOR##_i++) \ + TENSOR##_n *= TENSOR->size(TENSOR##_i); \ +\ + if(TENSOR->is_empty()) \ + TH_TENSOR_APPLY_hasFinished = 1; \ + else \ + { \ + TENSOR##_data = THTensor_getStoragePtr(TENSOR)->data()+TENSOR->storage_offset(); \ + TENSOR##_size = 1; \ + TENSOR##_stride = 1; \ + for(TENSOR##_i = THTensor_nDimensionLegacyAll(TENSOR)-1; TENSOR##_i >= 0; TENSOR##_i--) { \ + if(THTensor_sizeLegacyNoScalars(TENSOR, TENSOR##_i) != 1) { \ + if(THTensor_strideLegacyNoScalars(TENSOR, TENSOR##_i) == TENSOR##_size && TENSOR##_i != DIM) \ + TENSOR##_size *= THTensor_sizeLegacyNoScalars(TENSOR, TENSOR##_i); \ + else{ \ + TENSOR##_contiguous = 0; \ + break; \ + } \ + } \ + } \ + if (!TENSOR##_contiguous) { \ + /* Find the dimension of contiguous sections */ \ + TENSOR##_dim = 1; \ + for(TENSOR##_i = THTensor_nDimensionLegacyAll(TENSOR)-2; TENSOR##_i >= 0; TENSOR##_i--) \ + { \ + if(TENSOR->stride(TENSOR##_i) != TENSOR->stride(TENSOR##_i+1) * TENSOR->size(TENSOR##_i+1) || TENSOR##_i == DIM || TENSOR##_i+1 == DIM) \ + TENSOR##_dim++; \ + } \ + /* Allocate an array of 3*dim elements, where dim is the number of contiguous sections */ \ + TENSOR##_counter = (int64_t*)THAlloc(sizeof(int64_t)*(3*TENSOR##_dim)); \ + TENSOR##_sizes = TENSOR##_counter + TENSOR##_dim; \ + TENSOR##_strides = TENSOR##_counter + 2*TENSOR##_dim; \ + TH_TENSOR_dim_index = TENSOR##_dim-1; \ + TENSOR##_dimOffset = (DIM == THTensor_nDimensionLegacyAll(TENSOR)-1) ? &TENSOR##_i : &TENSOR##_counter[DIM]; \ + TENSOR##_sizes[TH_TENSOR_dim_index] = THTensor_sizeLegacyNoScalars(TENSOR, THTensor_nDimensionLegacyAll(TENSOR)-1); \ + TENSOR##_strides[TH_TENSOR_dim_index] = THTensor_strideLegacyNoScalars(TENSOR, THTensor_nDimensionLegacyAll(TENSOR)-1); \ + /* TENSOR##_counter tracks where we are in the storage. The offset into the */ \ + /* storage is given by storage_offset + (i * j), where i is the stride */ \ + /* vector and j is tensor_counter vector. This sets the starting position for the loop. 
*/ \ + for(TENSOR##_i = TENSOR##_dim-1; TENSOR##_i >= 0; --TENSOR##_i) { \ + TENSOR##_counter[TENSOR##_i] = 0; \ + } \ + for(TENSOR##_i = THTensor_nDimensionLegacyAll(TENSOR)-2; TENSOR##_i >= 0; --TENSOR##_i) { \ + if (TENSOR->stride(TENSOR##_i) == TENSOR->stride(TENSOR##_i+1) * TENSOR->size(TENSOR##_i+1) && TENSOR##_i != DIM && TENSOR##_i+1 != DIM) { \ + TENSOR##_sizes[TH_TENSOR_dim_index] = TENSOR->size(TENSOR##_i) * TENSOR##_sizes[TH_TENSOR_dim_index]; \ + if (DIM != THTensor_nDimensionLegacyAll(TENSOR)-1 && TENSOR##_i < DIM) \ + TENSOR##_dimOffset--; \ + } else { \ + --TH_TENSOR_dim_index; \ + TENSOR##_sizes[TH_TENSOR_dim_index] = TENSOR->size(TENSOR##_i); \ + TENSOR##_strides[TH_TENSOR_dim_index] = TENSOR->stride(TENSOR##_i); \ + } \ + } \ + /* Size of the inner most section */ \ + TENSOR##_size = TENSOR##_sizes[TENSOR##_dim-1]; \ + /* Stride of the inner most section */ \ + TENSOR##_stride = TENSOR##_strides[TENSOR##_dim-1]; \ + } \ + else{\ + TENSOR##_dim = 1;\ + TENSOR##_counter = (int64_t*)THAlloc(sizeof(int64_t)*3);\ + TENSOR##_sizes = TENSOR##_counter + 1;\ + TENSOR##_strides = TENSOR##_counter + 2;\ + TENSOR##_sizes[0] = TENSOR##_n;\ + TENSOR##_strides[0] = 1;\ + TENSOR##_size = TENSOR##_sizes[0];\ + TENSOR##_stride = TENSOR##_strides[0];\ + }\ + } \ + TENSOR##_i = 0; + +#define __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR, ALWAYS_UPDATE) \ + if(TENSOR##_i == TENSOR##_size || ALWAYS_UPDATE) \ + { \ + if(TENSOR##_contiguous) \ + break; \ +\ + if(TENSOR##_dim == 1) \ + break; \ +\ + /* Reset pointer to beginning of loop */ \ + TENSOR##_data -= TENSOR##_size*TENSOR##_stride; \ + for(TENSOR##_i = TENSOR##_dim-2; TENSOR##_i >= 0; TENSOR##_i--) \ + { \ + TENSOR##_counter[TENSOR##_i]++; \ + /* Jump ahread by the stride of this dimension */ \ + TENSOR##_data += TENSOR##_strides[TENSOR##_i]; \ +\ + if(TENSOR##_counter[TENSOR##_i] == TENSOR##_sizes[TENSOR##_i]) \ + { \ + if(TENSOR##_i == 0) \ + { \ + TH_TENSOR_APPLY_hasFinished = 1; \ + break; \ + } \ + else \ + { \ + /* Reset the pointer to the beginning of the chunk defined by this dimension */ \ + TENSOR##_data -= TENSOR##_counter[TENSOR##_i]*TENSOR##_strides[TENSOR##_i]; \ + TENSOR##_counter[TENSOR##_i] = 0; \ + } \ + } \ + else \ + break; \ + } \ + TENSOR##_i = 0; \ + } \ + +#define TH_TENSOR_APPLY3_D(TYPE1, TENSOR1, TYPE2, TENSOR2, TYPE3, TENSOR3, DIM, CODE) \ +{ \ + int TH_TENSOR_APPLY_hasFinished = 0; \ + int64_t TH_TENSOR_dim_index = 0; \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE1, TENSOR1, DIM, 1) \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE2, TENSOR2, DIM, 1) \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE3, TENSOR3, DIM, 1) \ + \ + int elements_equal = 1; \ + if(TENSOR1##_n != TENSOR2##_n) { \ + elements_equal = 0; \ + } \ + else if(TENSOR1##_n != TENSOR3##_n) { \ + elements_equal = 0; \ + } \ + if (elements_equal == 0) { \ + AT_ERROR("inconsistent tensor size, expected ", \ + #TENSOR1, " ", TENSOR1->sizes(), ", ", \ + #TENSOR2, " ", TENSOR2->sizes(), " and ", \ + #TENSOR3, " ", TENSOR3->sizes(), " to have the same " \ + "number of elements, but got ", TENSOR1##_n, ", ", \ + TENSOR2##_n, " and ", TENSOR3##_n, " elements respectively"); \ + } \ + \ + while(!TH_TENSOR_APPLY_hasFinished) \ + { \ + /* Loop through the inner most region of the Tensor */ \ + for(; TENSOR1##_i < TENSOR1##_size && TENSOR2##_i < TENSOR2##_size && TENSOR3##_i < TENSOR3##_size; TENSOR1##_i++, TENSOR2##_i++, TENSOR3##_i++, TENSOR1##_data += TENSOR1##_stride, TENSOR2##_data += TENSOR2##_stride, TENSOR3##_data += TENSOR3##_stride) /* 0 et pas TENSOR##_dim! 
*/ \ + { \ + CODE \ + } \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR1, 0) \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR2, 0) \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR3, 0) \ + } \ + if(TENSOR1##_counter != NULL) \ + THFree(TENSOR1##_counter); \ + if(TENSOR2##_counter != NULL) \ + THFree(TENSOR2##_counter); \ + if(TENSOR3##_counter != NULL) \ + THFree(TENSOR3##_counter); \ +} + +#define TH_TENSOR_APPLY3(TYPE1, TENSOR1, TYPE2, TENSOR2, TYPE3, TENSOR3, CODE) \ + TH_TENSOR_APPLY3_D(TYPE1, TENSOR1, TYPE2, TENSOR2, TYPE3, TENSOR3, -1, CODE) + +#define TH_TENSOR_APPLY2_D(TYPE1, TENSOR1, TYPE2, TENSOR2, DIM, CODE) \ +{ \ + int TH_TENSOR_APPLY_hasFinished = 0; \ + int64_t TH_TENSOR_dim_index = 0; \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE1, TENSOR1, DIM, 1) \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE2, TENSOR2, DIM, 1) \ +\ + if(TENSOR1##_n != TENSOR2##_n) { \ + AT_ERROR("inconsistent tensor size, expected ", \ + #TENSOR1, " ", TENSOR1->sizes(), " and ", \ + #TENSOR2, " ", TENSOR2->sizes(), \ + " to have the same number of elements, but got ", \ + TENSOR1##_n, " and ", TENSOR2##_n, " elements respectively"); \ + } \ + while(!TH_TENSOR_APPLY_hasFinished) \ + { \ + /* Loop through the inner most region of the Tensor */ \ + for(; TENSOR1##_i < TENSOR1##_size && TENSOR2##_i < TENSOR2##_size; TENSOR1##_i++, TENSOR2##_i++, TENSOR1##_data += TENSOR1##_stride, TENSOR2##_data += TENSOR2##_stride) /* 0 et pas TENSOR##_dim! */ \ + { \ + CODE \ + } \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR1, 0) \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR2, 0) \ + } \ + if(TENSOR1##_counter != NULL) \ + THFree(TENSOR1##_counter); \ + if(TENSOR2##_counter != NULL) \ + THFree(TENSOR2##_counter); \ +} + +#define TH_TENSOR_APPLY2(TYPE1, TENSOR1, TYPE2, TENSOR2, CODE) \ + TH_TENSOR_APPLY2_D(TYPE1, TENSOR1, TYPE2, TENSOR2, -1, CODE) + +#define TH_TENSOR_APPLY_D(TYPE, TENSOR, DIM, CODE) \ +{ \ + int TH_TENSOR_APPLY_hasFinished = 0; \ + int64_t TH_TENSOR_dim_index = 0; \ + __TH_TENSOR_APPLYX_PREAMBLE(TYPE, TENSOR, DIM, 0) \ +\ + while(!TH_TENSOR_APPLY_hasFinished) \ + { \ + /* Loop through the inner most region of the Tensor */ \ + for(; TENSOR##_i < TENSOR##_size; TENSOR##_i++, TENSOR##_data += TENSOR##_stride) /* 0 et pas TENSOR##_dim! */ \ + { \ + CODE \ + } \ + __TH_TENSOR_APPLYX_UPDATE_COUNTERS(TENSOR, 1) \ + } \ + THFree(TENSOR##_counter); \ +} + +#define TH_TENSOR_APPLY(TYPE, TENSOR, CODE) \ + TH_TENSOR_APPLY_D(TYPE, TENSOR, -1, CODE) + + +/* + * Calcuate the memory offset of an element in a tensor. The strategy is below: + * + * 1. convert the line index(the index of the element) to the indexs(coordinates) in the tensor. + * It can hinted by a classical problem: Getting each individual digit from a whole integer(Decimal base). + * A N-digit decimal base number could be view as a N-dimension tensor and the sizes of the tensor are 10. + * So the value the whole integer is the line index. And the digits could be viewed as the indexes in + * different dimentions. + * + * 2. convert the indexs(coordinates) in the tensor to the memory offset. + * + * You can get the detailes in the for-statement iterations. + * + * The macro is only used in the first element in each thread. For the rest, the memory offset could update + * according to info of the tensor in order to get better performance. So we should also record the each + * indexs in coresponding dimension of first element. + * The recorded info is stored in the TENSOR##_counter_tmp. 
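 *
 * A worked example of the conversion (numbers only, not the macro itself): take a
 * tensor with sizes {2, 3, 4} and strides {20, 5, 1}, i.e. a view into a larger
 * buffer, and line_index_start = 13. Walking the dimensions from innermost to
 * outermost, exactly like extracting decimal digits:
 *
 *   dim 2: 13 % 4 = 1, quot = 13 / 4 = 3, offset += 1 * 1  = 1
 *   dim 1:  3 % 3 = 0, quot =  3 / 3 = 1, offset += 0 * 5  = 0
 *   dim 0:  1 % 2 = 1, quot =  1 / 2 = 0, offset += 1 * 20 = 20
 *
 * so element number 13 has coordinates (1, 0, 1) and sits at memory offset 21
 * (= 1*20 + 0*5 + 1*1) from the start of the tensor's data.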
+ * + */ +#define __TH_TENSOR_APPLYX_CAL_MEMORY_OFFSET(TENSOR) \ + int64_t *TENSOR##_counter_tmp = (int64_t*)THAlloc(sizeof(int64_t) * TENSOR##_dim); \ + ptrdiff_t TENSOR##_memory_offset = 0; \ + ptrdiff_t TENSOR##_quot = line_index_start; \ + for (TENSOR##_i = TENSOR##_dim-1; TENSOR##_i>=0; --TENSOR##_i) { \ + TENSOR##_counter_tmp[TENSOR##_i] = TENSOR##_quot%TENSOR##_sizes[TENSOR##_i]; \ + TENSOR##_quot /= TENSOR##_sizes[TENSOR##_i]; \ + TENSOR##_memory_offset += TENSOR##_counter_tmp[TENSOR##_i] * TENSOR##_strides[TENSOR##_i]; \ + } + +/* + * The macro update the indexes in each dimension of the elements except for the first one allocated in + * each thread. + * For a tensor, if the index of some dimension reaches the size of the corresponding dimension. It will carry and clear. + * If the index of next high dimension does do, the index of next high dimension should carry and clear, too. + * + * The momery offset calculatation is a little confusing. If current index carries, the current index is set to 0. So + * the offset should decrease by size*stride of the last dimension. Then the index next high dimension increases by 1. So + * the offset should increase by stride of next high dimension. + */ +#define __TH_TENSOR_APPLYX_UPDATE_COUNTERS_PARALLEL(TENSOR) \ + if(TENSOR##_i == TENSOR##_size && TENSOR##_dim > 1){ /*reaches the edge*/ \ + int TENSOR##_carry_coord = 1; /*set carry flag to true*/ \ + TENSOR##_start = 0; /*the current index be cleared to 0*/\ + TENSOR##_data -= TENSOR##_size * TENSOR##_stride; /*the momery offset reset to the first one in current dimension */\ + for(TENSOR##_i = TENSOR##_dim - 2; (TENSOR##_i >= 0) && (TENSOR##_carry_coord); TENSOR##_i--){ \ + TENSOR##_counter_tmp[TENSOR##_i]++; /*the index of next high dimension update*/ \ + TENSOR##_data += TENSOR##_strides[TENSOR##_i]; /*memory offset increase by stride of next high dimension*/\ + if(TENSOR##_counter_tmp[TENSOR##_i] == TENSOR##_sizes[TENSOR##_i]){ /*The next high dimension also carry, continue + to clear and carry*/ \ + TENSOR##_data -= TENSOR##_sizes[TENSOR##_i] * TENSOR##_strides[TENSOR##_i]; \ + TENSOR##_counter_tmp[TENSOR##_i] = 0; \ + } else { \ + TENSOR##_carry_coord = 0; \ + } \ + } \ + } else { \ + TENSOR##_start = TENSOR##_i; \ + } + +#endif diff --git a/thirdparty/libtorch/include/TH/THTensorDimApply.h b/thirdparty/libtorch/include/TH/THTensorDimApply.h new file mode 100644 index 0000000000..2bb3f97e60 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THTensorDimApply.h @@ -0,0 +1,329 @@ +#ifndef TH_TENSOR_DIM_APPLY_INC +#define TH_TENSOR_DIM_APPLY_INC + +// This is an example of SIZE_CHECK argument passable to TH_TENSOR_DIM_APPLY3. +// The TENSOR1, TENSOR2, TENSOR3, DIMENSION will be expanded the same way as +// TH_TENSOR_DIM_APPLY3. +// Specifically, this check ensures that TENSOR1, TENSOR2, TENSOR3 have same +// size except for DIMENSION. 
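// As a rough standalone illustration of what this size check enforces, a plain
// helper over raw size arrays might look as follows (the function name and the use
// of raw int64_t pointers are purely illustrative and not part of the TH API; the
// real macro below applies the same pairwise comparison to (TENSOR1, TENSOR2) and
// (TENSOR1, TENSOR3) and also reports the offending sizes in its error message):
static inline bool THDimApply_sizesEqualExceptDim_sketch(const int64_t *a,
                                                         const int64_t *b,
                                                         int ndim, int skip_dim) {
  for (int d = 0; d < ndim; ++d) {
    if (d == skip_dim)
      continue;               // the applied DIMENSION is allowed to differ
    if (a[d] != b[d])
      return false;           // any other mismatch makes the apply invalid
  }
  return true;
}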
+#define TH_TENSOR_DIM_APPLY3_SIZE_EQ_EXCEPT_DIM(TENSOR1, TENSOR2, TENSOR3, DIMENSION) \ +{ \ + int shape_check_flag = 0; \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + { \ + if (TH_TENSOR_DIM_APPLY_i == DIMENSION) \ + continue; \ + if (TENSOR1->size(TH_TENSOR_DIM_APPLY_i) != TENSOR2->size(TH_TENSOR_DIM_APPLY_i)) { \ + shape_check_flag = 1; \ + break; \ + } \ + if(TENSOR1->size(TH_TENSOR_DIM_APPLY_i) != TENSOR3->size(TH_TENSOR_DIM_APPLY_i)) { \ + shape_check_flag = 1; \ + break; \ + } \ + } \ + if (shape_check_flag == 1) { \ + AT_ERROR("Expected ", #TENSOR1, " ", TENSOR1->sizes(), ", ", #TENSOR2, " ", TENSOR2->sizes(), " and ", #TENSOR3, " ", TENSOR3->sizes(), " to have the same size apart from dimension ", DIMENSION); \ + } \ +} + +#define TH_TENSOR_DIM_APPLY3(TYPE1, TENSOR1, TYPE2, TENSOR2, TYPE3, TENSOR3, DIMENSION, SIZE_CHECK, CODE) \ +{ \ + TYPE1 *TENSOR1##_data = NULL; \ + TH_UNUSED int64_t TENSOR1##_stride = 0, TENSOR1##_size = 0; \ + TYPE2 *TENSOR2##_data = NULL; \ + TH_UNUSED int64_t TENSOR2##_stride = 0, TENSOR2##_size = 0; \ + TYPE3 *TENSOR3##_data = NULL; \ + TH_UNUSED int64_t TENSOR3##_stride = 0, TENSOR3##_size = 0; \ + int64_t *TH_TENSOR_DIM_APPLY_counter = NULL; \ + int TH_TENSOR_DIM_APPLY_hasFinished = THTensor_(numel)(TENSOR1) == 0; \ + int TH_TENSOR_DIM_APPLY_i; \ +\ + if( (DIMENSION < 0) || (DIMENSION >= THTensor_nDimensionLegacyNoScalars(TENSOR1)) ) \ + THError("invalid dimension %d (expected to be 0 <= dim < %d)", DIMENSION, THTensor_nDimensionLegacyNoScalars(TENSOR1)); \ + int same_dims = 1; \ + if( THTensor_nDimensionLegacyNoScalars(TENSOR1) != THTensor_nDimensionLegacyNoScalars(TENSOR2) ) { \ + same_dims = 0; \ + } \ + if( THTensor_nDimensionLegacyNoScalars(TENSOR1) != THTensor_nDimensionLegacyNoScalars(TENSOR3) ) { \ + same_dims = 0; \ + } \ + if (same_dims == 0) { \ + AT_ERROR("inconsistent tensor size, expected ", #TENSOR1, " ", TENSOR1->sizes(), ", ", #TENSOR2, " ", TENSOR2->sizes(), " and ", #TENSOR3, " ",TENSOR3->sizes() , " to have the same number of dimensions"); \ + } \ + SIZE_CHECK(TENSOR1, TENSOR2, TENSOR3, DIMENSION) \ +\ + if (TH_TENSOR_DIM_APPLY_hasFinished) { \ + return; \ + } \ + TH_TENSOR_DIM_APPLY_counter = (int64_t*)THAlloc(sizeof(int64_t)*(THTensor_nDimensionLegacyNoScalars(TENSOR1))); \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ +\ + TENSOR1##_data = THTensor_getStoragePtr(TENSOR1)->data()+(TENSOR1)->storage_offset(); \ + TENSOR1##_stride = THTensor_strideLegacyNoScalars((TENSOR1), DIMENSION); \ + TENSOR1##_size = THTensor_sizeLegacyNoScalars((TENSOR1), DIMENSION); \ +\ + TENSOR2##_data = THTensor_getStoragePtr(TENSOR2)->data()+(TENSOR2)->storage_offset(); \ + TENSOR2##_stride = THTensor_strideLegacyNoScalars((TENSOR2), DIMENSION); \ + TENSOR2##_size = THTensor_sizeLegacyNoScalars((TENSOR2), DIMENSION); \ +\ + TENSOR3##_data = THTensor_getStoragePtr(TENSOR3)->data()+(TENSOR3)->storage_offset(); \ + TENSOR3##_stride = THTensor_strideLegacyNoScalars((TENSOR3), DIMENSION); \ + TENSOR3##_size = THTensor_sizeLegacyNoScalars((TENSOR3), DIMENSION); \ +\ + while(!TH_TENSOR_DIM_APPLY_hasFinished) \ + { \ + CODE \ +\ + if(THTensor_nDimensionLegacyNoScalars(TENSOR1) == 1) \ + break; \ + \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + { \ + 
if(TH_TENSOR_DIM_APPLY_i == DIMENSION) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyNoScalars(TENSOR1)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + continue; \ + } \ +\ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]++; \ + TENSOR1##_data += THTensor_strideLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i); \ + TENSOR2##_data += THTensor_strideLegacyNoScalars(TENSOR2, TH_TENSOR_DIM_APPLY_i); \ + TENSOR3##_data += THTensor_strideLegacyNoScalars(TENSOR3, TH_TENSOR_DIM_APPLY_i); \ +\ + if(TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] == THTensor_sizeLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i)) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyNoScalars(TENSOR1)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + else \ + { \ + TENSOR1##_data -= TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i); \ + TENSOR2##_data -= TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR2, TH_TENSOR_DIM_APPLY_i); \ + TENSOR3##_data -= TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR3, TH_TENSOR_DIM_APPLY_i); \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ + } \ + } \ + else \ + break; \ + } \ + } \ + THFree(TH_TENSOR_DIM_APPLY_counter); \ +} + +/** + * Similar to DIM_APPLY(...) but we maintain two sets of pointers: one for the first tensor + * and one for the second. The two tensors must have the same shape, other than at the + * specified DIMENSION. This function makes it easy to store the output from reducing the + * TENSOR at index. For example, in the sum example described below, we could instead do: + * + * int64_t i = 0; + * TYPE1 sum; + * + * for (i = 0; i < TENSOR1##_size; ++i) { + * sum += TENSOR1##_data[i * TENSOR1##_stride] + * } + * *TENSOR2##_data = (TYPE2) sum; + * + * In particular, we guarantee that the offset into TENSOR2 will be what you would get if + * you applied all of the index values used to generate the offset into TENSOR1. 
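 *
 * A hypothetical call site (the names `t`, `r` and `dim` are illustrative only),
 * assuming float tensors where `r` has the same shape as `t` apart from DIMENSION,
 * could compute per-slice sums along `dim` roughly like this:
 *
 *   TH_TENSOR_DIM_APPLY2(float, t, float, r, dim,
 *     float slice_sum = 0;
 *     for (int64_t i = 0; i < t_size; i++)
 *       slice_sum += t_data[i * t_stride];
 *     *r_data = slice_sum;)
 *
 * Inside CODE, t_data / t_stride / t_size describe the current slice of TENSOR1
 * along DIMENSION, and r_data points at the correspondingly positioned output
 * element of TENSOR2.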
+ */ +#define TH_TENSOR_DIM_APPLY2(TYPE1, TENSOR1, TYPE2, TENSOR2, DIMENSION, CODE) \ +{ \ + TYPE1 *TENSOR1##_data = NULL; \ + TH_UNUSED int64_t TENSOR1##_stride = 0, TENSOR1##_size = 0; \ + TYPE2 *TENSOR2##_data = NULL; \ + TH_UNUSED int64_t TENSOR2##_stride = 0, TENSOR2##_size = 0; \ + int64_t *TH_TENSOR_DIM_APPLY_counter = NULL; \ + int TH_TENSOR_DIM_APPLY_hasFinished = THTensor_(numel)(TENSOR1) == 0; \ + int TH_TENSOR_DIM_APPLY_i; \ +\ + if( (DIMENSION < 0) || (DIMENSION >= THTensor_nDimensionLegacyNoScalars(TENSOR1)) ) \ + THError("invalid dimension %d (expected to be 0 <= dim < %d)", DIMENSION, THTensor_nDimensionLegacyAll(TENSOR1)); \ + if( THTensor_nDimensionLegacyNoScalars(TENSOR1) != THTensor_nDimensionLegacyNoScalars(TENSOR2)) { \ + AT_ERROR("inconsistent tensor size, expected ", #TENSOR1, " ", TENSOR1->sizes(), " and ", #TENSOR2, " ", TENSOR2->sizes(), " to have the same number of dimensions"); \ + } \ + TH_UNUSED int shape_check_flag = 0; \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == DIMENSION) \ + continue; \ + if(THTensor_sizeLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i) != THTensor_sizeLegacyNoScalars(TENSOR2, TH_TENSOR_DIM_APPLY_i)) { \ + AT_ERROR("Expected ", #TENSOR1, " ", TENSOR1->sizes(), " and ", #TENSOR2, " ", TENSOR2->sizes(), " to have the same size in dimension ", DIMENSION); \ + } \ + } \ +\ + if (TH_TENSOR_DIM_APPLY_hasFinished) { \ + return; \ + } \ + TH_TENSOR_DIM_APPLY_counter = (int64_t*)THAlloc(sizeof(int64_t)*(THTensor_nDimensionLegacyNoScalars(TENSOR1))); \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ +\ + TENSOR1##_data = THTensor_getStoragePtr(TENSOR1)->data()+(TENSOR1)->storage_offset(); \ + TENSOR1##_stride = THTensor_strideLegacyNoScalars((TENSOR1), DIMENSION); \ + TENSOR1##_size = THTensor_sizeLegacyNoScalars(TENSOR1, DIMENSION); \ +\ + TENSOR2##_data = THTensor_getStoragePtr(TENSOR2)->data()+(TENSOR2)->storage_offset(); \ + TENSOR2##_stride = THTensor_strideLegacyNoScalars((TENSOR2), DIMENSION); \ + TENSOR2##_size = THTensor_sizeLegacyNoScalars(TENSOR2, DIMENSION); \ +\ + while(!TH_TENSOR_DIM_APPLY_hasFinished) \ + { \ + CODE \ +\ + if(THTensor_nDimensionLegacyNoScalars(TENSOR1) == 1) \ + break; \ + \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyNoScalars(TENSOR1); TH_TENSOR_DIM_APPLY_i++) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == DIMENSION) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyNoScalars(TENSOR1)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + continue; \ + } \ +\ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]++; \ + TENSOR1##_data += THTensor_strideLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i); \ + TENSOR2##_data += THTensor_strideLegacyNoScalars(TENSOR2, TH_TENSOR_DIM_APPLY_i); \ +\ + if(TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] == THTensor_sizeLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i)) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyNoScalars(TENSOR1)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + else \ + { \ + TENSOR1##_data -= TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR1, TH_TENSOR_DIM_APPLY_i); \ + TENSOR2##_data -= 
TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR2, TH_TENSOR_DIM_APPLY_i); \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ + } \ + } \ + else \ + break; \ + } \ + } \ + THFree(TH_TENSOR_DIM_APPLY_counter); \ +} + +/** + * The basic idea for DIM_APPLY: Given a TENSOR and a DIMENSION, provide access to the data stored + * at all sets of dimension values other than DIMENSION, such that we can get all the values at those + * fixed indices for the various values at DIMENSION. + * + * Suppose we have a 2x3x4 Tensor A, and we have DIMENSION=2. Then we will hit CODE (2x3) times, and the + * pointer into storage will be at: + * + * A[0][0] + * A[0][1] + * A[0][2] + * A[1][0] + * A[1][1] + * A[1][2] + * + * And at each point, we can access the data for each of the four elements of the Tensor via + * TENSOR##_stride. So for example, if we wanted to sum the elements there, we could do: + * + * int64_t i = 0; + * TYPE sum; + * for (i = 0; i < TENSOR##_size; i++) { + * sum += TENSOR##_data[i * TENSOR##_stride] + * } + * + * Note that we don't have to have DIMENSION be the last tensor. If we have DIMENSION=1, then we will hit the + * code (2x4) times, with pointer into the storage at: + * + * offset + + * stride_0 * 0 + stride_2 * 0 + * stride_0 * 1 + stride_2 * 0 + * stride_0 * 0 + stride_2 * 1 + * stride_0 * 1 + stride_2 * 1 + * stride_0 * 0 + stride_2 * 2 + * stride_0 * 1 + stride_2 * 2 + * stride_0 * 0 + stride_2 * 3 + * stride_0 * 1 + stride_2 * 3 + * + * So we can again sum over the values at DIMENSION with the other indices fixed. + */ +#define TH_TENSOR_DIM_APPLY(TYPE, TENSOR, DIMENSION, CODE) \ +{ \ + TYPE *TENSOR##_data = NULL; \ + int64_t TENSOR##_stride = 0, TENSOR##_size = 0; \ + int64_t *TH_TENSOR_DIM_APPLY_counter = NULL; \ + int TH_TENSOR_DIM_APPLY_hasFinished = 0; \ + int TH_TENSOR_DIM_APPLY_i; \ +\ + if( (DIMENSION < 0) || (DIMENSION >= THTensor_nDimensionLegacyAll(TENSOR)) ) \ + THError("invalid dimension"); \ +\ + TENSOR##_data = THTensor_getStoragePtr(TENSOR)->data()+(TENSOR)->storage_offset(); \ + TENSOR##_stride = THTensor_strideLegacyNoScalars((TENSOR), DIMENSION); \ + TENSOR##_size = THTensor_sizeLegacyNoScalars(TENSOR, DIMENSION); \ + /* Counter stores the indices into the Tensor at any time */ \ + TH_TENSOR_DIM_APPLY_counter = (int64_t*)THAlloc(sizeof(int64_t)*(THTensor_nDimensionLegacyAll(TENSOR))); \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyAll(TENSOR); TH_TENSOR_DIM_APPLY_i++) \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ +\ + while(!TH_TENSOR_DIM_APPLY_hasFinished) \ + { \ + CODE \ +\ + if(THTensor_nDimensionLegacyAll(TENSOR) == 1) \ + break; \ + \ + for(TH_TENSOR_DIM_APPLY_i = 0; TH_TENSOR_DIM_APPLY_i < THTensor_nDimensionLegacyAll(TENSOR); TH_TENSOR_DIM_APPLY_i++) \ + { \ + /* Check if the index is equal to DIMENSION. We don't need to update the */ \ + /* offset if this is the case, and can consider the next index. 
However, */ \ + /* in the case that the DIMENSION is the last index in the Tensor, then */ \ + /* we have parsed the entire tensor and can exit */ \ + if(TH_TENSOR_DIM_APPLY_i == DIMENSION) \ + { \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyAll(TENSOR)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + continue; \ + } \ +\ + /* Bump the counter at this index, update the pointer */ \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]++; \ + TENSOR##_data += THTensor_strideLegacyNoScalars(TENSOR, TH_TENSOR_DIM_APPLY_i); \ +\ + if(TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] == THTensor_sizeLegacyNoScalars(TENSOR, TH_TENSOR_DIM_APPLY_i)) \ + { \ + /* Handled TENSOR_size(dim) iterations for DIM_APPLY_i. If this is the last dimension, exit */ \ + if(TH_TENSOR_DIM_APPLY_i == THTensor_nDimensionLegacyAll(TENSOR)-1) \ + { \ + TH_TENSOR_DIM_APPLY_hasFinished = 1; \ + break; \ + } \ + else \ + { \ + /* Reset the counter, and the pointer to the beginning of the storage for this combination of indices */ \ + TENSOR##_data -= TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i]*THTensor_strideLegacyNoScalars(TENSOR, TH_TENSOR_DIM_APPLY_i); \ + TH_TENSOR_DIM_APPLY_counter[TH_TENSOR_DIM_APPLY_i] = 0; \ + } \ + } \ + else \ + break; \ + } \ + } \ + THFree(TH_TENSOR_DIM_APPLY_counter); \ +} + +#endif diff --git a/thirdparty/libtorch/include/TH/THVector.h b/thirdparty/libtorch/include/TH/THVector.h new file mode 100644 index 0000000000..512ce54900 --- /dev/null +++ b/thirdparty/libtorch/include/TH/THVector.h @@ -0,0 +1,21 @@ +#ifndef TH_VECTOR_INC +#define TH_VECTOR_INC + +#include +#define THVector_(NAME) TH_CONCAT_4(TH,Real,Vector_,NAME) + +/* We are going to use dynamic dispatch, and want only to generate declarations + * of the vector functions */ +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#endif // TH_VECTOR_INC diff --git a/thirdparty/libtorch/include/TH/generic/THBlas.h b/thirdparty/libtorch/include/TH/generic/THBlas.h new file mode 100644 index 0000000000..7d827eb09d --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THBlas.h @@ -0,0 +1,19 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THBlas.h" +#else + +/* Level 1 */ +TH_API void THBlas_(swap)(int64_t n, scalar_t *x, int64_t incx, scalar_t *y, int64_t incy); +TH_API void THBlas_(scal)(int64_t n, scalar_t a, scalar_t *x, int64_t incx); +TH_API void THBlas_(copy)(int64_t n, scalar_t *x, int64_t incx, scalar_t *y, int64_t incy); +TH_API void THBlas_(axpy)(int64_t n, scalar_t a, scalar_t *x, int64_t incx, scalar_t *y, int64_t incy); +TH_API scalar_t THBlas_(dot)(int64_t n, scalar_t *x, int64_t incx, scalar_t *y, int64_t incy); + +/* Level 2 */ +TH_API void THBlas_(gemv)(char trans, int64_t m, int64_t n, scalar_t alpha, scalar_t *a, int64_t lda, scalar_t *x, int64_t incx, scalar_t beta, scalar_t *y, int64_t incy); +TH_API void THBlas_(ger)(int64_t m, int64_t n, scalar_t alpha, scalar_t *x, int64_t incx, scalar_t *y, int64_t incy, scalar_t *a, int64_t lda); + +/* Level 3 */ +TH_API void THBlas_(gemm)(char transa, char transb, int64_t m, int64_t n, int64_t k, scalar_t alpha, scalar_t *a, int64_t lda, scalar_t *b, int64_t ldb, scalar_t beta, scalar_t *c, int64_t ldc); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THLapack.h b/thirdparty/libtorch/include/TH/generic/THLapack.h new file mode 100644 index 0000000000..287915c74d --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THLapack.h @@ -0,0 +1,21 @@ +#ifndef 
TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THLapack.h" +#else + +/* ||AX-B|| */ +TH_API void THLapack_(gels)(char trans, int m, int n, int nrhs, scalar_t *a, int lda, scalar_t *b, int ldb, scalar_t *work, int lwork, int *info); +/* Non-sym eigenvals */ +TH_API void THLapack_(geev)(char jobvl, char jobvr, int n, scalar_t *a, int lda, scalar_t *wr, scalar_t *wi, scalar_t* vl, int ldvl, scalar_t *vr, int ldvr, scalar_t *work, int lwork, int *info); + +/* Positive Definite matrices */ +/* Matrix inverse based on Cholesky factorization */ +TH_API void THLapack_(potri)(char uplo, int n, scalar_t *a, int lda, int *info); + +/* QR decomposition */ +TH_API void THLapack_(geqrf)(int m, int n, scalar_t *a, int lda, scalar_t *tau, scalar_t *work, int lwork, int *info); +/* Build Q from output of geqrf */ +TH_API void THLapack_(orgqr)(int m, int n, int k, scalar_t *a, int lda, scalar_t *tau, scalar_t *work, int lwork, int *info); +/* Multiply Q with a matrix from output of geqrf */ +TH_API void THLapack_(ormqr)(char side, char trans, int m, int n, int k, scalar_t *a, int lda, scalar_t *tau, scalar_t *c, int ldc, scalar_t *work, int lwork, int *info); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THStorage.h b/thirdparty/libtorch/include/TH/generic/THStorage.h new file mode 100644 index 0000000000..9f6de2e9d0 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THStorage.h @@ -0,0 +1,74 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THStorage.h" +#else + +#include +#include + +/* on pourrait avoir un liste chainee + qui initialise math, lab structures (or more). + mouais -- complique. + + Pb: THMapStorage is kind of a class + THLab_()... comment je m'en sors? + + en template, faudrait que je les instancie toutes!!! oh boy! + Et comment je sais que c'est pour Cuda? Le type float est le meme dans les <> + + au bout du compte, ca serait sur des pointeurs float/double... etc... = facile. + primitives?? + */ + +// Struct definition is moved to THStorage.hpp (so this file stays C compatible) + +#define THStorage at::StorageImpl + +// These used to be distinct types; for some measure of backwards compatibility and documentation +// alias these to the single THStorage type. 
+#define THFloatStorage THStorage +#define THDoubleStorage THStorage +#define THHalfStorage THStorage +#define THByteStorage THStorage +#define THCharStorage THStorage +#define THShortStorage THStorage +#define THIntStorage THStorage +#define THLongStorage THStorage +#define THBoolStorage THStorage +#define THBFloat16Storage THStorage +#define THQUInt8Storage THStorage +#define THQInt8Storage THStorage +#define THQInt32Storage THStorage + +TH_API scalar_t* THStorage_(data)(const THStorage*); +TH_API ptrdiff_t THStorage_(size)(const THStorage*); +TH_API size_t THStorage_(elementSize)(void); + +/* slow access -- checks everything */ +TH_API void THStorage_(set)(THStorage*, ptrdiff_t, scalar_t); +TH_API scalar_t THStorage_(get)(const THStorage*, ptrdiff_t); + +TH_API THStorage* THStorage_(new)(void); +TH_API THStorage* THStorage_(newWithSize)(ptrdiff_t size); +TH_API THStorage* THStorage_(newWithSize1)(scalar_t); +TH_API THStorage* THStorage_(newWithSize2)(scalar_t, scalar_t); +TH_API THStorage* THStorage_(newWithSize3)(scalar_t, scalar_t, scalar_t); +TH_API THStorage* THStorage_(newWithSize4)(scalar_t, scalar_t, scalar_t, scalar_t); +TH_API THStorage* THStorage_(newWithMapping)(const char *filename, ptrdiff_t size, int flags); + +TH_API THStorage* THStorage_(newWithAllocator)(ptrdiff_t size, + c10::Allocator* allocator); +TH_API THStorage* THStorage_(newWithDataAndAllocator)( + at::DataPtr&& data, ptrdiff_t size, at::Allocator* allocator); + +/* should not differ with API */ +TH_API void THStorage_(setFlag)(THStorage *storage, const char flag); +TH_API void THStorage_(clearFlag)(THStorage *storage, const char flag); +TH_API void THStorage_(retain)(THStorage *storage); +TH_API void THStorage_(swap)(THStorage *storage1, THStorage *storage2); + +/* might differ with other API (like CUDA) */ +TH_API void THStorage_(free)(THStorage *storage); +TH_API void THStorage_(resize)(THStorage *storage, ptrdiff_t size); +TH_API void THStorage_(fill)(THStorage *storage, scalar_t value); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THStorageCopy.h b/thirdparty/libtorch/include/TH/generic/THStorageCopy.h new file mode 100644 index 0000000000..7bab82e454 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THStorageCopy.h @@ -0,0 +1,27 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THStorageCopy.h" +#else + +/* Support for copy between different Storage types */ +TH_API void THStorage_(copy)(THStorage *storage, THStorage *src); +TH_API void THStorage_(copyByte)(THStorage *storage, struct THByteStorage *src); +TH_API void THStorage_(copyChar)(THStorage *storage, struct THCharStorage *src); +TH_API void THStorage_(copyShort)(THStorage *storage, struct THShortStorage *src); +TH_API void THStorage_(copyInt)(THStorage *storage, struct THIntStorage *src); +TH_API void THStorage_(copyLong)(THStorage *storage, struct THLongStorage *src); +TH_API void THStorage_(copyFloat)(THStorage *storage, struct THFloatStorage *src); +TH_API void THStorage_(copyDouble)(THStorage *storage, struct THDoubleStorage *src); +TH_API void THStorage_(copyHalf)(THStorage *storage, struct THHalfStorage *src); +TH_API void THStorage_(copyBool)(THStorage *storage, struct THBoolStorage *src); +TH_API void THStorage_(copyBFloat16)(THStorage *storage, struct THBFloat16Storage *src); +#ifdef THQUINT8 +TH_API void THStorage_(copyQUInt8)(THStorage *storage, struct THQUInt8Storage *src); +#endif +#ifdef THQINT8 +TH_API void THStorage_(copyQInt8)(THStorage *storage, struct THQInt8Storage *src); +#endif +#ifdef 
THQINT32 +TH_API void THStorage_(copyQInt32)(THStorage *storage, struct THQInt32Storage *src); +#endif + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensor.h b/thirdparty/libtorch/include/TH/generic/THTensor.h new file mode 100644 index 0000000000..b59a08c023 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensor.h @@ -0,0 +1,134 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensor.h" +#else + +/* a la lua? dim, storageoffset, ... et les methodes ? */ + +#include + +#define THTensor at::TensorImpl + +// These used to be distinct types; for some measure of backwards compatibility and documentation +// alias these to the single THTensor type. +#define THFloatTensor THTensor +#define THDoubleTensor THTensor +#define THHalfTensor THTensor +#define THByteTensor THTensor +#define THCharTensor THTensor +#define THShortTensor THTensor +#define THIntTensor THTensor +#define THLongTensor THTensor +#define THBoolTensor THTensor +#define THBFloat16Tensor THTensor + +/**** access methods ****/ +TH_API THStorage* THTensor_(storage)(const THTensor *self); +TH_API ptrdiff_t THTensor_(storageOffset)(const THTensor *self); + +// See [NOTE: nDimension vs nDimensionLegacyNoScalars vs nDimensionLegacyAll] +TH_API int THTensor_(nDimension)(const THTensor *self); +TH_API int THTensor_(nDimensionLegacyNoScalars)(const THTensor *self); +TH_API int THTensor_(nDimensionLegacyAll)(const THTensor *self); +TH_API int64_t THTensor_(size)(const THTensor *self, int dim); +TH_API int64_t THTensor_(stride)(const THTensor *self, int dim); +TH_API scalar_t *THTensor_(data)(const THTensor *self); + + +/**** creation methods ****/ +TH_API THTensor *THTensor_(new)(void); +TH_API THTensor *THTensor_(newWithTensor)(THTensor *tensor); +TH_API THTensor *THTensor_(newWithStorage1d)(THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_); +TH_API THTensor *THTensor_(newWithStorage2d)(THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_); +TH_API THTensor *THTensor_(newWithStorage3d)(THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_); +TH_API THTensor *THTensor_(newWithStorage4d)(THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_, + int64_t size3_, int64_t stride3_); + +/* stride might be NULL */ +TH_API THTensor *THTensor_(newWithSize1d)(int64_t size0_); +TH_API THTensor *THTensor_(newWithSize2d)(int64_t size0_, int64_t size1_); +TH_API THTensor *THTensor_(newWithSize3d)(int64_t size0_, int64_t size1_, int64_t size2_); +TH_API THTensor *THTensor_(newWithSize4d)(int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_); + +TH_API THTensor *THTensor_(newClone)(THTensor *self); +TH_API THTensor *THTensor_(newContiguous)(THTensor *tensor); +TH_API THTensor *THTensor_(newSelect)(THTensor *tensor, int dimension_, int64_t sliceIndex_); +TH_API THTensor *THTensor_(newNarrow)(THTensor *tensor, int dimension_, int64_t firstIndex_, int64_t size_); +TH_API THTensor *THTensor_(newTranspose)(THTensor *tensor, int dimension1_, int dimension2_); + +// resize* methods simply resize the storage. So they may not retain the current data at current indices. +// This is especially likely to happen when the tensor is not contiguous. 
In general, if you still need the +// values, unless you are doing some size and stride tricks, do not use resize*. +TH_API void THTensor_(resizeNd)(THTensor *tensor, int nDimension, const int64_t *size, const int64_t *stride); +TH_API void THTensor_(resizeAs)(THTensor *tensor, THTensor *src); +TH_API void THTensor_(resize0d)(THTensor *tensor); +TH_API void THTensor_(resize1d)(THTensor *tensor, int64_t size0_); +TH_API void THTensor_(resize2d)(THTensor *tensor, int64_t size0_, int64_t size1_); +TH_API void THTensor_(resize3d)(THTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_); +TH_API void THTensor_(resize4d)(THTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_); +TH_API void THTensor_(resize5d)(THTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_, int64_t size4_); +// Note: these are legacy resize functions that treat sizes as size->size() == 0 and size->data() as being 0-terminated. + +TH_API void THTensor_(set)(THTensor *self, THTensor *src); +TH_API void THTensor_(setStorageNd)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, int nDimension, const int64_t *size, const int64_t *stride); +TH_API void THTensor_(setStorage1d)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_); +TH_API void THTensor_(setStorage2d)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_); +TH_API void THTensor_(setStorage3d)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_); +TH_API void THTensor_(setStorage4d)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_, + int64_t size3_, int64_t stride3_); + +TH_API void THTensor_(narrow)(THTensor *self, THTensor *src, int dimension_, int64_t firstIndex_, int64_t size_); +TH_API void THTensor_(select)(THTensor *self, THTensor *src, int dimension_, int64_t sliceIndex_); +TH_API void THTensor_(transpose)(THTensor *self, THTensor *src, int dimension1_, int dimension2_); +TH_API int THTensor_(isTransposed)(const THTensor *self); + +TH_API void THTensor_(squeeze1d)(THTensor *self, THTensor *src, int dimension_); +TH_API void THTensor_(unsqueeze1d)(THTensor *self, THTensor *src, int dimension_); + +TH_API int THTensor_(isContiguous)(const THTensor *self); +TH_API int THTensor_(isSameSizeAs)(const THTensor *self, const THTensor *src); +TH_API ptrdiff_t THTensor_(nElement)(const THTensor *self); + +TH_API void THTensor_(retain)(THTensor *self); +TH_API void THTensor_(free)(THTensor *self); +TH_API void THTensor_(freeCopyTo)(THTensor *self, THTensor *dst); + +/* Slow access methods [check everything] */ +TH_API void THTensor_(set0d)(THTensor *tensor, scalar_t value); +TH_API void THTensor_(set1d)(THTensor *tensor, int64_t x0, scalar_t value); +TH_API void THTensor_(set2d)(THTensor *tensor, int64_t x0, int64_t x1, scalar_t value); +TH_API void THTensor_(set3d)(THTensor *tensor, int64_t x0, int64_t x1, int64_t x2, scalar_t value); +TH_API void THTensor_(set4d)(THTensor *tensor, int64_t x0, int64_t x1, int64_t x2, int64_t x3, scalar_t value); + +TH_API scalar_t THTensor_(get0d)(const THTensor *tensor); +TH_API scalar_t THTensor_(get1d)(const THTensor *tensor, int64_t x0); +TH_API scalar_t THTensor_(get2d)(const THTensor *tensor, 
int64_t x0, int64_t x1); +TH_API scalar_t THTensor_(get3d)(const THTensor *tensor, int64_t x0, int64_t x1, int64_t x2); +TH_API scalar_t THTensor_(get4d)(const THTensor *tensor, int64_t x0, int64_t x1, int64_t x2, int64_t x3); + +/* Shape manipulation methods */ +TH_API void THTensor_(cat)(THTensor *r_, THTensor *ta, THTensor *tb, int dimension); +TH_API void THTensor_(catArray)(THTensor *result, THTensor **inputs, int numInputs, int dimension); + +/* Debug methods */ +TH_API THDescBuff THTensor_(desc)(const THTensor *tensor); +TH_API THDescBuff THTensor_(sizeDesc)(const THTensor *tensor); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensor.hpp b/thirdparty/libtorch/include/TH/generic/THTensor.hpp new file mode 100644 index 0000000000..5966fd853b --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensor.hpp @@ -0,0 +1,20 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensor.hpp" +#else + +// STOP!!! Thinking of including this header directly? Please +// read Note [TH abstraction violation] + +// NOTE: functions exist here only to support dispatch via Declarations.cwrap. You probably don't want to put +// new functions in here, they should probably be un-genericized. + +TH_CPP_API void THTensor_(setStorage)(THTensor *self, THStorage *storage_, ptrdiff_t storageOffset_, + at::IntArrayRef size_, at::IntArrayRef stride_); +/* strides.data() might be NULL */ +TH_CPP_API THTensor *THTensor_(newWithStorage)(THStorage *storage, ptrdiff_t storageOffset, + at::IntArrayRef sizes, at::IntArrayRef strides); + +TH_CPP_API void THTensor_(resize)(THTensor *self, at::IntArrayRef size, at::IntArrayRef stride); +TH_CPP_API THTensor *THTensor_(newWithSize)(at::IntArrayRef size, at::IntArrayRef stride); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorConv.h b/thirdparty/libtorch/include/TH/generic/THTensorConv.h new file mode 100644 index 0000000000..7eab385bef --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorConv.h @@ -0,0 +1,79 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorConv.h" +#else + +TH_API void THTensor_(validXCorr2Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kr, int64_t kc, + int64_t sr, int64_t sc); + +TH_API void THTensor_(validConv2Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kr, int64_t kc, + int64_t sr, int64_t sc); + +TH_API void THTensor_(fullXCorr2Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kr, int64_t kc, + int64_t sr, int64_t sc); + +TH_API void THTensor_(fullConv2Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kr, int64_t kc, + int64_t sr, int64_t sc); + +TH_API void THTensor_(validXCorr2DRevptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kr, int64_t kc, + int64_t sr, int64_t sc); + +TH_API void THTensor_(conv2DRevger)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol); +TH_API void THTensor_(conv2DRevgerm)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol); +TH_API void THTensor_(conv2Dger)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv2Dmv)(THTensor *r_, scalar_t beta, scalar_t alpha, 
THTensor *t_, THTensor *k_, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv2Dmm)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv2Dmul)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv2Dcmul)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t srow, int64_t scol, const char *vf, const char *xc); + +TH_API void THTensor_(validXCorr3Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t it, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kt, int64_t kr, int64_t kc, + int64_t st, int64_t sr, int64_t sc); + +TH_API void THTensor_(validConv3Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t it, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kt, int64_t kr, int64_t kc, + int64_t st, int64_t sr, int64_t sc); + +TH_API void THTensor_(fullXCorr3Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t it, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kt, int64_t kr, int64_t kc, + int64_t st, int64_t sr, int64_t sc); + +TH_API void THTensor_(fullConv3Dptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t it, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kt, int64_t kr, int64_t kc, + int64_t st, int64_t sr, int64_t sc); + +TH_API void THTensor_(validXCorr3DRevptr)(scalar_t *r_, + scalar_t alpha, + scalar_t *t_, int64_t it, int64_t ir, int64_t ic, + scalar_t *k_, int64_t kt, int64_t kr, int64_t kc, + int64_t st, int64_t sr, int64_t sc); + +TH_API void THTensor_(conv3DRevger)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t sdepth, int64_t srow, int64_t scol); +TH_API void THTensor_(conv3Dger)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t sdepth, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv3Dmv)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t sdepth, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv3Dmul)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t sdepth, int64_t srow, int64_t scol, const char *vf, const char *xc); +TH_API void THTensor_(conv3Dcmul)(THTensor *r_, scalar_t beta, scalar_t alpha, THTensor *t_, THTensor *k_, int64_t sdepth, int64_t srow, int64_t scol, const char *vf, const char *xc); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorFastGetSet.hpp b/thirdparty/libtorch/include/TH/generic/THTensorFastGetSet.hpp new file mode 100644 index 0000000000..83c2c6481b --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorFastGetSet.hpp @@ -0,0 +1,49 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorFastGetSet.hpp" +#else + +static inline scalar_t THTensor_(fastGetLegacy1dNoScalars)(THTensor *self, int64_t x0) { + return self->unsafe_data()[x0*THTensor_strideLegacyNoScalars(self, 0)]; +} + +static inline scalar_t THTensor_(fastGet1d)(THTensor *self, int64_t x0) { + return self->unsafe_data()[x0*self->stride(0)]; +} + +static inline scalar_t THTensor_(fastGet2d)(THTensor *self, int64_t x0, int64_t x1) { + return self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)]; +} + +static inline scalar_t THTensor_(fastGet3d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2) { + return 
self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)]; +} + +static inline scalar_t THTensor_(fastGet4d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2, int64_t x3) { + return self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)+x3*self->stride(3)]; +} + +static inline scalar_t THTensor_(fastGet5d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2, int64_t x3, int64_t x4) { + return self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)+x3*self->stride(3)+(x4)*self->stride(4)]; +} + +static inline void THTensor_(fastSet1d)(THTensor *self, int64_t x0, scalar_t value) { + self->unsafe_data()[x0*self->stride(0)] = value; +} + +static inline void THTensor_(fastSet2d)(THTensor *self, int64_t x0, int64_t x1, scalar_t value) { + self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)] = value; +} + +static inline void THTensor_(fastSet3d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2, scalar_t value) { + self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)] = value; +} + +static inline void THTensor_(fastSet4d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2, int64_t x3, scalar_t value) { + self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)+x3*self->stride(3)] = value; +} + +static inline void THTensor_(fastSet5d)(THTensor *self, int64_t x0, int64_t x1, int64_t x2, int64_t x3, int64_t x4, scalar_t value) { + self->unsafe_data()[x0*self->stride(0)+x1*self->stride(1)+x2*self->stride(2)+x3*self->stride(3)+(x4)*self->stride(4)] = value; +} + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorFill.h b/thirdparty/libtorch/include/TH/generic/THTensorFill.h new file mode 100644 index 0000000000..4763918565 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorFill.h @@ -0,0 +1,8 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorFill.h" +#else + +TH_API void THTensor_(fill)(THTensor *r_, scalar_t value); +TH_API void THTensor_(zero)(THTensor *r_); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorLapack.h b/thirdparty/libtorch/include/TH/generic/THTensorLapack.h new file mode 100644 index 0000000000..05dbbf9f12 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorLapack.h @@ -0,0 +1,12 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorLapack.h" +#else + +TH_API void THTensor_(gels)(THTensor *rb_, THTensor *ra_, THTensor *b_, THTensor *a_); +TH_API void THTensor_(geev)(THTensor *re_, THTensor *rv_, THTensor *a_, bool eigenvectors); +TH_API void THTensor_(potri)(THTensor *ra_, THTensor *a, bool upper); +TH_API void THTensor_(geqrf)(THTensor *ra_, THTensor *rtau_, THTensor *a); +TH_API void THTensor_(orgqr)(THTensor *ra_, THTensor *a, THTensor *tau); +TH_API void THTensor_(ormqr)(THTensor *ra_, THTensor *a, THTensor *tau, THTensor *c, bool left, bool transpose); + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorMath.h b/thirdparty/libtorch/include/TH/generic/THTensorMath.h new file mode 100644 index 0000000000..b1054dd044 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorMath.h @@ -0,0 +1,169 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorMath.h" +#else + +#include +#include + +TH_API void THTensor_(nonzero)(THLongTensor *subscript, THTensor *tensor); + +#if !defined(TH_REAL_IS_HALF) + +TH_API void THTensor_(ltValue)(THBoolTensor *r_, THTensor* t, scalar_t value); +TH_API void 
THTensor_(leValue)(THBoolTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(gtValue)(THBoolTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(geValue)(THBoolTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(neValue)(THBoolTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(eqValue)(THBoolTensor *r_, THTensor* t, scalar_t value); + +TH_API void THTensor_(ltValueT)(THTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(leValueT)(THTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(gtValueT)(THTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(geValueT)(THTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(neValueT)(THTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(eqValueT)(THTensor *r_, THTensor* t, scalar_t value); + +TH_API void THTensor_(ltTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(leTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(gtTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(geTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(neTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(eqTensor)(THBoolTensor *r_, THTensor *ta, THTensor *tb); + +TH_API void THTensor_(ltTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(leTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(gtTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(geTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(neTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(eqTensorT)(THTensor *r_, THTensor *ta, THTensor *tb); + +TH_API void THTensor_(ltValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(leValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(gtValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(geValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(neValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); +TH_API void THTensor_(eqValueByte)(THByteTensor *r_, THTensor* t, scalar_t value); + +TH_API void THTensor_(ltTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(leTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(gtTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(geTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(neTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); +TH_API void THTensor_(eqTensorByte)(THByteTensor *r_, THTensor *ta, THTensor *tb); + +TH_API void THTensor_(maskedSelect)(THTensor *tensor, THTensor* src, THByteTensor *mask); +TH_API void THTensor_(maskedSelectBool)(THTensor *tensor, THTensor* src, THBoolTensor *mask); +TH_API void THTensor_(maskedFill)(THTensor *tensor, THByteTensor *mask, scalar_t value); +TH_API void THTensor_(maskedFillBool)(THTensor *tensor, THBoolTensor *mask, scalar_t value); +TH_API void THTensor_(maskedCopy)(THTensor *tensor, THByteTensor *mask, THTensor* src); +TH_API void THTensor_(maskedCopyBool)(THTensor *tensor, THBoolTensor *mask, THTensor* src); + +TH_API ptrdiff_t THTensor_(numel)(THTensor *t); + +TH_API void THTensor_(addmv)(THTensor *r_, THTensor *t, THTensor *mat, THTensor *vec, scalar_t beta, scalar_t alpha); +TH_API void 
THTensor_(addmm)(THTensor *r_, THTensor *t, THTensor *mat1, THTensor *mat2, scalar_t beta, scalar_t alpha); +TH_API void THTensor_(addr)(THTensor *r_, THTensor *t, THTensor *vec1, THTensor *vec2, scalar_t beta, scalar_t alpha); + +#if !defined(TH_REAL_IS_BOOL) +TH_API void THTensor_(mul)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(div)(THTensor *r_, THTensor *t, scalar_t value); +#endif + +#if !defined(TH_REAL_IS_BFLOAT16) + +TH_API accreal THTensor_(sumall)(THTensor *t); +TH_API int THTensor_(equal)(THTensor *ta, THTensor *tb); + +TH_API void THTensor_(bitand)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(cbitand)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void THTensor_(bitor)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(cbitor)(THTensor *r_, THTensor *t, THTensor *src); + +void THTensor_(preserveReduceDimSemantics)(THTensor *r_, int in_dims, int reduce_dimension, int keepdim); + +TH_API void THTensor_(max)(THTensor *values_, THLongTensor *indices_, THTensor *t, int dimension, int keepdim); +TH_API void THTensor_(min)(THTensor *values_, THLongTensor *indices_, THTensor *t, int dimension, int keepdim); +TH_API scalar_t THTensor_(minall)(THTensor *t); +TH_API scalar_t THTensor_(maxall)(THTensor *t); + +TH_API void THTensor_(cmax)(THTensor *r, THTensor *t, THTensor *src); +TH_API void THTensor_(cmin)(THTensor *r, THTensor *t, THTensor *src); +TH_API void THTensor_(cmaxValue)(THTensor *r, THTensor *t, scalar_t value); +TH_API void THTensor_(cminValue)(THTensor *r, THTensor *t, scalar_t value); + +TH_API void THTensor_(indexSelect)(THTensor *tensor, THTensor *src, int dim, THLongTensor *index); +TH_API void THTensor_(indexCopy)(THTensor *tensor, int dim, THLongTensor *index, THTensor *src); +TH_API void THTensor_(take)(THTensor *tensor, THTensor *src, THLongTensor *index); +TH_API void THTensor_(put)(THTensor *tensor, THLongTensor *index, THTensor *src, int accumulate); +TH_API void THTensor_(indexFill)(THTensor *tensor, int dim, THLongTensor *index, scalar_t val); + +TH_API void THTensor_(gather)(THTensor *tensor, THTensor *src, int dim, THLongTensor *index); +TH_API void THTensor_(scatter)(THTensor *tensor, int dim, THLongTensor *index, THTensor *src); +TH_API void THTensor_(scatterAdd)(THTensor *tensor, int dim, THLongTensor *index, THTensor *src); +TH_API void THTensor_(scatterFill)(THTensor *tensor, int dim, THLongTensor *index, scalar_t val); + +TH_API void THTensor_(cumsum)(THTensor *r_, THTensor *t, int dimension); +TH_API void THTensor_(cumprod)(THTensor *r_, THTensor *t, int dimension); + +#if !defined(TH_REAL_IS_BOOL) /* non bool only part */ + +TH_API accreal THTensor_(dot)(THTensor *t, THTensor *src); + +TH_API void THTensor_(cinv)(THTensor *self, THTensor *src); + +TH_API void THTensor_(lshift)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(rshift)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(fmod)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(remainder)(THTensor *r_, THTensor *t, scalar_t value); +TH_API void THTensor_(clamp)(THTensor *r_, THTensor *t, scalar_t min_value, scalar_t max_value); + +TH_API void THTensor_(cadd)(THTensor *r_, THTensor *t, scalar_t value, THTensor *src); +TH_API void THTensor_(csub)(THTensor *self, THTensor *src1, scalar_t value, THTensor *src2); +TH_API void THTensor_(cmul)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void THTensor_(cdiv)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void 
THTensor_(clshift)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void THTensor_(crshift)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void THTensor_(cfmod)(THTensor *r_, THTensor *t, THTensor *src); +TH_API void THTensor_(cremainder)(THTensor *r_, THTensor *t, THTensor *src); + +TH_API void THTensor_(addbmm)(THTensor *r_, THTensor *t, THTensor *batch1, THTensor *batch2, scalar_t beta, scalar_t alpha); +TH_API void THTensor_(baddbmm)(THTensor *r_, THTensor *t, THTensor *batch1, THTensor *batch2, scalar_t beta, scalar_t alpha); + +TH_API void THTensor_(kthvalue)(THTensor *values_, THLongTensor *indices_, THTensor *t, int64_t k, int dimension, int keepdim); +TH_API void THTensor_(mode)(THTensor *values_, THLongTensor *indices_, THTensor *t, int dimension, int keepdim); +TH_API void THTensor_(prod)(THTensor *r_, THTensor *t, int dimension, int keepdim); +TH_API accreal THTensor_(trace)(THTensor *t); + +TH_API void THTensor_(diag)(THTensor *r_, THTensor *t, int k); + +TH_API void THTensor_(sort)(THTensor *rt_, THLongTensor *ri_, THTensor *t, int dimension, int descendingOrder); +TH_API void THTensor_(triu)(THTensor *r_, THTensor *t, int64_t k); + + +#if defined(TH_REAL_IS_FLOAT) || defined(TH_REAL_IS_DOUBLE) + +TH_API void THTensor_(cos)(THTensor *r_, THTensor *t); +TH_API void THTensor_(cosh)(THTensor *r_, THTensor *t); +TH_API void THTensor_(tan)(THTensor *r_, THTensor *t); +TH_API void THTensor_(atan)(THTensor *r_, THTensor *t); +TH_API void THTensor_(tanh)(THTensor *r_, THTensor *t); +TH_API void THTensor_(erf)(THTensor *r_, THTensor *t); +TH_API void THTensor_(erfc)(THTensor *r_, THTensor *t); + +TH_API void THTensor_(std_single)(THTensor *r_, THTensor *t, int dimension, bool unbiased, int keepdim); +TH_API void THTensor_(var_single)(THTensor *r_, THTensor *t, int dimension, bool unbiased, int keepdim); +TH_API void THTensor_(norm)(THTensor *r_, THTensor *t, scalar_t value, int dimension, int keepdim); +TH_API void THTensor_(renorm)(THTensor *r_, THTensor *t, scalar_t value, int dimension, scalar_t maxnorm); +TH_API accreal THTensor_(dist)(THTensor *a, THTensor *b, scalar_t value); +TH_API void THTensor_(histc)(THTensor *hist, THTensor *tensor, int64_t nbins, scalar_t minvalue, scalar_t maxvalue); +TH_API void THTensor_(bhistc)(THTensor *hist, THTensor *tensor, int64_t nbins, scalar_t minvalue, scalar_t maxvalue); + +TH_API accreal THTensor_(meanall)(THTensor *self); +TH_API accreal THTensor_(var_all)(THTensor *self, bool unbiased); +TH_API accreal THTensor_(std_all)(THTensor *self, bool unbiased); +TH_API accreal THTensor_(normall)(THTensor *t, scalar_t value); +#endif +#endif +#endif +#endif +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THTensorRandom.h b/thirdparty/libtorch/include/TH/generic/THTensorRandom.h new file mode 100644 index 0000000000..7bdae7f64a --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THTensorRandom.h @@ -0,0 +1,32 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THTensorRandom.h" +#else + +#include +#include + +TH_API void THTensor_(random)(THTensor *self, at::Generator *_generator); +TH_API void THTensor_(clampedRandom)(THTensor *self, int64_t min, int64_t max, at::Generator *_generator); +TH_API void THTensor_(cappedRandom)(THTensor *self, int64_t max, at::Generator *_generator); +TH_API void THTensor_(geometric)(THTensor *self, double p, at::Generator *_generator); + +#if defined(TH_REAL_IS_FLOAT) || defined(TH_REAL_IS_DOUBLE) +TH_API void THTensor_(bernoulli_Tensor)(THTensor *self, at::Generator *_generator, 
THTensor *p); +TH_API void THTensor_(uniform)(THTensor *self, double a, double b, at::Generator *_generator); +TH_API void THTensor_(normal)(THTensor *self, double mean, double stdv, at::Generator *_generator); +TH_API void THTensor_(normal_means)(THTensor *self, THTensor *means, double stddev, at::Generator *gen); +TH_API void THTensor_(normal_stddevs)(THTensor *self, double mean, THTensor *stddevs, at::Generator *gen); +TH_API void THTensor_(normal_means_stddevs)(THTensor *self, THTensor *means, THTensor *stddevs, at::Generator *gen); +TH_API void THTensor_(exponential)(THTensor *self, double lambda, at::Generator *_generator); +TH_API void THTensor_(cauchy)(THTensor *self, double median, double sigma, at::Generator *_generator); +TH_API void THTensor_(logNormal)(THTensor *self, double mean, double stdv, at::Generator *_generator); +TH_API void THTensor_(multinomialAliasSetup)(THTensor *prob_dist, THLongTensor *J, THTensor *q); +TH_API void THTensor_(multinomialAliasDraw)(THLongTensor *self, THTensor *q, THLongTensor *J, int n_sample, at::Generator *_generator); +#endif + +#if defined(TH_REAL_IS_BYTE) +TH_API void THTensor_(getRNGState)(at::Generator *_generator, THTensor *self); +TH_API void THTensor_(setRNGState)(at::Generator *_generator, THTensor *self); +#endif + +#endif diff --git a/thirdparty/libtorch/include/TH/generic/THVector.h b/thirdparty/libtorch/include/TH/generic/THVector.h new file mode 100644 index 0000000000..564459ddd1 --- /dev/null +++ b/thirdparty/libtorch/include/TH/generic/THVector.h @@ -0,0 +1,43 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "TH/generic/THVector.h" +#else + +#include +#include + +TH_API void THVector_(fill)(scalar_t *x, const scalar_t c, const ptrdiff_t n); + +#if !defined(TH_REAL_IS_BOOL) /* non bool only part */ + +TH_API void THVector_(cadd)(scalar_t *z, const scalar_t *x, const scalar_t *y, const scalar_t c, const ptrdiff_t n); +TH_API void THVector_(adds)(scalar_t *y, const scalar_t *x, const scalar_t c, const ptrdiff_t n); +TH_API void THVector_(cmul)(scalar_t *z, const scalar_t *x, const scalar_t *y, const ptrdiff_t n); +TH_API void THVector_(muls)(scalar_t *y, const scalar_t *x, const scalar_t c, const ptrdiff_t n); +TH_API void THVector_(cdiv)(scalar_t *z, const scalar_t *x, const scalar_t *y, const ptrdiff_t n); +TH_API void THVector_(divs)(scalar_t *y, const scalar_t *x, const scalar_t c, const ptrdiff_t n); +TH_API void THVector_(neg)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(normal_fill)(scalar_t *data, + const int64_t size, + struct at::Generator *generator, + const scalar_t mean, + const scalar_t stddev); + +#endif /* non bool only part */ + +/* floating point only now */ +#if defined(TH_REAL_IS_FLOAT) || defined(TH_REAL_IS_DOUBLE) + +TH_API void THVector_(exp)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(erf)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(erfc)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(cos)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(cosh)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(tan)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(atan)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(tanh)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); +TH_API void THVector_(pow)(scalar_t *y, const scalar_t *x, const scalar_t c, const ptrdiff_t n); +TH_API void 
THVector_(cinv)(scalar_t *y, const scalar_t *x, const ptrdiff_t n); + +#endif /* floating point only part */ + +#endif diff --git a/thirdparty/libtorch/include/THC/THC.h b/thirdparty/libtorch/include/THC/THC.h new file mode 100644 index 0000000000..79be433e1a --- /dev/null +++ b/thirdparty/libtorch/include/THC/THC.h @@ -0,0 +1,18 @@ +#ifndef THC_INC +#define THC_INC + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCAllocator.h b/thirdparty/libtorch/include/THC/THCAllocator.h new file mode 100644 index 0000000000..a749ee53fe --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCAllocator.h @@ -0,0 +1,17 @@ +#ifndef THC_ALLOCATOR_INC +#define THC_ALLOCATOR_INC + +#include + +// IPC doesn't support (re)allocation + +class TORCH_CUDA_API THCIpcDeleter { + public: + THCIpcDeleter(std::shared_ptr basePtr); + ~THCIpcDeleter(); + static at::DataPtr makeDataPtr(std::shared_ptr basePtr, void* data); +private: + std::shared_ptr basePtr_; +}; + +#endif diff --git a/thirdparty/libtorch/include/THC/THCApply.cuh b/thirdparty/libtorch/include/THC/THCApply.cuh new file mode 100644 index 0000000000..6d275e24d1 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCApply.cuh @@ -0,0 +1,758 @@ +#ifndef THC_APPLY_INC +#define THC_APPLY_INC + +#include +#include +#include +#include +#include + +// +// This file contains pointwise operation functions and kernels that +// work on both contiguous and non-contiguous tensor arguments of +// arbitrary (up to MAX_CUTORCH_DIMS) dimensioned arguments without +// copying or temporary storage. +// + +// Rearrange dimensions for pointwise operations so that strides are in +// decreasing order as much as possible, so that kernels have better memory +// access patterns. +// +// For example, consider a binary operation on two "transposed" 2-dim tensors: +// sizes: 256 512 +// aInfo->strides: 1 256 +// bInfo->strides: 1 256 +// +// Given this, each concurrent memory access inside kernelPointwiseApply2() is +// exactly 256 elements apart, resulting in poor performance. +// +// This function exchanges dimensions so that memory access is contiguous: +// sizes: 512 256 +// aInfo->strides: 256 1 +// bInfo->strides: 256 1 +// +// (Actually, it becomes even better because now collapseDims() can turn each +// input into one contiguous array.) +// +// In general, given M (<=3) TensorInfo's with N dimensions, we can view each +// strides[i] (0 <= i < N) as an M-tuple. Given each pair i < j, we exchange +// strides[i] and [j] if +// (1) strides[i][k] < strides[j][k] for some k (0 <= k < M) +// (exchanging them will benefit input #k), and +// (2) strides[i][k] <= strieds[j][k] for all k +// (exchanging them will not make any input worse). +template +void rearrangeDims(TensorInfo* aInfo, + TensorInfo* bInfo = nullptr, + TensorInfo* cInfo = nullptr) { + int numInfos = 1; + int dims = aInfo->dims; + IndexType *sizes[3] = { aInfo->sizes, }; + IndexType *strides[3] = { aInfo->strides, }; + + if (bInfo != nullptr) { + ++numInfos; + if (bInfo->dims != dims) return; + sizes[1] = bInfo->sizes; + strides[1] = bInfo->strides; + } + + if (cInfo != nullptr) { + ++numInfos; + if (cInfo->dims != dims) return; + sizes[2] = cInfo->sizes; + strides[2] = cInfo->strides; + } + + // Bail out if sizes do not match: we are using "deprecated pointwise + // behavior" among tensors of different shapes but same number of elements. 
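+  // (With mismatched shapes the per-dimension comparisons below would be meaningless, so we return here and keep the original dimension order.)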
+ for (int i = 1; i < numInfos; ++i) { + for (int j = 0; j < dims; ++j) { + if (sizes[i][j] != sizes[0][j]) return; + } + } + + for (int i = 0; i < dims - 1; ++i) { + // No need to consider dimensions of size 1. + if (sizes[0][i] == 1) continue; + + for (int j = i + 1; j < dims; ++j) { + if (sizes[0][j] == 1) continue; + + // Compare the relative sizes of strides between dim #i and dim #j. + bool hasIncreasingStrides = false; + bool hasDecreasingStrides = false; + + for (int k = 0; k < numInfos; k++) { + IndexType stride_i = strides[k][i]; + IndexType stride_j = strides[k][j]; + if (stride_i < stride_j) { + hasIncreasingStrides = true; + } else if (stride_i > stride_j) { + hasDecreasingStrides = true; + } + } + + if (hasIncreasingStrides && !hasDecreasingStrides) { + for (int k = 0; k < numInfos; k++) { + IndexType size = sizes[k][i]; + sizes[k][i] = sizes[k][j]; + sizes[k][j] = size; + + IndexType stride = strides[k][i]; + strides[k][i] = strides[k][j]; + strides[k][j] = stride; + } + } + } + } +} + +// Threads per block for our apply kernel +// FIXME: use occupancy calculator instead +#define THC_APPLY_THREADS_PER_BLOCK (32 * 16) +#define THC_APPLY_BLOCKS_PER_SM 4 +template +#if defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(THC_APPLY_THREADS_PER_BLOCK, THC_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply1(const OffsetInfo a, + IndexType totalElements, + Op op) { + // NOTE: The two typecasts below are essential when IndexType is 64-bit; + // without them, results are silently truncated to 32 bits! + for (IndexType linearIndex = (IndexType) blockIdx.x * blockDim.x + threadIdx.x; + linearIndex < totalElements; + linearIndex += (IndexType) gridDim.x * blockDim.x) { + op(a.get(linearIndex)); + } +} + +template +#if defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(THC_APPLY_THREADS_PER_BLOCK, THC_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply2(const OffsetInfo a, + const OffsetInfo b, + IndexType totalElements, + Op op) { + for (IndexType linearIndex = (IndexType) blockIdx.x * blockDim.x + threadIdx.x; + linearIndex < totalElements; + linearIndex += (IndexType) gridDim.x * blockDim.x) { + op(a.get(linearIndex), b.get(linearIndex)); + } +} + +template +#if defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(THC_APPLY_THREADS_PER_BLOCK, THC_APPLY_BLOCKS_PER_SM) +#endif +__global__ void +kernelPointwiseApply3(const OffsetInfo a, + const OffsetInfo b, + const OffsetInfo c, + IndexType totalElements, + Op op) { + for (IndexType linearIndex = (IndexType) blockIdx.x * blockDim.x + threadIdx.x; + linearIndex < totalElements; + linearIndex += (IndexType) gridDim.x * blockDim.x) { + op(a.get(linearIndex), b.get(linearIndex), c.get(linearIndex)); + } +} + +inline dim3 getApplyBlock() { + return dim3(THC_APPLY_THREADS_PER_BLOCK); +} + +inline bool getApplyGrid(THCState* state, uint64_t totalElements, dim3& grid, int curDevice) { + if (curDevice == -1) return false; + + uint64_t numBlocks = THCCeilDiv(totalElements, static_cast(THC_APPLY_THREADS_PER_BLOCK)); + uint64_t maxGridX = at::cuda::getDeviceProperties(curDevice)->maxGridSize[0]; + if (numBlocks > maxGridX) + numBlocks = maxGridX; + + // For 32-bit indices, make sure that gridDim.x * blockDim.x fits in 32 bits. 
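+  // e.g. with THC_APPLY_THREADS_PER_BLOCK == 512, numBlocks is capped at INT32_MAX / 512 = 4194303, so blockIdx.x * blockDim.x + threadIdx.x never exceeds INT32_MAX when 32-bit (unsigned int) indexing is selected.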
+ if (totalElements <= INT32_MAX && + numBlocks > INT32_MAX / THC_APPLY_THREADS_PER_BLOCK) + numBlocks = INT32_MAX / THC_APPLY_THREADS_PER_BLOCK; + + grid = dim3(numBlocks); + return true; +} + +template +bool THC_pointwiseApply1(THCState* state, + TensorTypeA* a, + const Op& op, + TensorArgType aType = ReadWrite) { + if (THCTensor_nDimensionLegacyAll(state, a) > MAX_CUTORCH_DIMS) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, a) == 0) { + // Zero-dim tensor; do nothing + return true; + } + + const dim3 block = getApplyBlock(); + + dim3 grid; + ptrdiff_t totalElements = THCTensor_nElement(state, a); + + int curDevice = -1; + cudaGetDevice(&curDevice); + if (!getApplyGrid(state, totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. + */ + TensorTypeA* oldA = NULL; + + if (aType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, a)) { + // Must perform in contiguous space + oldA = a; + a = (TensorTypeA*)THCTensor_newContiguous(state, a); + } + + // It is possible that the tensor dimensions are able to be collapsed, + // and thus we can reduce the actual code complexity of the copy by + // exploiting this knowledge statically, since the div/mod is the + // most expensive part of the operation, more so than memory accesses. + // For instance, when copying a non-contiguous to a contiguous tensor + // (or vice versa), the contiguous tensor can be collapsed to one + // dimension, and the loop to translate the linear index to the array + // index can be similarly collapsed. That is what this unrolling is for. +#define HANDLE_CASE(TYPE, A) \ + kernelPointwiseApply1 \ + <<>>( \ + OffsetInfo \ + (aInfo), \ + (TYPE) totalElements, op); + +#define HANDLE_A_CASE(TYPE, A) { \ + switch (A) { \ + case 1: \ + HANDLE_CASE(TYPE, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, -1); \ + break; \ + } \ +} + + // Can we use 32-bit integer math in the kernel (the linear ID for the copy + // and the resulting non-linear offset is all computable using 32-bit math?) + // We also use unsigned index math in the kernel, as signed div/mod has + // additional overhead. + if (THCTensor_canUse32BitIndexMath(state, a)) { + TensorInfo aInfo = + getTensorInfo(state, a); + rearrangeDims(&aInfo); + aInfo.collapseDims(); +#if CUDA_VERSION < 9000 + if (!aInfo.isContiguous()) { + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); + } +#endif + HANDLE_A_CASE(unsigned int, aInfo.dims); + } else { + TensorInfo aInfo = + getTensorInfo(state, a); + rearrangeDims(&aInfo); + aInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. 
+ */ + if (aInfo.dims == 1) { + OffsetInfo + aOffset(aInfo); + kernelPointwiseApply1 + <<>>( + aOffset, (uint64_t) totalElements, op); + } else { + +#if CUDA_VERSION < 9000 + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); +#endif + OffsetInfo + aOffset(aInfo); + kernelPointwiseApply1 + <<>>( + aOffset, (uint64_t) totalElements, op); + } + } +#undef HANDLE_CASE +#undef HANDLE_A_CASE + + if (oldA) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. + THCTensor_copyIgnoringOverlaps(state, oldA, a); + THCTensor_free(state, a); + a = oldA; + } + + return true; +} + +template +bool THC_pointwiseApply2(THCState* state, + TensorTypeA* a, + TensorTypeB* b, + const Op& op, + TensorArgType aType = ReadWrite, + TensorArgType bType = ReadOnly) { + ptrdiff_t totalElements = THCTensor_nElement(state, a); + if (totalElements != THCTensor_nElement(state, b)) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, a) > MAX_CUTORCH_DIMS || + THCTensor_nDimensionLegacyAll(state, b) > MAX_CUTORCH_DIMS) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, a) == 0) { + // Zero-dim tensor; do nothing + return true; + } + + const dim3 block = getApplyBlock(); + + dim3 grid; + int curDevice = -1; + cudaGetDevice(&curDevice); + if (!getApplyGrid(state, totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. + */ + TensorTypeA* oldA = NULL; + TensorTypeB* oldB = NULL; + + if (aType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, a)) { + // Must perform in contiguous space + oldA = a; + a = (TensorTypeA*)THCTensor_newContiguous(state, a); + } + if (bType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, b)) { + // Must perform in contiguous space + oldB = b; + b = (TensorTypeB*)THCTensor_newContiguous(state, b); + } + + // It is possible that the tensor dimensions are able to be collapsed, + // and thus we can reduce the actual code complexity of the copy by + // exploiting this knowledge statically, since the div/mod is the + // most expensive part of the operation, more so than memory accesses. + // For instance, when copying a non-contiguous to a contiguous tensor + // (or vice versa), the contiguous tensor can be collapsed to one + // dimension, and the loop to translate the linear index to the array + // index can be similarly collapsed. That is what this unrolling is for. 
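+// The HANDLE_*_CASE macros below switch on the collapsed dimensionality of each operand (1, 2, or the generic -1 fallback) and launch the correspondingly specialized kernelPointwiseApply2 instantiation.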
+#define HANDLE_CASE(TYPE, A, B) \ + kernelPointwiseApply2 \ + <<>>( \ + OffsetInfo \ + (aInfo), \ + OffsetInfo \ + (bInfo), \ + (TYPE) totalElements, op); + +#define HANDLE_B_CASE(TYPE, A, B) { \ + switch (B) { \ + case 1: \ + HANDLE_CASE(TYPE, A, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, A, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, A, -1); \ + break; \ + } \ +} + +#define HANDLE_A_CASE(TYPE, A, B) { \ + switch (A) { \ + case 1: \ + HANDLE_B_CASE(TYPE, 1, B); \ + break; \ + case 2: \ + HANDLE_B_CASE(TYPE, 2, B); \ + break; \ + default: \ + HANDLE_B_CASE(TYPE, -1, B); \ + break; \ + } \ +} + + if (THCTensor_canUse32BitIndexMath(state, a) && + THCTensor_canUse32BitIndexMath(state, b)) { + TensorInfo aInfo = + getTensorInfo(state, a); + + TensorInfo bInfo = + getTensorInfo(state, b); + + rearrangeDims(&aInfo, &bInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); +#if CUDA_VERSION < 9000 + if (!(aInfo.isContiguous() && bInfo.isContiguous())) + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); +#endif + + HANDLE_A_CASE(unsigned int, aInfo.dims, bInfo.dims); + } else { + TensorInfo aInfo = + getTensorInfo(state, a); + + TensorInfo bInfo = + getTensorInfo(state, b); + + rearrangeDims(&aInfo, &bInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (aInfo.dims == 1 && bInfo.dims == 1) { + OffsetInfo + aOffset(aInfo); + OffsetInfo + bOffset(bInfo); + kernelPointwiseApply2 + <<>>( + aOffset, bOffset, (uint64_t) totalElements, op); + } else { +#if CUDA_VERSION < 9000 + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); +#endif + OffsetInfo + aOffset(aInfo); + OffsetInfo + bOffset(bInfo); + kernelPointwiseApply2 + <<>>( + aOffset, bOffset, (uint64_t) totalElements, op); + } + } +#undef HANDLE_CASE +#undef HANDLE_B_CASE +#undef HANDLE_A_CASE + + if (oldA) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. + THCTensor_copyIgnoringOverlaps(state, oldA, a); + THCTensor_free(state, a); + a = oldA; + } + + if (oldB) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldB contiguous. 
+ THCTensor_copyIgnoringOverlaps(state, oldB, b); + THCTensor_free(state, b); + b = oldB; + } + + return true; +} + +template +bool THC_pointwiseApply3(THCState* state, + TensorTypeA* a, + TensorTypeB* b, + TensorTypeC* c, + const Op& op, + TensorArgType aType = ReadWrite, + TensorArgType bType = ReadOnly, + TensorArgType cType = ReadOnly) { + ptrdiff_t totalElements = THCTensor_nElement(state, a); + + if (totalElements != THCTensor_nElement(state, b) || + totalElements != THCTensor_nElement(state, c)) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, a) > MAX_CUTORCH_DIMS || + THCTensor_nDimensionLegacyAll(state, b) > MAX_CUTORCH_DIMS || + THCTensor_nDimensionLegacyAll(state, c) > MAX_CUTORCH_DIMS) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, a) == 0) { + // Zero-dim tensor; do nothing + return true; + } + + const dim3 block = getApplyBlock(); + + dim3 grid; + int curDevice = -1; + cudaGetDevice(&curDevice); + if (!getApplyGrid(state, totalElements, grid, curDevice)) { + return false; + } + + /* + Expands readable/writable tensors whose indices may be "overlapped." + This ensures that each element of the tensor is operated on once and only + once. + */ + TensorTypeA* oldA = NULL; + TensorTypeB* oldB = NULL; + TensorTypeC* oldC = NULL; + + if (aType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, a)) { + // Must perform in contiguous space + oldA = a; + a = (TensorTypeA*)THCTensor_newContiguous(state, a); + } + if (bType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, b)) { + // Must perform in contiguous space + oldB = b; + b = (TensorTypeB*)THCTensor_newContiguous(state, b); + } + if (cType == ReadWrite && + THCTensor_maybeOverlappingIndices(state, c)) { + // Must perform in contiguous space + oldC = c; + c = (TensorTypeC*)THCTensor_newContiguous(state, c); + } + +#define HANDLE_CASE(TYPE, A, B, C) \ + kernelPointwiseApply3 \ + <<>>( \ + OffsetInfo \ + (aInfo), \ + OffsetInfo \ + (bInfo), \ + OffsetInfo \ + (cInfo), \ + (TYPE) totalElements, op); + +#define HANDLE_C_CASE(TYPE, A, B, C) { \ + switch (C) { \ + case 1: \ + HANDLE_CASE(TYPE, A, B, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, A, B, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, A, B, -1); \ + break; \ + } \ +} + +#define HANDLE_B_CASE(TYPE, A, B, C) { \ + switch (B) { \ + case 1: \ + HANDLE_C_CASE(TYPE, A, 1, C); \ + break; \ + case 2: \ + HANDLE_C_CASE(TYPE, A, 2, C); \ + break; \ + default: \ + HANDLE_C_CASE(TYPE, A, -1, C); \ + break; \ + } \ +} + +#define HANDLE_A_CASE(TYPE, A, B, C) { \ + switch (A) { \ + case 1: \ + HANDLE_B_CASE(TYPE, 1, B, C); \ + break; \ + case 2: \ + HANDLE_B_CASE(TYPE, 2, B, C); \ + break; \ + default: \ + HANDLE_B_CASE(TYPE, -1, B, C); \ + break; \ + } \ +} + + if (THCTensor_canUse32BitIndexMath(state, a) && + THCTensor_canUse32BitIndexMath(state, b) && + THCTensor_canUse32BitIndexMath(state, c)) { + TensorInfo aInfo = + getTensorInfo(state, a); + + TensorInfo bInfo = + getTensorInfo(state, b); + + TensorInfo cInfo = + getTensorInfo(state, c); + + rearrangeDims(&aInfo, &bInfo, &cInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + +#if CUDA_VERSION < 9000 + if (!(aInfo.isContiguous() && bInfo.isContiguous() && cInfo.isContiguous())) + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); +#endif + HANDLE_A_CASE(unsigned int, aInfo.dims, bInfo.dims, cInfo.dims); + } else { + TensorInfo aInfo = + getTensorInfo(state, a); + + TensorInfo bInfo = + 
getTensorInfo(state, b); + + TensorInfo cInfo = + getTensorInfo(state, c); + + rearrangeDims(&aInfo, &bInfo, &cInfo); + aInfo.collapseDims(); + bInfo.collapseDims(); + cInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (aInfo.dims == 1 && bInfo.dims == 1 && cInfo.dims == 1) { + OffsetInfo + aOffset(aInfo); + OffsetInfo + bOffset(bInfo); + OffsetInfo + cOffset(cInfo); + kernelPointwiseApply3 + <<>>( + aOffset, bOffset, cOffset, (uint64_t) totalElements, op); + } else { +#if CUDA_VERSION < 9000 + grid.x = min(at::cuda::getCurrentDeviceProperties()->multiProcessorCount * THC_APPLY_BLOCKS_PER_SM , grid.x); +#endif + + OffsetInfo + aOffset(aInfo); + OffsetInfo + bOffset(bInfo); + OffsetInfo + cOffset(cInfo); + kernelPointwiseApply3 + <<>>( + aOffset, bOffset, cOffset, (uint64_t) totalElements, op); + } + } +#undef HANDLE_CASE +#undef HANDLE_C_CASE +#undef HANDLE_B_CASE +#undef HANDLE_A_CASE + + if (oldA) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldA contiguous. + THCTensor_copyIgnoringOverlaps(state, oldA, a); + THCTensor_free(state, a); + a = oldA; + } + + if (oldB) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldB contiguous. + THCTensor_copyIgnoringOverlaps(state, oldB, b); + THCTensor_free(state, b); + b = oldB; + } + + if (oldC) { + // Ignore overlaps when copying back; if we use THCTensor_copy + // instead, it will recursively try and invoke ourselves to make + // oldC contiguous. + THCTensor_copyIgnoringOverlaps(state, oldC, c); + THCTensor_free(state, c); + c = oldC; + } + + return true; +} + +#undef THC_APPLY_THREADS_PER_BLOCK +#undef THC_APPLY_BLOCKS_PER_SM + +#endif // THC_APPLY_INC diff --git a/thirdparty/libtorch/include/THC/THCAsmUtils.cuh b/thirdparty/libtorch/include/THC/THCAsmUtils.cuh new file mode 100644 index 0000000000..479567fba5 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCAsmUtils.cuh @@ -0,0 +1,145 @@ +#ifndef THC_ASM_UTILS_INC +#define THC_ASM_UTILS_INC + +// Collection of direct PTX functions + +template +struct Bitfield {}; + +template <> +struct Bitfield { + static __device__ __forceinline__ + unsigned int getBitfield(unsigned int val, int pos, int len) { +#if defined(__HIP_PLATFORM_HCC__) + pos &= 0xff; + len &= 0xff; + + unsigned int m = (1u << len) - 1u; + return (val >> pos) & m; +#else + unsigned int ret; + asm("bfe.u32 %0, %1, %2, %3;" : "=r"(ret) : "r"(val), "r"(pos), "r"(len)); + return ret; +#endif + } + + static __device__ __forceinline__ + unsigned int setBitfield(unsigned int val, unsigned int toInsert, int pos, int len) { +#if defined(__HIP_PLATFORM_HCC__) + pos &= 0xff; + len &= 0xff; + + unsigned int m = (1u << len) - 1u; + toInsert &= m; + toInsert <<= pos; + m <<= pos; + + return (val & ~m) | toInsert; +#else + unsigned int ret; + asm("bfi.b32 %0, %1, %2, %3, %4;" : + "=r"(ret) : "r"(toInsert), "r"(val), "r"(pos), "r"(len)); + return ret; +#endif + } +}; + +template <> +struct Bitfield { + static __device__ __forceinline__ + uint64_t getBitfield(uint64_t val, int pos, int len) { +#if defined(__HIP_PLATFORM_HCC__) + pos &= 0xff; + len &= 0xff; + + uint64_t m = (1u << len) - 1u; + return (val >> pos) & m; +#else + uint64_t ret; + asm("bfe.u64 %0, %1, %2, %3;" : "=l"(ret) : "l"(val), "r"(pos), "r"(len)); + return ret; +#endif + } 
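+  // Example: getBitfield(0xF0, 4, 4) returns 0xF (the 4 bits starting at bit 4); setBitfield(0, 0xF, 4, 4) writes them back and returns 0xF0.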
+ + static __device__ __forceinline__ + uint64_t setBitfield(uint64_t val, uint64_t toInsert, int pos, int len) { +#if defined(__HIP_PLATFORM_HCC__) + pos &= 0xff; + len &= 0xff; + + uint64_t m = (1u << len) - 1u; + toInsert &= m; + toInsert <<= pos; + m <<= pos; + + return (val & ~m) | toInsert; +#else + uint64_t ret; + asm("bfi.b64 %0, %1, %2, %3, %4;" : + "=l"(ret) : "l"(toInsert), "l"(val), "r"(pos), "r"(len)); + return ret; +#endif + } +}; + +__device__ __forceinline__ int getLaneId() { +#if defined(__HIP_PLATFORM_HCC__) + return __lane_id(); +#else + int laneId; + asm("mov.s32 %0, %laneid;" : "=r"(laneId) ); + return laneId; +#endif +} + +#if defined(__HIP_PLATFORM_HCC__) +__device__ __forceinline__ unsigned long long int getLaneMaskLt() { + std::uint64_t m = (1ull << getLaneId()) - 1ull; + return m; +#else +__device__ __forceinline__ unsigned getLaneMaskLt() { + unsigned mask; + asm("mov.u32 %0, %%lanemask_lt;" : "=r"(mask)); + return mask; +#endif +} + +#if defined (__HIP_PLATFORM_HCC__) +__device__ __forceinline__ unsigned long long int getLaneMaskLe() { + std::uint64_t m = UINT64_MAX >> (sizeof(std::uint64_t) * CHAR_BIT - (getLaneId() + 1)); + return m; +} +#else +__device__ __forceinline__ unsigned getLaneMaskLe() { + unsigned mask; + asm("mov.u32 %0, %%lanemask_le;" : "=r"(mask)); + return mask; +} +#endif + +#if defined(__HIP_PLATFORM_HCC__) +__device__ __forceinline__ unsigned long long int getLaneMaskGt() { + std::uint64_t m = getLaneMaskLe(); + return m ? ~m : m; +#else +__device__ __forceinline__ unsigned getLaneMaskGt() { + unsigned mask; + asm("mov.u32 %0, %%lanemask_gt;" : "=r"(mask)); + return mask; +#endif +} + +#if defined(__HIP_PLATFORM_HCC__) +__device__ __forceinline__ unsigned long long int getLaneMaskGe() { + std::uint64_t m = getLaneMaskLt(); + return ~m; +#else +__device__ __forceinline__ unsigned getLaneMaskGe() { + unsigned mask; + asm("mov.u32 %0, %%lanemask_ge;" : "=r"(mask)); + return mask; +#endif +} + + +#endif // THC_ASM_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCAtomics.cuh b/thirdparty/libtorch/include/THC/THCAtomics.cuh new file mode 100644 index 0000000000..91a74bc3a1 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCAtomics.cuh @@ -0,0 +1,169 @@ +#ifndef THC_ATOMICS_INC +#define THC_ATOMICS_INC + +#include +#include +#include +#include + +template +struct AtomicAddIntegerImpl; + +template +struct AtomicAddIntegerImpl { + inline __device__ void operator()(T *address, T val) { + size_t offset = (size_t)address & 3; + uint32_t * address_as_ui = (uint32_t *)((char *)address - offset); + uint32_t old = *address_as_ui; + uint32_t shift = offset * 8; + uint32_t old_byte; + uint32_t newval; + uint32_t assumed; + + do { + assumed = old; + old_byte = (old >> shift) & 0xff; + // preserve size in initial cast. Casting directly to uint32_t pads + // negative signed values with 1's (e.g. signed -1 = unsigned ~0). + newval = static_cast(THCNumerics::add(val, old_byte)); + newval = (old & ~(0x000000ff << shift)) | (newval << shift); + old = atomicCAS(address_as_ui, assumed, newval); + } while (assumed != old); + } +}; + +template +struct AtomicAddIntegerImpl { + inline __device__ void operator()(T *address, T val) { + size_t offset = (size_t)address & 2; + uint32_t * address_as_ui = (uint32_t *)((char *)address - offset); + bool is_32_align = offset; + uint32_t old = *address_as_ui; + uint32_t old_bytes; + uint32_t newval; + uint32_t assumed; + + do { + assumed = old; + old_bytes = is_32_align ? 
old >> 16 : old & 0xffff; + // preserve size in initial cast. Casting directly to uint32_t pads + // negative signed values with 1's (e.g. signed -1 = unsigned ~0). + newval = static_cast(THCNumerics::add(val, old_bytes)); + newval = is_32_align ? (old & 0xffff) | (newval << 16) : (old & 0xffff0000) | newval; + old = atomicCAS(address_as_ui, assumed, newval); + } while (assumed != old); + } +}; + +template +struct AtomicAddIntegerImpl { + inline __device__ void operator()(T *address, T val) { + uint32_t * address_as_ui = (uint32_t *) (address); + uint32_t old = *address_as_ui; + uint32_t newval; + uint32_t assumed; + + do { + assumed = old; + newval = val + (T)old; + old = atomicCAS(address_as_ui, assumed, newval); + } while (assumed != old); + } +}; + +template +struct AtomicAddIntegerImpl { + inline __device__ void operator()(T *address, T val) { + unsigned long long * address_as_ui = (unsigned long long *) (address); + unsigned long long old = *address_as_ui; + unsigned long long newval; + unsigned long long assumed; + + do { + assumed = old; + newval = val + (T)old; + old = atomicCAS(address_as_ui, assumed, newval); + } while (assumed != old); + } +}; + +static inline __device__ void atomicAdd(uint8_t *address, uint8_t val) { + AtomicAddIntegerImpl()(address, val); +} + +static inline __device__ void atomicAdd(int8_t *address, int8_t val) { + AtomicAddIntegerImpl()(address, val); +} + +static inline __device__ void atomicAdd(int16_t *address, int16_t val) { + AtomicAddIntegerImpl()(address, val); +} + +static inline __device__ void atomicAdd(int64_t *address, int64_t val) { +#ifdef __HIP_PLATFORM_HCC__ + __atomic_fetch_add(address, val, __ATOMIC_RELAXED); +#else + AtomicAddIntegerImpl()(address, val); +#endif +} + +static inline __device__ void atomicAdd(bool *address, bool val) { + *address = address && val; +} + +static inline __device__ void atomicAdd(at::Half *address, at::Half val) { + #if ((CUDA_VERSION < 10000) || (defined(__CUDA_ARCH__) && (__CUDA_ARCH__ < 700))) + unsigned int * address_as_ui = + (unsigned int *) ((char *)address - ((size_t)address & 2)); + unsigned int old = *address_as_ui; + unsigned int assumed; + + do { + assumed = old; + at::Half hsum; + hsum.x = (size_t)address & 2 ? (old >> 16) : (old & 0xffff); + hsum = THCNumerics::add(hsum, val); + old = (size_t)address & 2 ? (old & 0xffff) | (hsum.x << 16) : (old & 0xffff0000) | hsum.x; + old = atomicCAS(address_as_ui, assumed, old); + } while (assumed != old); + #else + atomicAdd(reinterpret_cast<__half*>(address), val); + #endif + +} + +#if defined(__CUDA_ARCH__) && (__CUDA_ARCH__ < 600 || CUDA_VERSION < 8000) +// from CUDA C Programmic Guide +static inline __device__ void atomicAdd(double *address, double val) { + unsigned long long int* address_as_ull = (unsigned long long int*)address; + unsigned long long int old = *address_as_ull; + unsigned long long int assumed; + + do { + assumed = old; + old = atomicCAS(address_as_ull, assumed, + __double_as_longlong(val + + __longlong_as_double(assumed))); + + // Note: uses integer comparison to avoid hang in case of NaN (since NaN != NaN) +} while (assumed != old); +} +#elif !defined(__CUDA_ARCH__) && (CUDA_VERSION < 8000) || defined(__HIP_PLATFORM_HCC__) + +/* Note [hip-clang differences to hcc] + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * The upcoming hip-clang compiler for ROCm differs from hcc in a few details. + * It exports the __HIP__ macro, we can hence differentiate between hcc and + * hip-clang. 
In the below, hcc only received support for atomicAdd with double + * typing after work week 18312. hip-clang had support from the first version. + * In general, the code-visible differences between hip-clang and hcc will be + * minimal. + */ + +#if defined(__HIP_PLATFORM_HCC__) && __hcc_workweek__ < 18312 && !__HIP__ + // This needs to be defined for the host side pass + static inline __device__ void atomicAdd(double *address, double val) { } +#endif +#endif + +#endif // THC_ATOMICS_INC diff --git a/thirdparty/libtorch/include/THC/THCBlas.h b/thirdparty/libtorch/include/THC/THCBlas.h new file mode 100644 index 0000000000..ef2320a283 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCBlas.h @@ -0,0 +1,55 @@ +#ifndef THC_BLAS_INC +#define THC_BLAS_INC + +#include +#include +#include + +/* Level 1 */ +THC_API float THCudaBlas_Sdot(THCState *state, int64_t n, float *x, int64_t incx, float *y, int64_t incy); +THC_API double THCudaBlas_Ddot(THCState *state, int64_t n, double *x, int64_t incx, double *y, int64_t incy); +THC_API THHalf THCudaBlas_Hdot(THCState *state, int64_t n, THHalf *x, int64_t incx, THHalf *y, int64_t incy); + +/* Level 2 */ +THC_API void THCudaBlas_Sgemv(THCState *state, char trans, int64_t m, int64_t n, float alpha, float *a, int64_t lda, float *x, int64_t incx, float beta, float *y, int64_t incy); +THC_API void THCudaBlas_Dgemv(THCState *state, char trans, int64_t m, int64_t n, double alpha, double *a, int64_t lda, double *x, int64_t incx, double beta, double *y, int64_t incy); +THC_API void THCudaBlas_Sger(THCState *state, int64_t m, int64_t n, float alpha, float *x, int64_t incx, float *y, int64_t incy, float *a, int64_t lda); +THC_API void THCudaBlas_Dger(THCState *state, int64_t m, int64_t n, double alpha, double *x, int64_t incx, double *y, int64_t incy, double *a, int64_t lda); + +/* Level 3 */ +THC_API void THCudaBlas_Sgemm(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, float alpha, float *a, int64_t lda, float *b, int64_t ldb, float beta, float *c, int64_t ldc); +THC_API void THCudaBlas_Dgemm(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, double alpha, double *a, int64_t lda, double *b, int64_t ldb, double beta, double *c, int64_t ldc); + +THC_API void THCudaBlas_Hgemm(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, THHalf alpha, THHalf *a, int64_t lda, THHalf *b, int64_t ldb, THHalf beta, THHalf *c, int64_t ldc); +#ifdef __HIP_PLATFORM_HCC__ +THC_API void THCudaBlas_Bgemm(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, at::BFloat16 alpha, at::BFloat16 *a, int64_t lda, at::BFloat16 *b, int64_t ldb, at::BFloat16 beta, at::BFloat16 *c, int64_t ldc); +#endif + +THC_API void THCudaBlas_SgemmBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + float alpha, const float *a[], int64_t lda, const float *b[], int64_t ldb, + float beta, float *c[], int64_t ldc, int64_t batchCount); +THC_API void THCudaBlas_DgemmBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + double alpha, const double *a[], int64_t lda, const double *b[], int64_t ldb, + double beta, double *c[], int64_t ldc, int64_t batchCount); +#if CUDA_VERSION >= 8000 || defined __HIP_PLATFORM_HCC__ +THC_API void THCudaBlas_SgemmStridedBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + float alpha, const float *a, int64_t lda, int64_t strideA, const float *b, int64_t ldb, int64_t strideB, + float beta, 
float *c, int64_t ldc, int64_t strideC, int64_t batchCount); +THC_API void THCudaBlas_DgemmStridedBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + double alpha, const double *a, int64_t lda, int64_t strideA, const double *b, int64_t ldb, int64_t strideB, + double beta, double *c, int64_t ldc, int64_t strideC, int64_t batchCount); +#endif + +#if CUDA_VERSION >= 9010 || defined(__HIP_PLATFORM_HCC__) +void THCudaBlas_HgemmStridedBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + THHalf alpha, const THHalf *a, int64_t lda, int64_t strideA, const THHalf *b, int64_t ldb, int64_t strideB, + THHalf beta, THHalf *c, int64_t ldc, int64_t strideC, int64_t batchCount); +#endif + +#ifdef __HIP_PLATFORM_HCC__ +void THCudaBlas_BgemmStridedBatched(THCState *state, char transa, char transb, int64_t m, int64_t n, int64_t k, + at::BFloat16 alpha, const at::BFloat16 *a, int64_t lda, int64_t strideA, const at::BFloat16 *b, int64_t ldb, int64_t strideB, + at::BFloat16 beta, at::BFloat16 *c, int64_t ldc, int64_t strideC, int64_t batchCount); +#endif + +#endif diff --git a/thirdparty/libtorch/include/THC/THCCachingHostAllocator.h b/thirdparty/libtorch/include/THC/THCCachingHostAllocator.h new file mode 100644 index 0000000000..38688842e1 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCCachingHostAllocator.h @@ -0,0 +1,33 @@ +#ifndef THC_CACHING_HOST_ALLOCATOR_INC +#define THC_CACHING_HOST_ALLOCATOR_INC + +#include + + +#include + +// +// A caching allocator for CUDA host allocations (pinned memory). +// +// This provides a drop-in replacement for THCudaHostAllocator, which re-uses +// freed pinned (page-locked) memory allocations. This avoids device +// synchronizations due to cudaFreeHost calls. +// +// To ensure correct behavior, THCCachingHostAllocator_recordEvent must be +// called anytime a pointer from this allocator is used in a cudaMemcpyAsync +// call between host and device. We implement this for storages and tensors in +// copy_from_cpu_async_ and copy_to_cpu_async_. +// +// Note that this allocator does not split larger allocations into smaller +// blocks, unlike the caching device allocator. +// +THC_API c10::Allocator* getTHCCachingHostAllocator(void); + +// Records an event in the specified stream. The allocation 'ptr' will not be +// re-used until the event has occurred. 
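+// Typical call site: after e.g. cudaMemcpyAsync(dst, ptr, nbytes, cudaMemcpyHostToDevice, stream) on pinned memory obtained from this allocator, call THCCachingHostAllocator_recordEvent(ptr, stream) so the block is not recycled until the copy has completed.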
+THC_API cudaError_t THCCachingHostAllocator_recordEvent(void *ptr, at::cuda::CUDAStream stream); + +// Releases cached pinned memory allocations via cudaHostFree +THC_API void THCCachingHostAllocator_emptyCache(void); + +#endif diff --git a/thirdparty/libtorch/include/THC/THCDeviceTensor-inl.cuh b/thirdparty/libtorch/include/THC/THCDeviceTensor-inl.cuh new file mode 100644 index 0000000000..16e1f94e47 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCDeviceTensor-inl.cuh @@ -0,0 +1,416 @@ +#include + +namespace detail { + +template +__host__ __device__ void copy(T to[N], T from[N]) { + for (int i = 0; i < N; ++i) { + to[i] = from[i]; + } +} + +} // namespace detail + +template class PtrTraits> +__host__ __device__ +THCDeviceTensor::THCDeviceTensor() + : data_(NULL) { + thc_static_assert(Dim > 0); + + for (int i = 0; i < Dim; ++i) { + size_[i] = 0; + stride_[i] = (IndexT) 1; + } +} + +template class PtrTraits> +__host__ __device__ +THCDeviceTensor:: +#ifdef _MSC_VER +THCDeviceTensor(DataPtrType data, const IndexT (&sizes)[Dim]) +#else +THCDeviceTensor(DataPtrType data, const IndexT sizes[Dim]) +#endif + : data_(data) { + thc_static_assert(Dim > 0); + + for (int i = 0; i < Dim; ++i) { + size_[i] = sizes[i]; + } + + stride_[Dim - 1] = (IndexT) 1; + for (int i = Dim - 2; i >= 0; --i) { + stride_[i] = stride_[i + 1] * sizes[i + 1]; + } +} + +template class PtrTraits> +__host__ __device__ +THCDeviceTensor::THCDeviceTensor( +#ifdef _MSC_VER + DataPtrType data, const IndexT (&sizes)[Dim], const IndexT (&strides)[Dim]) +#else + DataPtrType data, const IndexT sizes[Dim], const IndexT strides[Dim]) +#endif + : data_(data) { + thc_static_assert(Dim > 0); + + for (int i = 0; i < Dim; ++i) { + size_[i] = sizes[i]; + stride_[i] = strides[i]; + } +} + +template class PtrTraits> +template +__host__ __device__ bool +THCDeviceTensor::isSameSizeAndStride( + const THCDeviceTensor& rhs) const { + if (Dim != OtherDim) { + return false; + } + + for (int i = 0; i < Dim; ++i) { + if (size_[i] != rhs.size_[i]) { + return false; + } + + if (stride_[i] != rhs.stride_[i]) { + return false; + } + } + + return true; +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::cast() { + thc_static_assert(sizeof(U) == sizeof(T)); + + return THCDeviceTensor( + reinterpret_cast(data_), size_, stride_); +} + +template class PtrTraits> +template +__host__ __device__ const THCDeviceTensor +THCDeviceTensor::cast() const { + thc_static_assert(sizeof(U) == sizeof(T)); + + return THCDeviceTensor( + reinterpret_cast(data_), size_, stride_); +} + +template class PtrTraits> +__host__ __device__ ptrdiff_t +THCDeviceTensor::numElements() const { + ptrdiff_t size = getSize(0); + + for (int i = 1; i < Dim; ++i) { + size *= getSize(i); + } + + return size; +} + +template class PtrTraits> +__host__ __device__ bool +THCDeviceTensor::isContiguous() const { + return isContiguousRange(0, Dim); +} + +template class PtrTraits> +__host__ __device__ bool +THCDeviceTensor::isConsistentlySized(int i) const { + if (i == 0 && getStride(i) > 0 && getSize(i) > 0) { + return true; + } else if ((i > 0) && (i < Dim) && (getStride(i) > 0) && + ((getStride(i - 1) / getStride(i)) >= getSize(i))) { + return true; + } + + return false; +} + +template class PtrTraits> +__host__ __device__ bool +THCDeviceTensor::isConsistentlySized() const { + for (int i = 0; i < Dim; ++i) { + if (!isConsistentlySized(i)) { + return false; + } + } + + return true; +} + +template class PtrTraits> +__host__ __device__ bool 
+THCDeviceTensor::isContiguousRange( + int first, int last) const { + + int64_t prevSize = last < Dim ? getStride(last) * getSize(last) : 1; + + for (int i = last - 1; i >= first; --i) { + if (getSize(i) != (IndexT) 1) { + if (getStride(i) == prevSize) { + prevSize *= getSize(i); + } else { + return false; + } + } + } + + return true; +} + +template class PtrTraits> +__host__ __device__ THCDeviceTensor +THCDeviceTensor::transpose(int dim1, + int dim2) const { +#if defined(__CUDA_ARCH__) || defined(__HIP_PLATFORM_HCC__) + // Device code + assert(dim1 >= 0 && dim1 < Dim); + assert(dim1 >= 0 && dim2 < Dim); +#else + // Host code + if (dim1 < 0 || dim1 >= Dim) { + THError("dim1 out of bounds"); + } + + if (dim2 < 0 || dim2 >= Dim) { + THError("dim2 out of bounds"); + } +#endif + + IndexT newSize[Dim]; + IndexT newStride[Dim]; + + for (int i = 0; i < Dim; ++i) { + newSize[i] = size_[i]; + newStride[i] = stride_[i]; + } + + IndexT tmp = newSize[dim1]; + newSize[dim1] = newSize[dim2]; + newSize[dim2] = tmp; + + tmp = newStride[dim1]; + newStride[dim1] = newStride[dim2]; + newStride[dim2] = tmp; + + return THCDeviceTensor(data_, newSize, newStride); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::upcastOuter() { + // Can only create tensors of greater dimension + thc_static_assert(NewDim > Dim); + + IndexT newSize[NewDim]; + IndexT newStride[NewDim]; + + int shift = NewDim - Dim; + + for (int i = 0; i < NewDim; ++i) { + if (i < shift) { + // These are the extended dimensions + newSize[i] = (IndexT) 1; + newStride[i] = size_[0] * stride_[0]; + } else { + // Shift the remaining dimensions + newSize[i] = size_[i - shift]; + newStride[i] = stride_[i - shift]; + } + } + + return THCDeviceTensor( + data_, newSize, newStride); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::upcastInner() { + // Can only create tensors of greater dimension + thc_static_assert(NewDim > Dim); + + IndexT newSize[NewDim]; + IndexT newStride[NewDim]; + + for (int i = 0; i < NewDim; ++i) { + if (i < Dim) { + // Existing dimensions get copied over + newSize[i] = size_[i]; + newStride[i] = stride_[i]; + } else { + // Extended dimensions + newSize[i] = (IndexT) 1; + newStride[i] = (IndexT) 1; + } + } + + return THCDeviceTensor( + data_, newSize, newStride); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::downcastOuter() { + // Can only create tensors of lesser dimension + thc_static_assert(NewDim < Dim); + + // We can't downcast non-contiguous tensors, since it leaves + // garbage data in the tensor. The tensor needs to be contiguous + // in all of the dimensions we are collapsing (no padding in + // them). 
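+  // e.g. downcasting a contiguous [2][3][4] tensor to 2 dimensions collapses the two leading dimensions into one, yielding a [6][4] view over the same storage.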
+ bool cont = isContiguousRange(0, Dim - NewDim); +#if defined(__CUDA_ARCH__) || defined(__HIP_PLATFORM_HCC__) + // Device code + assert(cont); +#else + // Host code + if (!cont) { + THError("Can only downcast contiguous tensors"); + } +#endif + + IndexT newSize[NewDim]; + IndexT newStride[NewDim]; + + int ignoredDims = Dim - NewDim; + IndexT collapsedSize = 1; + + for (int i = 0; i < Dim; ++i) { + if (i < ignoredDims) { + // Collapse these dimensions + collapsedSize *= getSize(i); + } else { + // Non-collapsed dimensions + if (i == ignoredDims) { + // This is the first non-collapsed dimension + newSize[i - ignoredDims] = collapsedSize * getSize(i); + } else { + // Subsequent non-collapsed dimensions + newSize[i - ignoredDims] = getSize(i); + } + + newStride[i - ignoredDims] = getStride(i); + } + } + + return THCDeviceTensor( + data_, newSize, newStride); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::downcastInner() { + // Can only create tensors of lesser dimension + thc_static_assert(NewDim < Dim); + + // We can't downcast non-contiguous tensors, since it leaves + // garbage data in the tensor. The tensor needs to be contiguous + // in all of the dimensions we are collapsing (no padding in + // them). + bool cont = isContiguousRange(NewDim, Dim); +#if defined(__CUDA_ARCH__) || defined(__HIP_PLATFORM_HCC__) + // Device code + assert(cont); +#else + // Host code + if (!cont) { + THError("Can only downcast contiguous tensors"); + } +#endif + + IndexT newSize[NewDim]; + IndexT newStride[NewDim]; + + IndexT collapsedSize = 1; + + for (int i = Dim - 1; i >= 0; --i) { + if (i >= NewDim) { + // Collapse these dimensions + collapsedSize *= getSize(i); + } else { + // Non-collapsed dimensions + if (i == NewDim - 1) { + // This is the first non-collapsed dimension + newSize[i] = collapsedSize * getSize(i); + newStride[i] = getStride(Dim - 1); + } else { + // Subsequent non-collapsed dimensions + newSize[i] = getSize(i); + newStride[i] = getStride(i); + } + } + } + + return THCDeviceTensor( + data_, newSize, newStride); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::view(DataPtrType at) { + thc_static_assert(SubDim >= 1 && SubDim < Dim); + + IndexT viewSizes[SubDim]; + IndexT viewStrides[SubDim]; + + for (int i = 0; i < SubDim; ++i) { + viewSizes[i] = size_[Dim - SubDim + i]; + viewStrides[i] = stride_[Dim - SubDim + i]; + } + + return THCDeviceTensor( + at, viewSizes, viewStrides); +} + +template class PtrTraits> +template +__host__ __device__ THCDeviceTensor +THCDeviceTensor::view() { + return view(data_); +} + +template class PtrTraits> +void +THCDeviceTensor::zero(cudaStream_t stream) { +#if defined(__CUDA_ARCH__) || defined(__HIP_PLATFORM_HCC__) + assert(isContiguous()); +#else + if (!isContiguous()) { + THError("fillAsync only works on contiguous data"); + } +#endif + + cudaMemsetAsync(data(), 0, numElements() * sizeof(T), stream); +} diff --git a/thirdparty/libtorch/include/THC/THCDeviceTensor.cuh b/thirdparty/libtorch/include/THC/THCDeviceTensor.cuh new file mode 100644 index 0000000000..543420ea72 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCDeviceTensor.cuh @@ -0,0 +1,513 @@ +#ifndef THC_DEVICE_TENSOR_INC +#define THC_DEVICE_TENSOR_INC + +#include +#include + +// A CUDA 6.5 compatible version of static_assert. Remove once on CUDA 7.0. 
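+// thc_static_assert(expr) resolves to the defined (empty) specialization when expr is true and to the forward-declared primary template when it is false, so a failing assertion surfaces as an incomplete-type error at compile time.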
+template +struct THCStaticAssert; + +template <> +struct THCStaticAssert { +}; + +#define thc_static_assert(expr) (THCStaticAssert<(expr) != 0>()) + +/// Our tensor type +template class PtrTraits> +class THCDeviceTensor; + +/// Type of a subspace of a tensor +namespace detail { +template class PtrTraits> +class THCDeviceSubTensor; +} + +template +struct RestrictPtrTraits { + typedef T* __restrict__ PtrType; +}; + +template +struct DefaultPtrTraits { + typedef T* PtrType; +}; + +/** + Templated multi-dimensional array that supports strided access of + elements. Main access is through `operator[]`; e.g., + `tensor[x][y][z]`. + +- `T` is the contained type (e.g., `float`) +- `Dim` is the tensor rank +- `IndexT` is the integer type used for size/stride arrays, and for +- all indexing math. Default is `int`, but for large tensors, `int64_t` +- can be used instead. +- `PtrTraits` are traits applied to our data pointer (T*). By default, +- this is just T*, but RestrictPtrTraits can be used to apply T* +- __restrict__ for alias-free analysis. +*/ +template class PtrTraits = DefaultPtrTraits> +class THCDeviceTensor { + public: + enum { NumDim = Dim }; + typedef T DataType; + typedef IndexT IndexType; + typedef typename PtrTraits::PtrType DataPtrType; + typedef THCDeviceTensor TensorType; + + /// Default constructor + __host__ __device__ THCDeviceTensor(); + + /// Constructor that calculates strides with no padding + __host__ __device__ THCDeviceTensor(DataPtrType data, +#ifdef _MSC_VER + const IndexT (&sizes)[Dim]); +#else + const IndexT sizes[Dim]); +#endif + + /// Constructor that takes arbitrary size/stride arrays + __host__ __device__ THCDeviceTensor(DataPtrType data, +#ifdef _MSC_VER + const IndexT (&sizes)[Dim], + const IndexT (&strides)[Dim]); +#else + const IndexT sizes[Dim], + const IndexT strides[Dim]); +#endif + + /// Returns true if the two tensors are of the same dimensionality, + /// size and stride. + template + __host__ __device__ bool + isSameSizeAndStride( + const THCDeviceTensor& rhs) const; + + /// Cast to a tensor of a different type of the same size and stride + template + __host__ __device__ THCDeviceTensor cast(); + + /// Const version of `cast` + template + __host__ __device__ + const THCDeviceTensor cast() const; + + /// Returns a raw pointer to the start of our data. + __host__ __device__ __forceinline__ DataPtrType data() { + return data_; + } + + /// Returns a raw pointer to the start of our data (const). + __host__ __device__ __forceinline__ + const DataPtrType data() const { + return data_; + } + + /// Cast to a different datatype + template + __host__ __device__ __forceinline__ + typename PtrTraits::PtrType dataAs() { + return reinterpret_cast::PtrType>(data_); + } + + /// Cast to a different datatype + template + __host__ __device__ __forceinline__ + const typename PtrTraits::PtrType dataAs() const { + return reinterpret_cast::PtrType>(data_); + } + + /// Returns a read/write view of a portion of our tensor. + __host__ __device__ __forceinline__ + detail::THCDeviceSubTensor + operator[](IndexT); + + /// Returns a read/write view of a portion of our tensor (const). + __host__ __device__ __forceinline__ + const detail::THCDeviceSubTensor + operator[](IndexT) const; + + /// Returns the size of a given dimension, `[0, Dim - 1]`. No bounds + /// checking. + __host__ __device__ __forceinline__ int getSize(int i) const { + return size_[i]; + } + + /// Returns the stride of a given dimension, `[0, Dim - 1]`. No bounds + /// checking. 
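+  /// e.g. for a contiguous [4][5] tensor built by the stride-computing constructor, getStride(0) == 5 and getStride(1) == 1.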
+ __host__ __device__ __forceinline__ int getStride(int i) const { + return stride_[i]; + } + + /// Returns the total number of elements contained within our data + /// (product of `getSize(i)`) + __host__ __device__ ptrdiff_t numElements() const; + + /// Returns the size array. + __host__ __device__ __forceinline__ const IndexT* sizes() const { + return size_; + } + + /// Returns the stride array. + __host__ __device__ __forceinline__ const IndexT* strides() const { + return stride_; + } + + /// Returns true if there is no padding within the tensor and no + /// re-ordering of the dimensions. + /// ~~~ + /// (stride(i) == size(i + 1) * stride(i + 1)) && stride(dim - 1) == 0 + /// ~~~ + __host__ __device__ bool isContiguous() const; + + /// Returns whether a given dimension has only increasing stride + /// from the previous dimension. A tensor that was permuted by + /// exchanging size and stride only will fail this check. + /// If `i == 0` just check `size > 0`. Returns `false` if `stride` is `<= 0`. + __host__ __device__ bool isConsistentlySized(int i) const; + + // Returns whether at each dimension `stride <= size`. + // If this is not the case then iterating once over the size space will + // touch the same memory locations multiple times. + __host__ __device__ bool isConsistentlySized() const; + + /// Returns true if the given dimension range [first, last) has no padding. + __host__ __device__ bool isContiguousRange(int first, int last) const; + + /// Returns a tensor of the same dimension after transposing the two + /// dimensions given. Does not actually move elements; transposition + /// is made by permuting the size/stride arrays. + /// If the dimensions are not valid, asserts. + __host__ __device__ THCDeviceTensor + transpose(int dim1, int dim2) const; + + /// Upcast a tensor of dimension `D` to some tensor of dimension + /// D' > D by padding the leading dimensions by 1 + /// e.g., upcasting a 2-d tensor `[2][3]` to a 4-d tensor `[1][1][2][3]` + template + __host__ __device__ THCDeviceTensor + upcastOuter(); + + /// Upcast a tensor of dimension `D` to some tensor of dimension + /// D' > D by padding the lowest/most varying dimensions by 1 + /// e.g., upcasting a 2-d tensor `[2][3]` to a 4-d tensor `[2][3][1][1]` + template + __host__ __device__ THCDeviceTensor + upcastInner(); + + /// Downcast a tensor of dimension `D` to some tensor of dimension + /// D' < D by collapsing the leading dimensions. asserts if there is + /// padding on the leading dimensions. + template + __host__ __device__ + THCDeviceTensor downcastOuter(); + + /// Downcast a tensor of dimension `D` to some tensor of dimension + /// D' < D by collapsing the leading dimensions. asserts if there is + /// padding on the leading dimensions. + template + __host__ __device__ + THCDeviceTensor downcastInner(); + + /// Returns a tensor that is a view of the `SubDim`-dimensional slice + /// of this tensor, starting at `at`. + template + __host__ __device__ THCDeviceTensor + view(DataPtrType at); + + /// Returns a tensor that is a view of the `SubDim`-dimensional slice + /// of this tensor, starting where our data begins + template + __host__ __device__ THCDeviceTensor + view(); + + /// Zeroes out the tensor asynchronously. Asserts if the contents + /// in question are not contiguous. 
+ void zero(cudaStream_t stream = 0); + + private: + /// Raw pointer to where the tensor data begins + DataPtrType data_; + + /// Array of strides (in sizeof(T) terms) per each dimension + IndexT stride_[Dim]; + + /// Size per each dimension + IndexT size_[Dim]; +}; + +namespace detail { + +/// Specialization for a view of a single value (0-dimensional) +template class PtrTraits> +class THCDeviceSubTensor { + public: + __host__ __device__ THCDeviceSubTensor + operator=(typename TensorType::DataType val) { + *data_ = val; + return *this; + } + + // operator T& + __host__ __device__ operator typename TensorType::DataType&() { + return *data_; + } + + // const operator T& returning const T& + __host__ __device__ operator const typename TensorType::DataType&() const { + return *data_; + } + + // operator& returning T* + __host__ __device__ typename TensorType::DataType* operator&() { + return data_; + } + + // const operator& returning const T* + __host__ __device__ const typename TensorType::DataType* operator&() const { + return data_; + } + + /// Returns a raw accessor to our slice. + __host__ __device__ __forceinline__ typename TensorType::DataPtrType data() { + return data_; + } + + /// Returns a raw accessor to our slice (const). + __host__ __device__ __forceinline__ + const typename TensorType::DataPtrType data() const { + return data_; + } + + /// Cast to a different datatype. + template + __host__ __device__ T& as() { + return *dataAs(); + } + + /// Cast to a different datatype (const). + template + __host__ __device__ const T& as() const { + return *dataAs(); + } + + /// Cast to a different datatype + template + __host__ __device__ __forceinline__ + typename PtrTraits::PtrType dataAs() { + return reinterpret_cast::PtrType>(data_); + } + + /// Cast to a different datatype (const) + template + __host__ __device__ __forceinline__ + typename PtrTraits::PtrType dataAs() const { + return reinterpret_cast::PtrType>(data_); + } + + /// Use the texture cache for reads + __device__ __forceinline__ typename TensorType::DataType ldg() const { +#if __CUDA_ARCH__ >= 350 + return __ldg(data_); +#else + return *data_; +#endif + } + + /// Use the texture cache for reads; cast as a particular type + template + __device__ __forceinline__ T ldgAs() const { +#if __CUDA_ARCH__ >= 350 + return __ldg(dataAs()); +#else + return as(); +#endif + } + + private: + /// One dimension greater can create us + friend class THCDeviceSubTensor; + + /// Our parent tensor can create us + friend class THCDeviceTensor; + + __host__ __device__ __forceinline__ THCDeviceSubTensor( + TensorType& t, + typename TensorType::DataPtrType data) + : tensor_(t), + data_(data) { + } + + /// The tensor we're referencing + TensorType& tensor_; + + /// Where our value is located + typename TensorType::DataPtrType const data_; +}; + +/// A `SubDim`-rank slice of a parent THCDeviceTensor +template class PtrTraits> +class THCDeviceSubTensor { + public: + /// Returns a view of the data located at our offset (the dimension + /// `SubDim` - 1 tensor). + __host__ __device__ __forceinline__ + THCDeviceSubTensor + operator[](typename TensorType::IndexType index) { + return THCDeviceSubTensor( + tensor_, + data_ + index * tensor_.getStride(TensorType::NumDim - SubDim)); + } + + /// Returns a view of the data located at our offset (the dimension + /// `SubDim` - 1 tensor) (const). 
+ __host__ __device__ __forceinline__ + const THCDeviceSubTensor + operator[](typename TensorType::IndexType index) const { + return THCDeviceSubTensor( + tensor_, + data_ + index * tensor_.getStride(TensorType::NumDim - SubDim)); + } + + // operator& returning T* + __host__ __device__ typename TensorType::DataType* operator&() { + return data_; + } + + // const operator& returning const T* + __host__ __device__ const typename TensorType::DataType* operator&() const { + return data_; + } + + /// Returns a raw accessor to our slice. + __host__ __device__ __forceinline__ typename TensorType::DataPtrType data() { + return data_; + } + + /// Returns a raw accessor to our slice (const). + __host__ __device__ __forceinline__ + const typename TensorType::DataPtrType data() const { + return data_; + } + + /// Cast to a different datatype. + template + __host__ __device__ T& as() { + return *dataAs(); + } + + /// Cast to a different datatype (const). + template + __host__ __device__ const T& as() const { + return *dataAs(); + } + + /// Cast to a different datatype + template + __host__ __device__ __forceinline__ + typename PtrTraits::PtrType dataAs() { + return reinterpret_cast::PtrType>(data_); + } + + /// Cast to a different datatype (const) + template + __host__ __device__ __forceinline__ + typename PtrTraits::PtrType dataAs() const { + return reinterpret_cast::PtrType>(data_); + } + + /// Use the texture cache for reads + __device__ __forceinline__ typename TensorType::DataType ldg() const { +#if __CUDA_ARCH__ >= 350 + return __ldg(data_); +#else + return *data_; +#endif + } + + /// Use the texture cache for reads; cast as a particular type + template + __device__ __forceinline__ T ldgAs() const { +#if __CUDA_ARCH__ >= 350 + return __ldg(dataAs()); +#else + return as(); +#endif + } + + /// Returns a tensor that is a view of the SubDim-dimensional slice + /// of this tensor, starting where our data begins + THCDeviceTensor view() { + return tensor_.template view(data_); + } + + private: + /// One dimension greater can create us + friend class THCDeviceSubTensor; + + /// Our parent tensor can create us + friend class + THCDeviceTensor; + + __host__ __device__ __forceinline__ THCDeviceSubTensor( + TensorType& t, + typename TensorType::DataPtrType data) + : tensor_(t), + data_(data) { + } + + /// The tensor we're referencing + TensorType& tensor_; + + /// The start of our sub-region + typename TensorType::DataPtrType const data_; +}; + +} // namespace detail + +template class PtrTraits> +__host__ __device__ __forceinline__ +detail::THCDeviceSubTensor, + Dim - 1, PtrTraits> +THCDeviceTensor::operator[](IndexT index) { + return detail::THCDeviceSubTensor( + detail::THCDeviceSubTensor( + *this, data_)[index]); +} + +template class PtrTraits> +__host__ __device__ __forceinline__ +const detail::THCDeviceSubTensor, + Dim - 1, PtrTraits> +THCDeviceTensor::operator[](IndexT index) const { + return detail::THCDeviceSubTensor( + detail::THCDeviceSubTensor( + const_cast(*this), data_)[index]); +} + +#include + +#endif // THC_DEVICE_TENSOR_INC diff --git a/thirdparty/libtorch/include/THC/THCDeviceTensorUtils-inl.cuh b/thirdparty/libtorch/include/THC/THCDeviceTensorUtils-inl.cuh new file mode 100644 index 0000000000..d02e7d087c --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCDeviceTensorUtils-inl.cuh @@ -0,0 +1,118 @@ +namespace detail { + +// Add a layer of SFINAE to support static_assert +template class PtrTraits, + int NewDim, bool B> +struct UpcastTHCRoot { + static THCDeviceTensor + make(THCState* 
state, THCudaTensor* t); +}; + +template class PtrTraits, + int NewDim, bool B> +struct UpcastTHC : + UpcastTHCRoot { +}; + +// Never instantiated SFINAE purposes only +template class PtrTraits, + int NewDim> +struct UpcastTHC : + UpcastTHCRoot { +}; + +template class PtrTraits, + int NewDim> +struct UpcastTHC : + UpcastTHCRoot { + static THCDeviceTensor + make(THCState* state, THCudaTensor* t) { + thc_static_assert(NewDim > Dim); + return toDeviceTensor(state, t). + template upcastOuter(); + } +}; + +// Add a layer of SFINAE to support static_assert +template class PtrTraits, + int NewDim, bool B> +struct DowncastTHCRoot { + static THCDeviceTensor + make(THCState* state, THCudaTensor* t); +}; + +template class PtrTraits, + int NewDim, bool B> +struct DowncastTHC : + DowncastTHCRoot { +}; + +// Never instantiated SFINAE purposes only +template class PtrTraits, + int NewDim> +struct DowncastTHC : + DowncastTHCRoot { +}; + +template class PtrTraits, + int NewDim> +struct DowncastTHC : + DowncastTHCRoot { + static THCDeviceTensor + make(THCState* state, THCudaTensor* t) { + thc_static_assert(NewDim < Dim); + return toDeviceTensor(state, t). + template downcastOuter(); + } +}; + +} // namespace detail + +#define SWITCH_UNROLL_CUDA_CAST_FACTORY(i) \ + case i: \ + if (NewDim > i) { \ + return detail::UpcastTHC i)>:: \ + make(state, t); \ + } else if (NewDim == i) { \ + return toDeviceTensor(state, t); \ + } else { \ + return detail::DowncastTHC:: \ + make(state, t); \ + } \ + /* break; */ + +template class PtrTraits> +THCDeviceTensor +toDeviceTensorCast(THCState* state, THCudaTensor* t) { + switch (THCudaTensor_nDimensionLegacyAll(state, t)) { + SWITCH_UNROLL_CUDA_CAST_FACTORY(1); + SWITCH_UNROLL_CUDA_CAST_FACTORY(2); + SWITCH_UNROLL_CUDA_CAST_FACTORY(3); + SWITCH_UNROLL_CUDA_CAST_FACTORY(4); + SWITCH_UNROLL_CUDA_CAST_FACTORY(5); + SWITCH_UNROLL_CUDA_CAST_FACTORY(6); + SWITCH_UNROLL_CUDA_CAST_FACTORY(7); + SWITCH_UNROLL_CUDA_CAST_FACTORY(8); + SWITCH_UNROLL_CUDA_CAST_FACTORY(9); + SWITCH_UNROLL_CUDA_CAST_FACTORY(10); + default: + ; + } + + // Not implemented + THError("THCDeviceTensor dimension size not supported"); + return NULL; /* never enters this piece, appeasing compiler warnings */ +} + +#undef SWITCH_UNROLL_CUDA_CAST_FACTORY diff --git a/thirdparty/libtorch/include/THC/THCDeviceTensorUtils.cuh b/thirdparty/libtorch/include/THC/THCDeviceTensorUtils.cuh new file mode 100644 index 0000000000..285aa823c6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCDeviceTensorUtils.cuh @@ -0,0 +1,80 @@ +#ifndef THC_DEVICE_TENSOR_UTILS_INC +#define THC_DEVICE_TENSOR_UTILS_INC + +#include +#include +#include + +/// Constructs a DeviceTensor initialized from a THCudaTensor by +/// upcasting or downcasting the tensor to that of a different +/// dimension. +template class PtrTraits> +THCDeviceTensor +toDeviceTensorCast(THCState* state, THCudaTensor* t); + +template +THCDeviceTensor +toDeviceTensorCast(THCState* state, THCudaTensor* t) { + return toDeviceTensorCast(state, t); +} + +template +THCDeviceTensor +toDeviceTensorCast(THCState* state, THCudaTensor* t) { + return toDeviceTensorCast(state, t); +} + +/// Constructs a THCDeviceTensor initialized from a THCudaTensor. Will +/// error if the dimensionality does not match exactly. 
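// [Editor's note: illustrative sketch only, not part of the upstream header;
// the kernel and variable names below are invented for the example.]
// Typical host-side use of toDeviceTensor(), whose declaration follows: wrap an
// existing 2-D float tensor in a THCDeviceTensor view and pass it by value to a
// kernel that indexes it through operator[]:
//
//   __global__ void scaleKernel(THCDeviceTensor<float, 2> t, float alpha) {
//     int row = blockIdx.x;                                 // one block per row
//     for (int col = threadIdx.x; col < t.getSize(1); col += blockDim.x) {
//       t[row][col] = t[row][col] * alpha;                  // strided element access
//     }
//   }
//
//   // Host side; THError() is raised if `src` is not exactly 2-dimensional.
//   THCDeviceTensor<float, 2> devSrc = toDeviceTensor<float, 2>(state, src);
//   scaleKernel<<<devSrc.getSize(0), 128, 0,
//                 THCState_getCurrentStream(state)>>>(devSrc, 2.0f);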
+template class PtrTraits> +THCDeviceTensor +toDeviceTensor(THCState* state, THCTensor* t); + +template +THCDeviceTensor +toDeviceTensor(THCState* state, THCTensor* t) { + return toDeviceTensor(state, t); +} + +template +THCDeviceTensor +toDeviceTensor(THCState* state, THCTensor* t) { + return toDeviceTensor(state, t); +} + +template class PtrTraits> +THCDeviceTensor +toDeviceTensor(THCState* state, THCTensor* t) { + if (Dim != THCTensor_nDimensionLegacyAll(state, t)) { + THError("THCudaTensor dimension mismatch"); + } + // Determine the maximum offset into the tensor achievable; `IndexT` + // must be smaller than this type in order to use it. + ptrdiff_t maxOffset = 0; + IndexT sizes[Dim]; + IndexT strides[Dim]; + + for (int i = 0; i < Dim; ++i) { + int64_t size = THTensor_sizeLegacyNoScalars(t, i); + int64_t stride = THTensor_strideLegacyNoScalars(t, i); + + maxOffset += (size - 1) * stride; + + sizes[i] = (IndexT) size; + strides[i] = (IndexT) stride; + } + + if (maxOffset > std::numeric_limits::max()) { + THError("THCudaTensor sizes too large for THCDeviceTensor conversion"); + } + + return THCDeviceTensor( + t->data(), sizes, strides); +} + +#include + +#endif // THC_DEVICE_TENSOR_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCDeviceUtils.cuh b/thirdparty/libtorch/include/THC/THCDeviceUtils.cuh new file mode 100644 index 0000000000..34bc74f7a5 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCDeviceUtils.cuh @@ -0,0 +1,120 @@ +#ifndef THC_DEVICE_UTILS_INC +#define THC_DEVICE_UTILS_INC + +#include +/* The largest consecutive integer representable in float32 (2^24) */ +#define FLOAT32_MAX_CONSECUTIVE_INT 16777216.0f + +/** + Computes ceil(a / b) +*/ +template +__host__ __device__ __forceinline__ T THCCeilDiv(T a, T b) { + return (a + b - 1) / b; +} + +/** + Computes ceil(a / b) * b; i.e., rounds up `a` to the next highest + multiple of b +*/ +template +__host__ __device__ __forceinline__ T THCRoundUp(T a, T b) { + return THCCeilDiv(a, b) * b; +} + +/** + * For CC 3.5+, perform a load using __ldg + */ +template +__device__ __forceinline__ T doLdg(const T* p) { +#if __CUDA_ARCH__ >= 350 + return __ldg(p); +#else + return *p; +#endif +} + +__device__ __forceinline__ unsigned int ACTIVE_MASK() +{ +#if CUDA_VERSION >= 9000 + return __activemask(); +#else +// will be ignored anyway + return 0xffffffff; +#endif +} + +#if defined(__HIP_PLATFORM_HCC__) +__device__ __forceinline__ unsigned long long int WARP_BALLOT(int predicate) +{ + return __ballot(predicate); +} +#else +__device__ __forceinline__ unsigned int WARP_BALLOT(int predicate, unsigned int mask = 0xffffffff) +{ +#if CUDA_VERSION >= 9000 + return __ballot_sync(mask, predicate); +#else + return __ballot(predicate); +#endif +} +#endif + +template +__device__ __forceinline__ T WARP_SHFL_XOR(T value, int laneMask, int width = warpSize, unsigned int mask = 0xffffffff) +{ +#if CUDA_VERSION >= 9000 + return __shfl_xor_sync(mask, value, laneMask, width); +#else + return __shfl_xor(value, laneMask, width); +#endif +} + +template +__device__ __forceinline__ T WARP_SHFL(T value, int srcLane, int width = warpSize, unsigned int mask = 0xffffffff) +{ +#if CUDA_VERSION >= 9000 + return __shfl_sync(mask, value, srcLane, width); +#else + return __shfl(value, srcLane, width); +#endif +} + +template +__device__ __forceinline__ T WARP_SHFL_UP(T value, unsigned int delta, int width = warpSize, unsigned int mask = 0xffffffff) +{ +#if CUDA_VERSION >= 9000 + return __shfl_up_sync(mask, value, delta, width); +#else + return __shfl_up(value, 
delta, width); +#endif +} + +#ifdef __HIP_PLATFORM_HCC__ +//To handle ambiguity, add a type double version. +__device__ __forceinline__ double WARP_SHFL_DOWN(double value, unsigned int delta, int width = warpSize, unsigned int mask = 0xffffffff) +{ + //(HIP doesn't support double) + return (double) __shfl_down((float) value, delta, width); +} +__device__ __forceinline__ int64_t WARP_SHFL_DOWN(int64_t value, unsigned int delta, int width = warpSize, unsigned int mask = 0xffffffff) +{ + //(HIP doesn't support int64_t). Trick from https://devblogs.nvidia.com/faster-parallel-reductions-kepler/ + int2 a = *reinterpret_cast(&value); + a.x = __shfl_down(a.x, delta); + a.y = __shfl_down(a.y, delta); + return *reinterpret_cast(&a); +} +#endif +template +__device__ __forceinline__ T WARP_SHFL_DOWN(T value, unsigned int delta, int width = warpSize, unsigned int mask = 0xffffffff) +{ +#if CUDA_VERSION >= 9000 + return __shfl_down_sync(mask, value, delta, width); +#else + return __shfl_down(value, delta, width); +#endif +} + + +#endif // THC_DEVICE_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCGeneral.h b/thirdparty/libtorch/include/THC/THCGeneral.h new file mode 100644 index 0000000000..fe92f4cebb --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGeneral.h @@ -0,0 +1,84 @@ +#ifndef THC_GENERAL_INC +#define THC_GENERAL_INC + +#include +#include + +#include + +#include +#include +#include + +#include + +#define USE_MAGMA + +# define THC_EXTERNC extern "C" + +// TH & THC are now part of the same library as ATen and Caffe2 +// NB: However, we are planning to split it out to a torch_cuda library +#define THC_API THC_EXTERNC TORCH_CUDA_API +#define THC_CLASS TORCH_CUDA_API + +#ifndef THAssert +#define THAssert(exp) \ + do { \ + if (!(exp)) { \ + _THError(__FILE__, __LINE__, "assert(%s) failed", #exp); \ + } \ + } while(0) +#endif + +typedef struct THCState THCState; +struct THCState; + +typedef struct _THCCudaResourcesPerDevice { + /* Size of scratch space per each stream on this device available */ + size_t scratchSpacePerStream; +} THCCudaResourcesPerDevice; + +THC_API THCState* THCState_alloc(void); +THC_API void THCState_free(THCState* state); + +THC_API void THCudaInit(THCState* state); +THC_API void THCudaShutdown(THCState* state); + +/* If device `dev` can access allocations on device `devToAccess`, this will return */ +/* 1; otherwise, 0. 
*/ +THC_API int THCState_getPeerToPeerAccess(THCState* state, int dev, int devToAccess); + +THC_API c10::Allocator* THCState_getCudaHostAllocator(THCState* state); + +THC_API void THCMagma_init(THCState *state); + +/* State manipulators and accessors */ +THC_API int THCState_getNumDevices(THCState* state); + +THC_API cudaStream_t THCState_getCurrentStreamOnDevice(THCState *state, int device); +THC_API cudaStream_t THCState_getCurrentStream(THCState *state); + +/* For the current device and stream, returns the allocated scratch space */ +THC_API size_t THCState_getCurrentDeviceScratchSpaceSize(THCState* state); + +#define THCAssertSameGPU(expr) if (!expr) THError("arguments are located on different GPUs") +#define THCudaCheck(err) __THCudaCheck(err, __FILE__, __LINE__) +#define THCudaCheckWarn(err) __THCudaCheckWarn(err, __FILE__, __LINE__) +#define THCublasCheck(err) __THCublasCheck(err, __FILE__, __LINE__) +#define THCusparseCheck(err) __THCusparseCheck(err, __FILE__, __LINE__) + +THC_API void __THCudaCheck(cudaError_t err, const char *file, const int line); +THC_API void __THCudaCheckWarn(cudaError_t err, const char *file, const int line); +THC_API void __THCublasCheck(cublasStatus_t status, const char *file, const int line); +THC_API void __THCusparseCheck(cusparseStatus_t status, const char *file, const int line); + +THC_API void* THCudaMalloc(THCState *state, size_t size); +THC_API void THCudaFree(THCState *state, void* ptr); + +at::DataPtr THCudaHostAlloc(THCState *state, size_t size); + +THC_API void THCudaHostRecord(THCState *state, void *ptr); + +THC_API cudaError_t THCudaMemGetInfo(THCState *state, size_t* freeBytes, size_t* totalBytes, size_t* largestBlock); + +#endif diff --git a/thirdparty/libtorch/include/THC/THCGeneral.hpp b/thirdparty/libtorch/include/THC/THCGeneral.hpp new file mode 100644 index 0000000000..dc12b43cff --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGeneral.hpp @@ -0,0 +1,23 @@ +#pragma once + +#include + +/* Global state of THC. */ +struct THCState { + /* Set of all allocated resources. */ + THCCudaResourcesPerDevice* resourcesPerDevice; + /* Captured number of devices upon startup; convenience for bounds checking */ + int numDevices; + + /* Allocator using cudaMallocHost. */ + // NB: These allocators (specifically, cudaHostAllocator) MUST implement + // maybeGlobalBoundDeleter, because we have a few use-cases where we need to + // do raw allocations with them (for Thrust). + // TODO: Make this statically obvious + at::Allocator* cudaHostAllocator; + at::Allocator* cudaDeviceAllocator; + + /* Table of enabled peer-to-peer access between directed pairs of GPUs. + If i accessing allocs on j is enabled, p2pAccess[i][j] is 1; 0 otherwise. 
*/ + int** p2pAccessEnabled; +}; diff --git a/thirdparty/libtorch/include/THC/THCGenerateAllTypes.h b/thirdparty/libtorch/include/THC/THCGenerateAllTypes.h new file mode 100644 index 0000000000..48d19e5988 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateAllTypes.h @@ -0,0 +1,37 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateAllTypes.h" +#endif + +#define THCGenerateAllTypes + +#define THCTypeIdxByte 1 +#define THCTypeIdxChar 2 +#define THCTypeIdxShort 3 +#define THCTypeIdxInt 4 +#define THCTypeIdxLong 5 +#define THCTypeIdxFloat 6 +#define THCTypeIdxDouble 7 +#define THCTypeIdxHalf 8 +#define THCTypeIdx_(T) TH_CONCAT_2(THCTypeIdx,T) + +#include +#include +#include +#include +#include +#include +#include +#include + +#undef THCTypeIdxByte +#undef THCTypeIdxChar +#undef THCTypeIdxShort +#undef THCTypeIdxInt +#undef THCTypeIdxLong +#undef THCTypeIdxFloat +#undef THCTypeIdxDouble +#undef THCTypeIdxHalf +#undef THCTypeIdx_ + +#undef THCGenerateAllTypes +#undef THC_GENERIC_FILE diff --git a/thirdparty/libtorch/include/THC/THCGenerateBFloat16Type.h b/thirdparty/libtorch/include/THC/THCGenerateBFloat16Type.h new file mode 100644 index 0000000000..a19cae3b6f --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateBFloat16Type.h @@ -0,0 +1,27 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THCGenerateBFloat16Type.h" +#endif +#include + +#define scalar_t at::BFloat16 +#define accreal float +#define Real BFloat16 + +#define CReal CudaBFloat16 + +#define THC_REAL_IS_BFLOAT16 +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real + +#undef CReal + +#undef THC_REAL_IS_BFLOAT16 + +#ifndef THCGenerateAllTypes +#ifndef THCGenerateFloatTypes +#undef THC_GENERIC_FILE +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateBoolType.h b/thirdparty/libtorch/include/THC/THCGenerateBoolType.h new file mode 100644 index 0000000000..6a2ff4bf52 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateBoolType.h @@ -0,0 +1,22 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THCGenerateBoolType.h" +#endif + +#define scalar_t bool +#define ureal bool +#define accreal int64_t +#define Real Bool +#define CReal CudaBool +#define THC_REAL_IS_BOOL +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef ureal +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_BOOL + +#ifndef THCGenerateBoolType +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateByteType.h b/thirdparty/libtorch/include/THC/THCGenerateByteType.h new file mode 100644 index 0000000000..e2c5f35d91 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateByteType.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateByteType.h" +#endif + +#define scalar_t uint8_t +#define accreal int64_t +#define Real Byte +#define CReal CudaByte +#define THC_REAL_IS_BYTE +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_BYTE + +#ifndef THCGenerateAllTypes +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateCharType.h b/thirdparty/libtorch/include/THC/THCGenerateCharType.h new file mode 100644 index 0000000000..f41ffbd36a --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateCharType.h @@ -0,0 +1,20 @@ 
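// [Editor's note: illustrative sketch only, not part of the patch.] The
// THCGenerate*Type.h headers added above and below follow an "X-macro" style
// pattern: a consumer defines THC_GENERIC_FILE to point at a generic source
// written against the scalar_t / accreal / Real / CReal macros, then includes
// one of these generator headers, which re-includes that generic file once per
// scalar type with the macros bound appropriately. A hypothetical consumer
// might look roughly like:
//
//   // my_generic_op.h -- written once against the macros
//   //   void myFill(scalar_t* data, ptrdiff_t n, scalar_t value);
//
//   // my_op.cu -- stamp out one declaration per supported scalar type
//   #define THC_GENERIC_FILE "my_generic_op.h"
//   #include <THC/THCGenerateAllTypes.h>
//
// after which the generic file has been expanded for Byte, Char, Short, Int,
// Long, Half, Float and Double, matching the #undef list in
// THCGenerateAllTypes.h above. (The real THC code additionally pastes Real
// into function names via the TH_CONCAT_* macros so the expansions get
// distinct symbol names.)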
+#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateCharType.h" +#endif + +#define scalar_t int8_t +#define accreal int64_t +#define Real Char +#define CReal CudaChar +#define THC_REAL_IS_CHAR +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_CHAR + +#ifndef THCGenerateAllTypes +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateDoubleType.h b/thirdparty/libtorch/include/THC/THCGenerateDoubleType.h new file mode 100644 index 0000000000..4c81b36277 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateDoubleType.h @@ -0,0 +1,22 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateDoubleType.h" +#endif + +#define scalar_t double +#define accreal double +#define Real Double +#define CReal CudaDouble +#define THC_REAL_IS_DOUBLE +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_DOUBLE + +#ifndef THCGenerateAllTypes +#ifndef THCGenerateFloatTypes +#undef THC_GENERIC_FILE +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateFloatType.h b/thirdparty/libtorch/include/THC/THCGenerateFloatType.h new file mode 100644 index 0000000000..f84abcc837 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateFloatType.h @@ -0,0 +1,24 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateFloatType.h" +#endif + +#define scalar_t float +/* FIXME: fp64 has bad performance on some platforms; avoid using it unless + we opt into it? */ +#define accreal float +#define Real Float +#define CReal Cuda +#define THC_REAL_IS_FLOAT +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_FLOAT + +#ifndef THCGenerateAllTypes +#ifndef THCGenerateFloatTypes +#undef THC_GENERIC_FILE +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateFloatTypes.h b/thirdparty/libtorch/include/THC/THCGenerateFloatTypes.h new file mode 100644 index 0000000000..c44666446e --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateFloatTypes.h @@ -0,0 +1,32 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateFloatTypes.h" +#endif + +#define THCGenerateFloatTypes + +#define THCTypeIdxByte 1 +#define THCTypeIdxChar 2 +#define THCTypeIdxShort 3 +#define THCTypeIdxInt 4 +#define THCTypeIdxLong 5 +#define THCTypeIdxFloat 6 +#define THCTypeIdxDouble 7 +#define THCTypeIdxHalf 8 +#define THCTypeIdx_(T) TH_CONCAT_2(THCTypeIdx,T) + +#include +#include +#include + +#undef THCTypeIdxByte +#undef THCTypeIdxChar +#undef THCTypeIdxShort +#undef THCTypeIdxInt +#undef THCTypeIdxLong +#undef THCTypeIdxFloat +#undef THCTypeIdxDouble +#undef THCTypeIdxHalf +#undef THCTypeIdx_ + +#undef THCGenerateFloatTypes +#undef THC_GENERIC_FILE diff --git a/thirdparty/libtorch/include/THC/THCGenerateHalfType.h b/thirdparty/libtorch/include/THC/THCGenerateHalfType.h new file mode 100644 index 0000000000..e067559d5a --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateHalfType.h @@ -0,0 +1,28 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateHalfType.h" +#endif + +#include + +#define scalar_t THHalf +#define accreal float +#define Real Half + +#define CReal CudaHalf + +#define THC_REAL_IS_HALF +#line 1 THC_GENERIC_FILE 
+#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real + +#undef CReal + +#undef THC_REAL_IS_HALF + +#ifndef THCGenerateAllTypes +#ifndef THCGenerateFloatTypes +#undef THC_GENERIC_FILE +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateIntType.h b/thirdparty/libtorch/include/THC/THCGenerateIntType.h new file mode 100644 index 0000000000..083ebce323 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateIntType.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateIntType.h" +#endif + +#define scalar_t int32_t +#define accreal int64_t +#define Real Int +#define CReal CudaInt +#define THC_REAL_IS_INT +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_INT + +#ifndef THCGenerateAllTypes +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateLongType.h b/thirdparty/libtorch/include/THC/THCGenerateLongType.h new file mode 100644 index 0000000000..7fc587654c --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateLongType.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateLongType.h" +#endif + +#define scalar_t int64_t +#define accreal int64_t +#define Real Long +#define CReal CudaLong +#define THC_REAL_IS_LONG +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_LONG + +#ifndef THCGenerateAllTypes +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCGenerateShortType.h b/thirdparty/libtorch/include/THC/THCGenerateShortType.h new file mode 100644 index 0000000000..5f05d65ce2 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCGenerateShortType.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#error "You must define THC_GENERIC_FILE before including THGenerateShortType.h" +#endif + +#define scalar_t int16_t +#define accreal int64_t +#define Real Short +#define CReal CudaShort +#define THC_REAL_IS_SHORT +#line 1 THC_GENERIC_FILE +#include THC_GENERIC_FILE +#undef scalar_t +#undef accreal +#undef Real +#undef CReal +#undef THC_REAL_IS_SHORT + +#ifndef THCGenerateAllTypes +#undef THC_GENERIC_FILE +#endif diff --git a/thirdparty/libtorch/include/THC/THCIntegerDivider.cuh b/thirdparty/libtorch/include/THC/THCIntegerDivider.cuh new file mode 100644 index 0000000000..75c0a5079f --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCIntegerDivider.cuh @@ -0,0 +1,123 @@ +#ifndef THC_INTEGER_DIVIDER_INC +#define THC_INTEGER_DIVIDER_INC + +#include +#if defined(__CUDA_ARCH__) || defined(__HIP_DEVICE_COMPILE__) +#include +#endif + +// A utility class to implement integer division by muliplication, given a fixed +// divisor. +// +// WARNING: The fast divider algorithm is only implemented for unsigned int; +// otherwise we default to plain integer division. For unsigned int, +// we further assume that the dividend is at most INT32_MAX. Thus, +// IntDivider must NOT be used for general integer division. +// +// This reduced range is enough for our purpose, and it allows us to +// slightly simplify the computation. +// +// (NOTE: Below, "2^k" denotes exponentiation, i.e., 1< 0), we can find a "magic number" m (2^N +// <= m < 2^(N+1)) and shift s such that: +// +// \floor(n / d) = \floor((m * n) / 2^(N+s)). 
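// [Editor's worked example, not part of the upstream comment.] For instance,
// with N = 32 and d = 6, one valid choice is s = 3 and m = 5726623062
// (which satisfies 2^32 <= m < 2^33). Then for n = 100:
//   floor((m * n) / 2^(N+s)) = floor(572662306200 / 2^35) = 16 = floor(100 / 6).
// The derivation of m and s continues below.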
+// +// Given such m and s, the integer division can be then implemented as: +// +// let m' = m - 2^N // 0 <= m' < 2^N +// +// fast_integer_division(n): +// // Multiply two N-bit unsigned integers: the result is a 2N-bit unsigned +// // integer. Then take the higher N bits. +// t = (m' * n) >> N +// +// // Here we use the fact that n is less than 2^(N-1): otherwise the value +// // of (t + n) may not fit in an N-bit integer. +// return (t + n) >> s +// +// Finding such a magic number is surprisingly easy: +// +// s = \ceil(\log_2 d) +// m' = \floor(2^N * (2^s - d) / d) + 1 // Need 2N-bit integer arithmetic. +// +// See also: +// - Division by Invariant Integers Using Multiplication, +// Torbjörn Granlund and Peter L. Montgomery, 1994. +// +// - http://www.hackersdelight.org/magic.htm +// +// - http://ridiculousfish.com/blog/posts/labor-of-division-episode-i.html + +// Result of div/mod operation stored together. +template +struct DivMod { + Value div, mod; + + __host__ __device__ DivMod(Value div, Value mod) : div(div), mod(mod) { } +}; + +// Base case: we only have an implementation for uint32_t for now. For +// everything else, we use plain division. +template +struct IntDivider { + IntDivider() { } // Dummy constructor for arrays. + IntDivider(Value d) : divisor(d) { } + + __host__ __device__ inline Value div(Value n) const { return n / divisor; } + __host__ __device__ inline Value mod(Value n) const { return n % divisor; } + __host__ __device__ inline DivMod divmod(Value n) const { + return DivMod(n / divisor, n % divisor); + } + + Value divisor; +}; + +// Implement fast integer division. +template <> +struct IntDivider { + static_assert(sizeof(unsigned int) == 4, "Assumes 32-bit unsigned int."); + + IntDivider() { } // Dummy constructor for arrays. + + IntDivider(unsigned int d) : divisor(d) { + assert(divisor >= 1 && divisor <= INT32_MAX); + + // TODO: gcc/clang has __builtin_clz() but it's not portable. + for (shift = 0; shift < 32; shift++) if ((1U << shift) >= divisor) break; + + uint64_t one = 1; + uint64_t magic = ((one << 32) * ((one << shift) - divisor)) / divisor + 1; + m1 = magic; + assert(m1 > 0 && m1 == magic); // m1 must fit in 32 bits. + } + + __host__ __device__ inline unsigned int div(unsigned int n) const { +#if defined(__CUDA_ARCH__) || defined(__HIP_DEVICE_COMPILE__) + // 't' is the higher 32-bits of unsigned 32-bit multiplication of 'n' and + // 'm1'. + unsigned int t = __umulhi(n, m1); + return (t + n) >> shift; +#else + // Using uint64_t so that the addition does not overflow. + uint64_t t = ((uint64_t) n * m1) >> 32; + return (t + n) >> shift; +#endif + } + + __host__ __device__ inline unsigned int mod(unsigned int n) const { + return n - div(n) * divisor; + } + + __host__ __device__ inline DivMod divmod(unsigned int n) const { + unsigned int q = div(n); + return DivMod(q, n - q * divisor); + } + + unsigned int divisor; // d above. + unsigned int m1; // Magic number: m' above. + unsigned int shift; // Shift amounts. +}; + +#endif // THC_INTEGER_DIVIDER_INC diff --git a/thirdparty/libtorch/include/THC/THCNumerics.cuh b/thirdparty/libtorch/include/THC/THCNumerics.cuh new file mode 100644 index 0000000000..baa54cf6cc --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCNumerics.cuh @@ -0,0 +1,405 @@ +#ifndef THC_NUMERICS_INC +#define THC_NUMERICS_INC + +#include +#include +#include +#include +#include +#include +#include + +// WARNING: THCNumerics is being deprecated. Please follow the comments +// in this file to learn about new usages. 
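// [Editor's note: illustrative sketch only, not part of the upstream header.]
// A concrete example of the migration described in the usage comments below,
// for a half-precision value `a` (variable names invented for illustration):
//
//   at::Half a = /* ... */;
//   // Deprecated style, via this header:
//   at::Half y_old = THCNumerics<at::Half>::cos(a);
//   // Recommended style: route through float and use the standard/CUDA math
//   // functions directly, relying on at::Half's float conversions:
//   at::Half y_new = static_cast<at::Half>(std::cos(static_cast<float>(a)));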
+// Comments on usage: +// - lt,le,gt,ge,eq,neg,add,mul,sub,div and other binary ops can +// be implemented using CUDA_apply_utils or binary cuda kernel +// - Check NumericLimits.cuh for specialized math functions. +// - Note how __half and at::Half can be casted. for instance: +// static_cast(std::sin(static_cast(a))); + +template +struct THCNumerics { +}; + +template +static inline __host__ __device__ T powi(T a, T b) { + assert(THCNumerics::ge(b, 0)); + T result = 1; + while (b) { + if (b & 1) { + result *= a; + } + b /= 2; + a *= a; + } + return result; +} + +// DEPRECATED: For integral types, use math functions from std and NumericLimits.cuh. +// Use binary_kernel or CUDA_apply_utils for arithmetic +template <> +struct THCNumerics { + static inline __host__ __device__ uint8_t min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ uint8_t max() { return at::numeric_limits::max(); } + static inline __host__ __device__ uint8_t lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ uint8_t upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(uint8_t a, uint8_t b) { return a < b; } + static inline __host__ __device__ bool le(uint8_t a, uint8_t b) { return a <= b; } + static inline __host__ __device__ bool gt(uint8_t a, uint8_t b) { return a > b; } + static inline __host__ __device__ bool ge(uint8_t a, uint8_t b) { return a >= b; } + static inline __host__ __device__ bool eq(uint8_t a, uint8_t b) { return a == b; } + static inline __device__ bool eq_with_nan(uint8_t a, uint8_t b) { return a == b; } + static inline __host__ __device__ bool ne(uint8_t a, uint8_t b) { return a != b; } + + static inline __host__ __device__ uint8_t add(uint8_t a, uint8_t b) { return a + b; } + static inline __host__ __device__ uint8_t mul(uint8_t a, uint8_t b) { return a * b; } + static inline __host__ __device__ uint8_t sub(uint8_t a, uint8_t b) { return a - b; } + static inline __host__ __device__ uint8_t div(uint8_t a, uint8_t b) { return a / b; } + static inline __host__ __device__ uint8_t pow(uint8_t a, uint8_t b) { return powi(a, b); } + static inline __host__ __device__ bool isnan(uint8_t a) { return false; } + static inline __host__ __device__ bool isinf(uint8_t a) { return false; } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ bool min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ bool max() { return at::numeric_limits::max(); } + static inline __host__ __device__ bool lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ bool upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(bool a, bool b) { return a < b; } + static inline __host__ __device__ bool le(bool a, bool b) { return a <= b; } + static inline __host__ __device__ bool gt(bool a, bool b) { return a > b; } + static inline __host__ __device__ bool ge(bool a, bool b) { return a >= b; } + static inline __host__ __device__ bool eq(bool a, bool b) { return a == b; } + static inline __host__ __device__ bool ne(bool a, bool b) { return a != b; } + static inline __host__ __device__ bool add(bool a, bool b) { return a + b; } + static inline __host__ __device__ bool mul(bool a, bool b) { return a && b; } + static inline __host__ __device__ bool sub(bool a, bool b) { return a - b; } + static inline __host__ __device__ bool div(bool a, bool b) { return a / b; } + static inline 
__host__ __device__ bool isnan(bool a) { return false; } + static inline __host__ __device__ bool isinf(bool a) { return false; } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ int8_t min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ int8_t max() { return at::numeric_limits::max(); } + static inline __host__ __device__ int8_t lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ int8_t upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(int8_t a, int8_t b) { return a < b; } + static inline __host__ __device__ bool le(int8_t a, int8_t b) { return a <= b; } + static inline __host__ __device__ bool gt(int8_t a, int8_t b) { return a > b; } + static inline __host__ __device__ bool ge(int8_t a, int8_t b) { return a >= b; } + static inline __host__ __device__ bool eq(int8_t a, int8_t b) { return a == b; } + static inline __device__ bool eq_with_nan(int8_t a, int8_t b) { return a == b; } + static inline __host__ __device__ bool ne(int8_t a, int8_t b) { return a != b; } + + static inline __host__ __device__ int8_t add(int8_t a, int8_t b) { return a + b; } + static inline __host__ __device__ int8_t mul(int8_t a, int8_t b) { return a * b; } + static inline __host__ __device__ int8_t sub(int8_t a, int8_t b) { return a - b; } + static inline __host__ __device__ int8_t div(int8_t a, int8_t b) { return a / b; } + static inline __host__ __device__ int8_t pow(int8_t a, int8_t b) { return powi(a, b); } + static inline __host__ __device__ bool isnan(int8_t a) { return false; } + static inline __host__ __device__ bool isinf(int8_t a) { return false; } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ int16_t min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ int16_t max() { return at::numeric_limits::max(); } + static inline __host__ __device__ int16_t lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ int16_t upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(int16_t a, int16_t b) { return a < b; } + static inline __host__ __device__ bool le(int16_t a, int16_t b) { return a <= b; } + static inline __host__ __device__ bool gt(int16_t a, int16_t b) { return a > b; } + static inline __host__ __device__ bool ge(int16_t a, int16_t b) { return a >= b; } + static inline __host__ __device__ bool eq(int16_t a, int16_t b) { return a == b; } + static inline __device__ bool eq_with_nan(int16_t a, int16_t b) { return a == b; } + static inline __host__ __device__ bool ne(int16_t a, int16_t b) { return a != b; } + + static inline __host__ __device__ int16_t add(int16_t a, int16_t b) { return a + b; } + static inline __host__ __device__ int16_t mul(int16_t a, int16_t b) { return a * b; } + static inline __host__ __device__ int16_t sub(int16_t a, int16_t b) { return a - b; } + static inline __host__ __device__ int16_t div(int16_t a, int16_t b) { return a / b; } + static inline __host__ __device__ int16_t pow(int16_t a, int16_t b) { return powi(a, b); } + static inline __host__ __device__ bool isnan(int16_t a) { return false; } + static inline __host__ __device__ bool isinf(int16_t a) { return false; } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ int32_t min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ int32_t max() { return 
at::numeric_limits::max(); } + static inline __host__ __device__ int32_t lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ int32_t upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(int32_t a, int32_t b) { return a < b; } + static inline __host__ __device__ bool le(int32_t a, int32_t b) { return a <= b; } + static inline __host__ __device__ bool gt(int32_t a, int32_t b) { return a > b; } + static inline __host__ __device__ bool ge(int32_t a, int32_t b) { return a >= b; } + static inline __host__ __device__ bool eq(int32_t a, int32_t b) { return a == b; } + static inline __device__ bool eq_with_nan(int32_t a, int32_t b) { return a == b; } + static inline __host__ __device__ bool ne(int32_t a, int32_t b) { return a != b; } + + static inline __host__ __device__ int32_t add(int32_t a, int32_t b) { return a + b; } + static inline __host__ __device__ int32_t mul(int32_t a, int32_t b) { return a * b; } + static inline __host__ __device__ int32_t sub(int32_t a, int32_t b) { return a - b; } + static inline __host__ __device__ int32_t div(int32_t a, int32_t b) { return a / b; } + static inline __host__ __device__ int32_t pow(int32_t a, int32_t b) { return powi(a, b); } + static inline __host__ __device__ bool isnan(int32_t a) { return false; } + static inline __host__ __device__ bool isinf(int32_t a) { return false; } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ int64_t min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ int64_t max() { return at::numeric_limits::max(); } + static inline __host__ __device__ int64_t lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ int64_t upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(int64_t a, int64_t b) { return a < b; } + static inline __host__ __device__ bool le(int64_t a, int64_t b) { return a <= b; } + static inline __host__ __device__ bool gt(int64_t a, int64_t b) { return a > b; } + static inline __host__ __device__ bool ge(int64_t a, int64_t b) { return a >= b; } + static inline __host__ __device__ bool eq(int64_t a, int64_t b) { return a == b; } + static inline __device__ bool eq_with_nan(int64_t a, int64_t b) { return a == b; } + static inline __host__ __device__ bool ne(int64_t a, int64_t b) { return a != b; } + + + static inline __host__ __device__ int64_t add(int64_t a, int64_t b) { return a + b; } + static inline __host__ __device__ int64_t mul(int64_t a, int64_t b) { return a * b; } + static inline __host__ __device__ int64_t sub(int64_t a, int64_t b) { return a - b; } + static inline __host__ __device__ int64_t div(int64_t a, int64_t b) { return a / b; }; + static inline __host__ __device__ int64_t pow(int64_t a, int64_t b) { return powi(a, b); } + static inline __host__ __device__ bool isnan(int64_t a) { return false; } + static inline __host__ __device__ bool isinf(int64_t a) { return false; } +}; + +// DEPRECATED: use math functions from std and NumericLimits.cuh +template <> +struct THCNumerics { + static inline __host__ __device__ at::Half min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ at::Half max() { return at::numeric_limits::max(); } + static inline __host__ __device__ at::Half lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ at::Half upper_bound() { return at::numeric_limits::upper_bound(); } 
+ + static inline __host__ __device__ bool lt(at::Half a, at::Half b) { return a < b; } + static inline __host__ __device__ bool le(at::Half a, at::Half b) { return a <= b; } + static inline __host__ __device__ bool gt(at::Half a, at::Half b) { return a > b; } + static inline __host__ __device__ bool ge(at::Half a, at::Half b) { return a >= b; } + static inline __host__ __device__ bool eq(at::Half a, at::Half b) { return a == b; } + static inline __device__ bool eq_with_nan(at::Half a, at::Half b) { return __half_as_ushort(a) == __half_as_ushort(b); } + static inline __host__ __device__ bool ne(at::Half a, at::Half b) { return a != b; } + + static inline __host__ __device__ at::Half exp(at::Half a) { return std::exp(a); } + static inline __host__ __device__ at::Half exp10(at::Half a) { return ::exp10(a); } + static inline __host__ __device__ at::Half cos(at::Half a) { return ::cos(a); } + static inline __host__ __device__ at::Half sqrt(at::Half a) { return ::sqrt(a); } + static inline __host__ __device__ at::Half cosh(at::Half a) { return ::cosh(a); } + static inline __host__ __device__ at::Half tan(at::Half a) { return ::tan(a); } + static inline __host__ __device__ at::Half atan(at::Half a) { return ::atan(a); } + static inline __host__ __device__ at::Half tanh(at::Half a) { return ::tanh(a); } + static inline __host__ __device__ at::Half erf(at::Half a) { return ::erf(a); } + static inline __host__ __device__ at::Half erfc(at::Half a) { return ::erfc(a); } + static inline __host__ __device__ at::Half cinv(at::Half a) { return 1.0f / a; } + static inline __host__ __device__ at::Half add(at::Half a, at::Half b) { return a + b; } + static inline __host__ __device__ at::Half div(at::Half a, at::Half b) { return a / b; } + static inline __host__ __device__ at::Half mul(at::Half a, at::Half b) { return a * b; } + static inline __host__ __device__ at::Half sub(at::Half a, at::Half b) { return a - b; } + static inline __host__ __device__ at::Half pow(at::Half a, at::Half b) { return ::pow(a, b); } + + static inline __host__ __device__ bool isnan(at::Half a) { + #ifdef _MSC_VER + // Windows requires this explicit conversion. The reason is unclear + // related issue with clang: https://reviews.llvm.org/D37906 + return ::isnan((float) a); + #else + return ::isnan(a); + #endif + } + + static inline __host__ __device__ bool isinf(at::Half a) { + #ifdef _MSC_VER + // Windows requires this explicit conversion. 
The reason is unclear + // related issue with clang: https://reviews.llvm.org/D37906 + return ::isinf((float) a); + #else + return ::isinf(a); + #endif + } + +}; + +// DEPRECATED: use math functions from std and cuda math API (if needed) +// note that the functions exp10,erfinv and cinv +// are not in the std namespace +template <> +struct THCNumerics { + static inline __host__ __device__ float min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ float max() { return at::numeric_limits::max(); } + static inline __host__ __device__ float lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ float upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(float a, float b) { return a < b; } + static inline __host__ __device__ bool le(float a, float b) { return a <= b; } + static inline __host__ __device__ bool gt(float a, float b) { return a > b; } + static inline __host__ __device__ bool ge(float a, float b) { return a >= b; } + static inline __host__ __device__ bool eq(float a, float b) { return a == b; } + static inline __device__ bool eq_with_nan(float a, float b) { return __float_as_int(a) == __float_as_int(b); } + static inline __host__ __device__ bool ne(float a, float b) { return a != b; } + + static inline __host__ __device__ float exp (float a) { return expf(a); } + static inline __host__ __device__ float exp10(float a) { return exp10f(a); } + static inline __host__ __device__ float cos (float a) { return cosf(a); } + static inline __host__ __device__ float sqrt (float a) { return sqrtf(a); } + static inline __host__ __device__ float cosh (float a) { return coshf(a); } + static inline __host__ __device__ float tan (float a) { return tanf(a); } + static inline __host__ __device__ float atan (float a) { return atanf(a); } + static inline __host__ __device__ float tanh (float a) { return tanhf(a); } + static inline __host__ __device__ float erf (float a) { return erff(a); } + static inline __host__ __device__ float erfc (float a) { return erfcf(a); } + static inline __host__ __device__ float cinv (float a) { return 1.0f / a; } + static inline __host__ __device__ float add (float a, float b) { return a + b; } + static inline __host__ __device__ float div (float a, float b) { return a / b; } + static inline __host__ __device__ float mul (float a, float b) { return a * b; } + static inline __host__ __device__ float sub (float a, float b) { return a - b; } + static inline __host__ __device__ float pow (float a, float b) { return powf(a, b); } + static inline __host__ __device__ bool isnan(float a) { return ::isnan(a); } + static inline __host__ __device__ bool isinf(float a) { return ::isinf(a); } +}; + +template <> +struct THCNumerics { + static inline __host__ __device__ at::BFloat16 min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ at::BFloat16 max() { return at::numeric_limits::max(); } + static inline __host__ __device__ at::BFloat16 lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ at::BFloat16 upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(at::BFloat16 a, at::BFloat16 b) { return a < b; } + static inline __host__ __device__ bool le(at::BFloat16 a, at::BFloat16 b) { return a <= b; } + static inline __host__ __device__ bool gt(at::BFloat16 a, at::BFloat16 b) { return a > b; } + static inline __host__ __device__ bool ge(at::BFloat16 
a, at::BFloat16 b) { return a >= b; } + static inline __host__ __device__ bool eq(at::BFloat16 a, at::BFloat16 b) { return a == b; } + static inline __host__ __device__ bool ne(at::BFloat16 a, at::BFloat16 b) { return a != b; } + + static inline __host__ __device__ at::BFloat16 lgamma(at::BFloat16 a) { return lgammaf(a);} + static inline __host__ __device__ at::BFloat16 exp (at::BFloat16 a) { return expf(a); } + static inline __host__ __device__ at::BFloat16 exp10(at::BFloat16 a) { return exp10f(a); } + static inline __host__ __device__ at::BFloat16 log (at::BFloat16 a) { return logf(a); } + static inline __host__ __device__ at::BFloat16 log10(at::BFloat16 a) { return log10f(a); } + static inline __host__ __device__ at::BFloat16 log1p(at::BFloat16 a) { return log1pf(a); } + static inline __host__ __device__ at::BFloat16 log2 (at::BFloat16 a) { return log2f(a); } + static inline __host__ __device__ at::BFloat16 expm1(at::BFloat16 a) { return expm1f(a); } + static inline __host__ __device__ at::BFloat16 cos (at::BFloat16 a) { return cosf(a); } + static inline __host__ __device__ at::BFloat16 sin (at::BFloat16 a) { return sinf(a); } + static inline __host__ __device__ at::BFloat16 sqrt (at::BFloat16 a) { return sqrtf(a); } + static inline __host__ __device__ at::BFloat16 rsqrt(at::BFloat16 a) { return rsqrtf(a); } + static inline __host__ __device__ at::BFloat16 floor(at::BFloat16 a) { return floorf(a); } + static inline __host__ __device__ at::BFloat16 trunc(at::BFloat16 a) { return truncf(a); } + static inline __host__ __device__ at::BFloat16 acos (at::BFloat16 a) { return acosf(a); } + static inline __host__ __device__ at::BFloat16 cosh (at::BFloat16 a) { return coshf(a); } + static inline __host__ __device__ at::BFloat16 acosh(at::BFloat16 a) { return acoshf(a); } + static inline __host__ __device__ at::BFloat16 asin (at::BFloat16 a) { return asinf(a); } + static inline __host__ __device__ at::BFloat16 sinh (at::BFloat16 a) { return sinhf(a); } + static inline __host__ __device__ at::BFloat16 asinh(at::BFloat16 a) { return asinhf(a); } + static inline __host__ __device__ at::BFloat16 tan (at::BFloat16 a) { return tanf(a); } + static inline __host__ __device__ at::BFloat16 atan (at::BFloat16 a) { return atanf(a); } + static inline __host__ __device__ at::BFloat16 tanh (at::BFloat16 a) { return tanhf(a); } + static inline __host__ __device__ at::BFloat16 erf (at::BFloat16 a) { return erff(a); } + static inline __host__ __device__ at::BFloat16 erfc (at::BFloat16 a) { return erfcf(a); } + static inline __host__ __device__ at::BFloat16 abs (at::BFloat16 a) { return fabsf(a); } + static inline __host__ __device__ at::BFloat16 round(at::BFloat16 a) { return nearbyintf(a); } + static inline __host__ __device__ at::BFloat16 frac (at::BFloat16 a) { return a - truncf(a); } + static inline __host__ __device__ at::BFloat16 cinv (at::BFloat16 a) { return 1.0f / a; } + static inline __host__ __device__ at::BFloat16 add (at::BFloat16 a, at::BFloat16 b) { return a + b; } + static inline __host__ __device__ at::BFloat16 div (at::BFloat16 a, at::BFloat16 b) { return a / b; } + static inline __host__ __device__ at::BFloat16 mul (at::BFloat16 a, at::BFloat16 b) { return a * b; } + static inline __host__ __device__ at::BFloat16 sub (at::BFloat16 a, at::BFloat16 b) { return a - b; } + static inline __host__ __device__ at::BFloat16 pow (at::BFloat16 a, at::BFloat16 b) { return powf(a, b); } + static inline __host__ __device__ at::BFloat16 atan2(at::BFloat16 a, at::BFloat16 b) { return atan2f(a, b); } + + static 
inline __host__ __device__ bool isnan(at::BFloat16 a) { + #ifdef _MSC_VER + // Windows requires this explicit conversion. The reason is unclear + // related issue with clang: https://reviews.llvm.org/D37906 + return ::isnan((float) a); + #else + return ::isnan(a); + #endif + } + + static inline __host__ __device__ bool isinf(at::BFloat16 a) { + #ifdef _MSC_VER + // Windows requires this explicit conversion. The reason is unclear + // related issue with clang: https://reviews.llvm.org/D37906 + return ::isinf((float) a); + #else + return ::isinf(a); + #endif + } +}; + +// DEPRECATED: use math functions from std and cuda math API (if needed) +// note that the functions exp10,erfinv and cinv +// are not in the std namespace +template <> +struct THCNumerics { + static inline __host__ __device__ double min() { return at::numeric_limits::lowest(); } + static inline __host__ __device__ double max() { return at::numeric_limits::max(); } + static inline __host__ __device__ double lower_bound() { return at::numeric_limits::lower_bound(); } + static inline __host__ __device__ double upper_bound() { return at::numeric_limits::upper_bound(); } + + static inline __host__ __device__ bool lt(double a, double b) { return a < b; } + static inline __host__ __device__ bool le(double a, double b) { return a <= b; } + static inline __host__ __device__ bool gt(double a, double b) { return a > b; } + static inline __host__ __device__ bool ge(double a, double b) { return a >= b; } + static inline __host__ __device__ bool eq(double a, double b) { return a == b; } + static inline __device__ bool eq_with_nan(double a, double b) { return __double_as_longlong(a) == __double_as_longlong(b); } + static inline __host__ __device__ bool ne(double a, double b) { return a != b; } + + static inline __host__ __device__ double exp (double a) { return ::exp(a); } + static inline __host__ __device__ double exp10(double a) { return ::exp10(a); } + static inline __host__ __device__ double cos (double a) { return ::cos(a); } + static inline __host__ __device__ double sqrt (double a) { return ::sqrt(a); } + static inline __host__ __device__ double cosh (double a) { return ::cosh(a); } + static inline __host__ __device__ double tan (double a) { return ::tan(a); } + static inline __host__ __device__ double atan (double a) { return ::atan(a); } + static inline __host__ __device__ double tanh (double a) { return ::tanh(a); } + static inline __host__ __device__ double erf (double a) { return ::erf(a); } + static inline __host__ __device__ double erfc (double a) { return ::erfc(a); } + static inline __host__ __device__ double cinv (double a) { return 1.0 / a; } + static inline __host__ __device__ double add (double a, double b) { return a + b; } + static inline __host__ __device__ double div (double a, double b) { return a / b; } + static inline __host__ __device__ double mul (double a, double b) { return a * b; } + static inline __host__ __device__ double sub (double a, double b) { return a - b; } + static inline __host__ __device__ double pow (double a, double b) { return ::pow(a, b); } + static inline __host__ __device__ bool isnan(double a) { return ::isnan(a); } + static inline __host__ __device__ bool isinf(double a) { return ::isinf(a); } +}; + +// WARNING: The following note is deprecated +/// `half` has some type conversion issues associated with it, since it +/// is a struct without a constructor/implicit conversion constructor. +/// We use this to convert scalar values to the given type that the +/// tensor expects. 
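// [Editor's note: illustrative sketch only, not part of the upstream comment.]
// Typical use of the conversion helpers declared a few lines below (variable
// names invented for the example):
//
//   at::Half h = /* ... */;
//   float f = scalar_cast<float>(h);           // ScalarConvert<at::Half, float>::to(h)
//   at::Half back = scalar_cast<at::Half>(f);  // plain static_cast-style conversion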
+/// +/// at::Half has implicit conversions for float and __half types. Moreover +/// it has constructors for __half and float types. + +template +struct ScalarConvert { + static __host__ __device__ Out to(const In v) { return (Out) v; } +}; + +// DEPRECATED: use static_cast in kernels instead of scalar_cast +template +__host__ __device__ T scalar_cast(U u) { + return ScalarConvert::to(u); +} + +#endif // THC_NUMERICS_INC diff --git a/thirdparty/libtorch/include/THC/THCReduce.cuh b/thirdparty/libtorch/include/THC/THCReduce.cuh new file mode 100644 index 0000000000..856d3226c6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCReduce.cuh @@ -0,0 +1,643 @@ +#ifndef THC_REDUCE_INC +#define THC_REDUCE_INC + +// +// This file contains dimension reduction operation functions and +// kernels that work on both contiguous and non-contiguous tensor +// arguments of arbitrary (up to MAX_CUTORCH_DIMS) dimensioned +// arguments without copying or temporary storage. +// + +#include +#include +#include +#include + +// Threads per thread block +#define THC_NONCONTIG_REDUCE_BLOCK_SIZE 32 * 16 +#define CHUNKPERBLOCK 256 + +template +__device__ __forceinline__ IndexType getReduceNoncontigDimSliceIndex() { + // Each thread handles one slice + return getLinearBlockId() * THC_NONCONTIG_REDUCE_BLOCK_SIZE + threadIdx.x; +} + +// quick hack to enable two-stage use of reduceChunk +template +struct SimpleCopyOp +{ + __device__ __forceinline__ T operator()(volatile const T val) const volatile + { + return val; + } +}; + +__device__ __forceinline__ int lastpow2(int n) +{ + int out = 1 << (31 - __clz(n)); + if(n == out) + out >>= 1; + return out; +} + +template + +__device__ __forceinline__ void reduceChunk + (T* out, + U* in, + const int& inbounds, + const IndexType& reductionStride, + const IndexType& reductionSize, + const IndexType& inOffset, + const IndexType& outOffset, + const int& shmem_lim, + AccT init, + AccT* shmem, + ModifyOp modifyOp, + ReduceOp reduceOp, + FinalizeOp finalizeOp) +{ + AccT load_reg[4]; + AccT local_reg = init; + + //Unroll this loop + //for(IndexType i=threadIdx.y; i(in[inOffset + i*reductionStride]); + load_reg[0] = modifyOp(val0); + const AccT val1 = scalar_cast(in[inOffset + (i + blockDim.y)*reductionStride]); + load_reg[1] = modifyOp(val1); + const AccT val2 = scalar_cast(in[inOffset + (i + blockDim.y*2)*reductionStride]); + load_reg[2] = modifyOp(val2); + const AccT val3 = scalar_cast(in[inOffset + (i + blockDim.y*3)*reductionStride]); + load_reg[3] = modifyOp(val3); + local_reg = reduceOp(local_reg, load_reg[0]); + local_reg = reduceOp(local_reg, load_reg[1]); + local_reg = reduceOp(local_reg, load_reg[2]); + local_reg = reduceOp(local_reg, load_reg[3]); + } + else if (i + blockDim.y*2 < reductionSize) + { + const AccT val0 = scalar_cast(in[inOffset + i*reductionStride]); + load_reg[0] = modifyOp(val0); + const AccT val1 = scalar_cast(in[inOffset + (i + blockDim.y)*reductionStride]); + load_reg[1] = modifyOp(val1); + const AccT val2 = scalar_cast(in[inOffset + (i + blockDim.y*2)*reductionStride]); + load_reg[2] = modifyOp(val2); + local_reg = reduceOp(local_reg, load_reg[0]); + local_reg = reduceOp(local_reg, load_reg[1]); + local_reg = reduceOp(local_reg, load_reg[2]); + } + else if (i + blockDim.y < reductionSize) + { + const AccT val0 = scalar_cast(in[inOffset + i*reductionStride]); + load_reg[0] = modifyOp(val0); + const AccT val1 = scalar_cast(in[inOffset + (i + blockDim.y)*reductionStride]); + load_reg[1] = modifyOp(val1); + local_reg = reduceOp(local_reg, load_reg[0]); + 
local_reg = reduceOp(local_reg, load_reg[1]); + } + else if (i < reductionSize) + { + const AccT val0 = scalar_cast(in[inOffset + i*reductionStride]); + local_reg = reduceOp(local_reg, modifyOp(val0)); + } + } + + *shmem = local_reg; + for(int i = lastpow2(shmem_lim); i > 0; i >>= 1) + { + __syncthreads(); + if(threadIdx.y < i && threadIdx.y + i < shmem_lim) + *shmem = reduceOp(*shmem, *(shmem + i*blockDim.x)); + } + + if(threadIdx.y == 0 && inbounds) { + T &&o_ele = static_cast(finalizeOp(*shmem)); + out[outOffset] = o_ele; + } +} + +// Kernel that handles an entire reduction of a slice of a tensor per each thread +template + +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(512, 4) +#endif +__global__ void kernelReduceNoncontigDim_shared + (TensorInfo out, + TensorInfo in, + IndexType reductionStride, + IndexType reductionSize, + IndexType totalSlices, + AccT init, + ModifyOp modifyOp, + ReduceOp reduceOp, + FinalizeOp finalizeOp, + volatile AccT* stagingData, + int* semaphores) +{ + IndexType sliceIndex = blockIdx.x*blockDim.x + threadIdx.x; + + __shared__ int isLastBlockDone; + __shared__ AccT local_reduce[THC_NONCONTIG_REDUCE_BLOCK_SIZE]; + AccT* shmem = &local_reduce[threadIdx.x + threadIdx.y*blockDim.x]; + + // This kernel is intended for the latency-bound case, so we want to launch enough blocks + // to cover the entire output. This means we don't need grid-stride loops. + const IndexType outOffset = + IndexToOffset::get(sliceIndex, out); + const IndexType inOffset = + IndexToOffset::get(sliceIndex, in); + const int inbounds = (sliceIndex < totalSlices); + + if(gridDim.y == 1) + reduceChunk + (out.data, + in.data, + inbounds, + reductionStride, + reductionSize, + inOffset, + outOffset, + reductionSize < blockDim.y ? reductionSize : blockDim.y, + init, + shmem, + modifyOp, + reduceOp, + finalizeOp); + else + { + int* semaphore = semaphores + blockIdx.x; + + const IndexType chunkStart = blockIdx.y*CHUNKPERBLOCK; + const IndexType chunkSize = reductionSize - chunkStart < CHUNKPERBLOCK ? + reductionSize - chunkStart : CHUNKPERBLOCK; + const IndexType reductionStrideStaging = totalSlices; + const IndexType stagingOffset = sliceIndex; + + reduceChunk + (stagingData, + in.data, + inbounds, + reductionStride, + chunkSize, + inOffset + chunkStart*reductionStride, + stagingOffset + blockIdx.y*reductionStrideStaging, + chunkSize < blockDim.y ? chunkSize : blockDim.y, + init, + shmem, + modifyOp, + reduceOp, + SimpleCopyOp()); + + __threadfence(); // make sure writes are globally visible + __syncthreads(); // if multiple warps in this block wrote to staging, make sure they're all done + + if(threadIdx.x == 0 && threadIdx.y == 0) + { + int old = atomicAdd(semaphore, 1); + isLastBlockDone = (old == gridDim.y - 1); + } + + __syncthreads(); + + // The staging area contains gridDim.y elements along each slice. The final reduction + // begins by treating the first blockDim.y elements as "init" values. + if(isLastBlockDone) + { + if(threadIdx.y < gridDim.y) + init = stagingData[stagingOffset + threadIdx.y*reductionStrideStaging]; + IndexType remaining = gridDim.y < blockDim.y ? 0 : gridDim.y - blockDim.y; + reduceChunk + (out.data, + stagingData, + inbounds, + reductionStrideStaging, + remaining, // if 0, loop in reduceChunk is skipped, otherwise... + stagingOffset + blockDim.y*reductionStrideStaging, // ...loop begins at blockDim+1th element + outOffset, + gridDim.y < blockDim.y ? 
gridDim.y : blockDim.y, + init, + shmem, + SimpleCopyOp(), + reduceOp, + finalizeOp); + } + } +} + + +// Kernel that handles an entire reduction of a slice of a tensor per each thread +template +#if __CUDA_ARCH__ >= 350 || defined __HIP_PLATFORM_HCC__ +C10_LAUNCH_BOUNDS_2(512, 4) +#endif +__global__ void +kernelReduceNoncontigDim(TensorInfo out, + TensorInfo in, + IndexType reductionStride, + IndexType reductionSize, + IndexType totalSlices, + AccT init, + ModifyOp modifyOp, + ReduceOp reduceOp, + FinalizeOp finalizeOp) { + const IndexType sliceIndex = getReduceNoncontigDimSliceIndex(); + + if (sliceIndex >= totalSlices) { + return; + } + + // Each thread picks a point in `out` and `in` for which it is + // producing the reduction + const IndexType outOffset = + IndexToOffset::get(sliceIndex, out); + const IndexType inBaseOffset = + IndexToOffset::get(sliceIndex, in); + + // For each point in reductionSize, reduce into `r` + IndexType inOffset = inBaseOffset; + AccT r = init; + + for (IndexType i = 0; i < reductionSize; ++i) { + const AccT val = scalar_cast(in.data[inOffset]); + r = reduceOp(r, modifyOp(val)); + inOffset += reductionStride; + } + + // Write out reduced value + out.data[outOffset] = scalar_cast(finalizeOp(r)); +} + +template +__device__ __forceinline__ IndexType getReduceContigDimSliceIndex() { + // Each block handles one slice + return getLinearBlockId(); +} + +// Kernel that handles an entire reduction of a slice of a tensor per +// each block +template +__global__ void +kernelReduceContigDim(TensorInfo out, + TensorInfo in, + IndexType reductionSize, + IndexType totalSlices, + AccT init, + ModifyOp modifyOp, + ReduceOp reduceOp, + FinalizeOp finalizeOp) { + const IndexType sliceIndex = getReduceContigDimSliceIndex(); + + if (sliceIndex >= totalSlices) { + return; + } + + // Get the offset in `out` for the reduction + const IndexType outOffset = + IndexToOffset::get(sliceIndex, out); + + // Get the base offset in `in` for this block's reduction + const IndexType inBaseOffset = + IndexToOffset::get(sliceIndex, in); + + // Each thread in the block will reduce some subset of elements in + // the slice. The elements are guaranteed contiguous starting at + // `inBaseOffset`. + AccT r = init; + for (IndexType i = threadIdx.x; i < reductionSize; i += blockDim.x) { + const AccT val = scalar_cast(in.data[inBaseOffset + i]); + r = reduceOp(r, modifyOp(val)); + } + + // Reduce within the block + // FIXME: extern name + extern __shared__ char smemChar[]; + AccT* smem = (AccT*) smemChar; + r = reduceBlock(smem, blockDim.x, r, reduceOp, init); + + if (threadIdx.x == 0) { + // Write out reduced value + out.data[outOffset] = scalar_cast(finalizeOp(r)); + } +} + +inline dim3 getNoncontigReduceBlock() { + return dim3(THC_NONCONTIG_REDUCE_BLOCK_SIZE); +} + +inline dim3 getContigReduceBlock(ptrdiff_t numSlices, int64_t reductionSize) { + // If the number of slices is low but the reduction dimension size + // is high, then we should increase block size for greater parallelism. + // Aim for at least 32 warps per SM (assume 15 SMs; don't bother + // inquiring the real number for now). + int maxWarps = 4; // better occupancy if many blocks are around + // For numSlices > 15 * 8, there are > 32 warps active per SM. 
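  // Editor's note (illustrative numbers, not from the original source): with
  // numSlices = 45 and reductionSize = 1000, the chain below picks
  // maxWarps = 16 (since 45 < 15 * 4), warpsInReductionSize = ceil(1000 / 32) = 32,
  // and numWarps = min(32, 16) = 16, giving a block of 16 * 32 = 512 threads.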
+ if (numSlices < 15 * 8) { + maxWarps = 8; + if (numSlices < 15 * 4) { + maxWarps = 16; + if (numSlices < 15 * 2) { + maxWarps = 32; + } + } + } + + // Scale up block size based on the reduction dimension size + int64_t warpsInReductionSize = THCCeilDiv(reductionSize, (int64_t) 32); + int numWarps = warpsInReductionSize > (int64_t) maxWarps ? + maxWarps : (int) warpsInReductionSize; + + return dim3(numWarps * 32); +} + +inline bool getNoncontigReduceGrid(ptrdiff_t elements, dim3& grid) { + // One output point per thread + return THC_getGridFromTiles(THCCeilDiv(elements, + (ptrdiff_t) THC_NONCONTIG_REDUCE_BLOCK_SIZE), grid); +} + +inline bool getContigReduceGrid(ptrdiff_t elements, dim3& grid) { + // One output point per block + return THC_getGridFromTiles(elements, grid); +} + +// Performs a reduction out[..., 0, ...] = reduce_i(modify(in[..., i, ...])) for +// all in where i and the out's 0 are indexed at dimension `dim` +template +bool THC_reduceDim(THCState* state, + TensorType* out, + TensorType* in, + const ModifyOp modifyOp, + const ReduceOp reduceOp, + const FinalizeOp finalizeOp, + AccT init, + int dim, + int keepdim) { + ptrdiff_t inElements = THCTensor_nElement(state, in); + + int64_t reductionSize = THTensor_sizeLegacyNoScalars(in, dim); + int64_t reductionStride = THTensor_strideLegacyNoScalars(in, dim); + ptrdiff_t outElements = inElements / reductionSize; + + if (THCTensor_nDimensionLegacyAll(state, out) > MAX_CUTORCH_DIMS || + THCTensor_nDimensionLegacyAll(state, in) > MAX_CUTORCH_DIMS) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, in) == 0) { + // Zero-dim tensor; do nothing + return true; + } + + // Is the reduction dimension contiguous? If so, then we can use a + // shared memory reduction kernel to increase performance. + bool contigReduction = (reductionStride == 1); + + dim3 block; + dim3 grid; + int smemSize = 0; // contiguous reduction uses smem + if (contigReduction) { + if (!getContigReduceGrid(outElements, grid)) { + return false; + } + + block = getContigReduceBlock(outElements, reductionSize); + smemSize = sizeof(AccT) * block.x; + } else { + if (!getNoncontigReduceGrid(outElements, grid)) { + return false; + } + + block = getNoncontigReduceBlock(); + + if(outElements <= 4096) + { + // gridDim.x and blockDim.x parallelize work across slices. + // blockDim.y enables some intra-block reduction within slices. + // gridDim.y enables inter-block reduction within slices. + + // Each block covers 32 output elements. + int blockdimx = 32; + int griddimx = THCCeilDiv((int64_t)outElements, (int64_t)blockdimx); + + // Each warp reduces at most 4 slices. This heuristic can be tuned, + // but locking blockdimy to 16 is robust and reasonably performant. 
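  // Editor's note: together with blockdimx = 32 this gives 32 x 16 = 512-thread
  // blocks; threadIdx.x selects one of 32 output slices per block while the 16
  // threads along y cooperate on the reduction within that slice.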
+ int blockdimy = 16; + + int griddimy = 1; + bool coop = false; + // Rough heuristics to decide if using cooperating blocks is worthwhile + if( outElements <= 32 && reductionSize >= 4096) coop = true; + if( 32 < outElements && outElements <= 64 && reductionSize >= 4096) coop = true; + if( 64 < outElements && outElements <= 128 && reductionSize >= 4096) coop = true; + if( 128 < outElements && outElements <= 256 && reductionSize >= 4096) coop = true; + if( 256 < outElements && outElements <= 512 && reductionSize >= 4096) coop = true; + if( 512 < outElements && outElements <= 1024 && reductionSize >= 4096) coop = true; + if(1024 < outElements && outElements <= 2048 && reductionSize >= 2048) coop = true; + if(2048 < outElements && outElements <= 4096 && reductionSize >= 2048) coop = true; + // Each block reduces at most CHUNKPERBLOCK (currently 256) slices. + if(coop) + griddimy = THCCeilDiv((int64_t)reductionSize, (int64_t)CHUNKPERBLOCK); + + grid = dim3(griddimx, griddimy, 1); + block = dim3(blockdimx, blockdimy, 1); + } + } + + // Resize out to correspond to the reduced size with keepdim=True. + + // Preserve noncontiguities by unsqueezing out if necessary + THCTensor_preserveReduceDimSemantics( + state, out, THCTensor_nDimensionLegacyAll(state, in), dim, keepdim); + + // Resize out + std::vector sizes = THTensor_sizesLegacyNoScalars(in); + sizes[dim] = 1; + THCTensor_resize(state, out, sizes, {}); + + // It is possible that the tensor dimensions are able to be collapsed, + // and thus we can reduce the actual code complexity of the copy by + // exploiting this knowledge statically, since the div/mod is the + // most expensive part of the operation, more so than memory accesses. + // For instance, when copying a non-contiguous to a contiguous tensor + // (or vice versa), the contiguous tensor can be collapsed to one + // dimension, and the loop to translate the linear index to the array + // index can be similarly collapsed. That is what this unrolling is for. 
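  // Editor's note on the dispatch below: HANDLE_OUT_CASE / HANDLE_IN_CASE
  // instantiate the kernel for statically known dimensionalities 1 and 2 and
  // fall back to -1 (dims resolved at runtime); the 64-bit-index branch only
  // instantiates the all-1D and all-nD variants to keep compile time down.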
+#define HANDLE_CASE(TYPE, OUT, IN) \ + if (contigReduction) { \ + kernelReduceContigDim \ + <<>> \ + (outInfo, inInfo, reductionSize, \ + (TYPE) outElements, init, modifyOp, reduceOp, finalizeOp); \ + } else { \ + if(block.y == 1){ \ + kernelReduceNoncontigDim< \ + ScalarType, \ + TYPE, AccT, ModifyOp, ReduceOp, FinalizeOp, \ + OUT, IN> \ + <<>> \ + (outInfo, inInfo, reductionStride, reductionSize, \ + (TYPE) outElements, init, modifyOp, reduceOp, finalizeOp); \ + } \ + else \ + { \ + void* stagingData = nullptr; \ + void* semaphores = nullptr; \ + \ + if(grid.y > 1) \ + { \ + stagingData = THCudaMalloc(state, sizeof(AccT)*outElements*grid.y);\ + semaphores = THCudaMalloc(state, sizeof(int)*grid.x); \ + THCudaCheck(cudaMemsetAsync \ + (semaphores, \ + 0, \ + sizeof(int)*grid.x, \ + THCState_getCurrentStream(state))); \ + } \ + \ + kernelReduceNoncontigDim_shared \ + \ + <<>> \ + (outInfo, \ + inInfo, \ + reductionStride, \ + reductionSize, \ + (TYPE) outElements, \ + init, \ + modifyOp, \ + reduceOp, \ + finalizeOp, \ + (volatile AccT*)stagingData, \ + (int*)semaphores); \ + \ + if(grid.y > 1) \ + { \ + THCudaFree(state, stagingData); \ + THCudaFree(state, semaphores); \ + } \ + } \ + } + +#define HANDLE_IN_CASE(TYPE, OUT, IN) \ + { \ + switch (IN) { \ + case 1: \ + HANDLE_CASE(TYPE, OUT, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, OUT, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, OUT, -1); \ + break; \ + } \ + } + +#define HANDLE_OUT_CASE(TYPE, OUT, IN) \ + { \ + switch (OUT) { \ + case 1: \ + HANDLE_IN_CASE(TYPE, 1, IN); \ + break; \ + case 2: \ + HANDLE_IN_CASE(TYPE, 2, IN); \ + break; \ + default: \ + HANDLE_IN_CASE(TYPE, -1, IN); \ + break; \ + } \ + } + + if(THCTensor_canUse32BitIndexMath(state, out) && + THCTensor_canUse32BitIndexMath(state, in)) + { + TensorInfo outInfo = + getTensorInfo(state, out); + outInfo.collapseDims(); + + TensorInfo inInfo = + getTensorInfo(state, in); + inInfo.reduceDim(dim); + inInfo.collapseDims(); + HANDLE_OUT_CASE(unsigned int, outInfo.dims, inInfo.dims); + } + else + { + TensorInfo outInfo = + getTensorInfo(state, out); + outInfo.collapseDims(); + + TensorInfo inInfo = + getTensorInfo(state, in); + inInfo.reduceDim(dim); + inInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (outInfo.dims == 1 && inInfo.dims == 1) { + HANDLE_CASE(uint64_t, 1, 1); + } else { + HANDLE_CASE(uint64_t, -1, -1); + } + } +#undef HANDLE_CASE +#undef HANDLE_IN_CASE +#undef HANDLE_OUT_CASE + + + if (!keepdim) { + THCTensor_squeeze1d(state, out, out, dim); + } + return true; +} + +#undef THC_NONCONTIG_REDUCE_BLOCK_SIZE +#undef CHUNKPERBLOCK + +#endif // THC_REDUCE_INC diff --git a/thirdparty/libtorch/include/THC/THCReduceAll.cuh b/thirdparty/libtorch/include/THC/THCReduceAll.cuh new file mode 100644 index 0000000000..0e009f3f16 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCReduceAll.cuh @@ -0,0 +1,341 @@ +#ifndef THC_REDUCEALL_INC +#define THC_REDUCEALL_INC + +// +// This file contains dimension reduction operation functions and +// kernels that work on both contiguous and non-contiguous tensor +// arguments of arbitrary (up to MAX_CUTORCH_DIMS) dimensioned +// arguments without copying or temporary storage, for reducing an +// entire tensor to one value. 
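// Tensors with up to THC_TWO_PASS_REDUCTION_SIZE elements are reduced by a
// single block in one pass; larger tensors use a two-pass scheme in which
// per-block partial results are written to scratch space and reduced again
// (see callReduceAll below).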
+// + +#include +#include + +// Size per each reduction block +#define THC_REDUCE_ALL_BLOCK_SIZE 1024L + +// Cutoff size for two-pass reduction +#define THC_TWO_PASS_REDUCTION_SIZE 2048L + +// Kernel that handles an entire reduction of a tensor in one pass +template +__global__ void +#if defined(__HIP_PLATFORM_HCC__) +C10_LAUNCH_BOUNDS_1(THC_REDUCE_ALL_BLOCK_SIZE) +#endif +kernelReduceAll(TensorInfo in, + IndexType totalElements, + AccT init, + ModifyOp modifyOp, + ReduceOp reduceOp, + AccT* out) { + // With a block-wide stride, have each thread perform its own reduction. + AccT r = init; + for (IndexType i = threadIdx.x; i < totalElements; i += blockDim.x) { + const IndexType inOffset = IndexToOffset::get(i, in); + const AccT val = scalar_cast(in.data[inOffset]); + r = reduceOp(r, modifyOp(val)); + } + + // Reduce within the block + extern __shared__ char smemChar[]; + AccT* smem = (AccT*) smemChar; + r = reduceBlock(smem, blockDim.x, r, reduceOp, init); + + if (threadIdx.x == 0) { + // Write out reduced value + *out = r; + } +} + +template +__device__ __forceinline__ IndexType getStartIndex(IndexType totalSize) { + IndexType sizePerBlock = THCCeilDiv(totalSize, (IndexType) gridDim.x); + return blockIdx.x * sizePerBlock; +} + +template +__device__ __forceinline__ IndexType getEndIndex(IndexType totalSize) { + IndexType sizePerBlock = THCCeilDiv(totalSize, (IndexType) gridDim.x); + return min((IndexType) ((blockIdx.x + 1) * sizePerBlock), totalSize); +} + +// Kernel that handles an entire reduction of a tensor in two passes +template +#if defined(__HIP_PLATFORM_HCC__) +C10_LAUNCH_BOUNDS_1(THC_REDUCE_ALL_BLOCK_SIZE) +#endif +__global__ void +kernelReduceAllPass1(TensorInfo in, + IndexType totalElements, + AccT init, + ModifyOp modifyOp, + ReduceOp reduceOp, + AccT* scratchSpace) { + const IndexType startIndex = getStartIndex(totalElements); + const IndexType endIndex = getEndIndex(totalElements); + + // With a block-wide stride, have each thread perform its own reduction. + AccT r = init; + for (IndexType i = startIndex + threadIdx.x; i < endIndex; i += blockDim.x) { + const IndexType inOffset = IndexToOffset::get(i, in); + const AccT val = scalar_cast(in.data[inOffset]); + r = reduceOp(r, modifyOp(val)); + } + + // Reduce within the block + extern __shared__ char smemChar[]; + AccT* smem = (AccT*) smemChar; + r = reduceBlock(smem, blockDim.x, r, reduceOp, init); + + if (threadIdx.x == 0) { + // Write out block-wide reduced value + scratchSpace[blockIdx.x] = r; + } +} + +template +#if defined(__HIP_PLATFORM_HCC__) +C10_LAUNCH_BOUNDS_1(THC_REDUCE_ALL_BLOCK_SIZE) +#endif +__global__ void +kernelReduceAllPass2(int numPass1Blocks, + T init, + ReduceOp reduceOp, + T* scratchSpace, + T* out) { + T r = init; + if (threadIdx.x < numPass1Blocks) { + r = scratchSpace[threadIdx.x]; + } + + // Reduce within the block + extern __shared__ char smemChar[]; + T* smem = (T*) smemChar; + r = reduceBlock(smem, numPass1Blocks, r, reduceOp, init); + + if (threadIdx.x == 0) { + *out = r; + } +} + +// Perform a two-pass reduction if the tensor is large enough to +// warrant it. 
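// Editor's illustration (assumed sizes): for a 1,000,000-element tensor the
// two-pass path launches min(ceil(1e6 / 1024), scratch-space slots, 1024)
// pass-1 blocks of 1024 threads, each writing one partial result to
// scratchSpace; pass 2 then reduces those partials in a single block whose
// thread count equals the number of pass-1 blocks.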
+inline bool isTwoPassReductionSize(ptrdiff_t elements) { + return (elements > THC_TWO_PASS_REDUCTION_SIZE); +} + +template +inline ptrdiff_t getTwoPassBlocks(THCState* state, ptrdiff_t elements) { + ptrdiff_t numBlocks = THCCeilDiv(elements, (ptrdiff_t)THC_REDUCE_ALL_BLOCK_SIZE); + + // We can only have as many blocks as there is scratch space + ptrdiff_t scratchSpace = + THCState_getCurrentDeviceScratchSpaceSize(state) / sizeof(T); + THAssert(scratchSpace > 0); + + // Limit to 1024 due to dimensionality constraint + if (scratchSpace > 1024) { + scratchSpace = 1024; + } + + if (numBlocks > scratchSpace) { + numBlocks = scratchSpace; + } + + return numBlocks; +} + +// Get the block/grid size that we want +template +inline void getPass1ReduceBlockGrid(THCState* state, ptrdiff_t elements, + dim3& grid, dim3& block) { + grid = dim3(getTwoPassBlocks(state, elements)); + block = dim3(THC_REDUCE_ALL_BLOCK_SIZE); +} + +template +inline void getPass2ReduceBlockGrid(THCState* state, ptrdiff_t elements, + dim3& grid, dim3& block) { + grid = dim3(1); + // We only need as many threads as there were blocks originally + block = dim3(getTwoPassBlocks(state, elements)); +} + +inline void getSinglePassReduceBlockGrid(ptrdiff_t elements, + dim3& grid, dim3& block) { + grid = dim3(1); + block = dim3(THC_REDUCE_ALL_BLOCK_SIZE); +} + +template +void callReduceAll(THCState* state, + const TensorInfo& in, + ptrdiff_t totalElements, + AccT init, + const ModifyOp& modifyOp, + const ReduceOp& reduceOp, + AccT* devOut) { + dim3 grid; + dim3 block; + + if (isTwoPassReductionSize(totalElements)) { + void* scratchSpace = THCudaMalloc(state, THCState_getCurrentDeviceScratchSpaceSize(state)); + + getPass1ReduceBlockGrid(state, totalElements, grid, block); + size_t smemSize = block.x * sizeof(AccT); + + kernelReduceAllPass1 + <<>>( + in, (IndexType) totalElements, init, modifyOp, reduceOp, + (AccT*) scratchSpace); + + int numPass1Blocks = grid.x; + getPass2ReduceBlockGrid(state, totalElements, grid, block); + smemSize = block.x * sizeof(AccT); + + kernelReduceAllPass2 + <<>>( + numPass1Blocks, init, reduceOp, + (AccT*) scratchSpace, devOut); + + THCudaFree(state, scratchSpace); + } else { + getSinglePassReduceBlockGrid(totalElements, grid, block); + size_t smemSize = block.x * sizeof(AccT); + + kernelReduceAll + <<>>( + in, (IndexType) totalElements, init, modifyOp, reduceOp, devOut); + } +} + +// Reduces the entire tensor to one value. `out` points to +// host-resident memory. +template +bool THC_reduceAll(THCState* state, + TensorType* in, + const ModifyOp& modifyOp, + const ReduceOp& reduceOp, + AccT init, + AccT* out, + int outOnDevice) { + ptrdiff_t inElements = THCTensor_nElement(state, in); + + if (THCTensor_nDimensionLegacyAll(state, in) > MAX_CUTORCH_DIMS) { + return false; + } + + if (THCTensor_nDimensionLegacyAll(state, in) == 0) { + // Zero-dim tensor; do nothing + *out = init; + return true; + } + + bool freeDevOut = false; + AccT* devOut = out; + if (!outOnDevice) { + // Use the stream-specific scratch space for the reduction kernel + // to write out its value + devOut = static_cast(THCudaMalloc(state, + THCState_getCurrentDeviceScratchSpaceSize(state))); + freeDevOut = true; + } + + // It is possible that the tensor dimensions are able to be collapsed, + // and thus we can reduce the actual code complexity of the copy by + // exploiting this knowledge statically, since the div/mod is the + // most expensive part of the operation, more so than memory accesses. 
+ // For instance, when copying a non-contiguous to a contiguous tensor + // (or vice versa), the contiguous tensor can be collapsed to one + // dimension, and the loop to translate the linear index to the array + // index can be similarly collapsed. That is what this unrolling is for. +#define HANDLE_CASE(TYPE, IN) \ + callReduceAll( \ + state, inInfo, inElements, init, modifyOp, \ + reduceOp, devOut); + +#define HANDLE_IN_CASE(TYPE, IN) \ + { \ + switch (IN) { \ + case 1: \ + HANDLE_CASE(TYPE, 1); \ + break; \ + case 2: \ + HANDLE_CASE(TYPE, 2); \ + break; \ + default: \ + HANDLE_CASE(TYPE, -1); \ + break; \ + } \ + } + + if (THCTensor_canUse32BitIndexMath(state, in)) { + TensorInfo inInfo = + getTensorInfo(state, in); + inInfo.collapseDims(); + + HANDLE_IN_CASE(unsigned int, inInfo.dims); + } else { + TensorInfo inInfo = + getTensorInfo(state, in); + inInfo.collapseDims(); + + /* + Only instantiates the all 1D special case and the fallback all nD case for + large (64-bit indexed) tensors to reduce compilation time. + */ + if (inInfo.dims == 1) { + HANDLE_IN_CASE(uint64_t, 1); + } else { + HANDLE_IN_CASE(uint64_t, -1); + } + } +#undef HANDLE_CASE +#undef HANDLE_IN_CASE + + // If our destination is not on the device, copy the value back to + // the host (synchronous!) + if (!outOnDevice) { + cudaStream_t stream = THCState_getCurrentStream(state); + THCudaCheck(cudaMemcpyAsync(out, + devOut, + sizeof(AccT), + cudaMemcpyDeviceToHost, + stream)); + THCudaCheck(cudaStreamSynchronize(stream)); + } + + if (freeDevOut) { + THCudaFree(state, devOut); + } + + return true; +} + +#undef THC_REDUCE_ALL_BLOCK_SIZE +#undef THC_TWO_PASS_REDUCTION_SIZE + +#endif // THC_REDUCEALL_INC diff --git a/thirdparty/libtorch/include/THC/THCReduceApplyUtils.cuh b/thirdparty/libtorch/include/THC/THCReduceApplyUtils.cuh new file mode 100644 index 0000000000..4324e19597 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCReduceApplyUtils.cuh @@ -0,0 +1,152 @@ +#ifndef THC_REDUCE_APPLY_UTILS_INC +#define THC_REDUCE_APPLY_UTILS_INC + +#include +#include +#include +#include +#include +#include + +// Enum that indicates whether tensor arguments are read/write or +// read-only +enum TensorArgType { ReadWrite, ReadOnly }; + +template +__device__ __forceinline__ IndexType getLinearBlockId() { + return blockIdx.z * gridDim.y * gridDim.x + + blockIdx.y * gridDim.x + + blockIdx.x; +} + +// Reduce N values concurrently, i.e. suppose N = 2, and there are 4 threads: +// (1, 2), (3, 4), (5, 6), (7, 8), then the return in threadVals for thread 0 +// is (1 + 3 + 5 + 7, 2 + 4 + 6 + 8) = (16, 20) +// +// If smem is not used again, there is no need to __syncthreads before this +// call. However, if smem will be used, e.g., this function is called in a loop, +// then __syncthreads is needed either before or afterwards to prevent non-0 +// threads overriding smem in the next loop before num-0 thread reads from it. 
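// Editor's sketch (hypothetical host-side reference, not part of the original
// header): what the device routine declared just below computes, ignoring the
// shared-memory and warp mechanics. With N = 2 and four threads holding
// (1,2), (3,4), (5,6), (7,8) it yields (16, 20), matching the example above.
template <typename T, int N, typename ReduceOp>
inline void reduceNValuesReference(const T (*perThreadVals)[N],
                                   unsigned int numThreads,
                                   T out[N], ReduceOp reduceOp, T init) {
  for (int i = 0; i < N; ++i) {
    T acc = init;
    for (unsigned int t = 0; t < numThreads; ++t) {
      acc = reduceOp(acc, perThreadVals[t][i]);  // fold thread t's i-th value
    }
    out[i] = acc;
  }
}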
+template +__device__ void reduceNValuesInBlock(T *smem, + T threadVals[N], + const unsigned int numVals, + ReduceOp reduceOp, + T init) { + if (numVals == 0) { + #pragma unroll + for (int i = 0; i < N; ++i) { + threadVals[i] = init; + } + return; + } + + // We store each of the N values contiguously, so if N = 2, all values for + // the first threadVal for each thread in the block are stored followed by + // all of the values for the second threadVal for each thread in the block + if (threadIdx.x < numVals) { + #pragma unroll + for (int i = 0; i < N; ++i) { + smem[i * numVals + threadIdx.x] = threadVals[i]; + } + } + __syncthreads(); + + // Number of lanes in the final reduction --> this is used to determine + // where to put the outputs of each of the n things we are reducing. If + // nLP = 32, then we have the 32 outputs for the first threadVal, + // followed by the 32 outputs for the second threadVal, etc. + const unsigned int numLanesParticipating = min(numVals, warpSize); + + if (numVals > warpSize && ((threadIdx.x / warpSize) == 0 )) { + #pragma unroll + for (int i = 0; i < N; ++i) { + threadVals[i] = threadIdx.x < numVals ? threadVals[i] : init; + } + + for (int i = warpSize + threadIdx.x; i < numVals; i += warpSize) { + #pragma unroll + for (int j = 0; j < N; ++j) { + threadVals[j] = reduceOp(threadVals[j], smem[j * numVals + i]); + } + } + + #pragma unroll + for (int i = 0; i < N; ++i) { + smem[i * numLanesParticipating + threadIdx.x] = threadVals[i]; + } + } + __syncthreads(); + + if (threadIdx.x == 0) { + if (numLanesParticipating == 32) { + #pragma unroll + for (int i = 0; i < N; ++i) { + #pragma unroll + for (int j = 1; j < 32; ++j) { + threadVals[i] = reduceOp(threadVals[i], smem[i * 32 + j]); + } + } + } else { + #pragma unroll + for (int i = 0; i < N; ++i) { + for (int j = 1; j < numLanesParticipating; ++j) { + threadVals[i] = reduceOp(threadVals[i], smem[i * numVals + j]); + } + } + } + } +} + +// Block-wide reduction in shared memory helper; only threadIdx.x == 0 will +// return the reduced value +// +// If smem is not used again, there is no need to __syncthreads before this +// call. However, if smem will be used, e.g., this function is called in a loop, +// then __syncthreads is needed either before or afterwards to prevent non-0 +// threads overriding smem in the next loop before num-0 thread reads from it. +template +__device__ T reduceBlock(T* smem, + const unsigned int numVals, + T threadVal, + ReduceOp reduceOp, + T init) { + reduceNValuesInBlock(smem, &threadVal, numVals, reduceOp, init); + return threadVal; +} + + +// Block-wide reduction where each thread locally reduces N +// values before letting a single warp take over - assumes +// threadVals is in registers, not shared memory +// +// If smem is not used again, there is no need to __syncthreads before this +// call. However, if smem will be used, e.g., this function is called in a loop, +// then __syncthreads is needed either before or afterwards to prevent non-0 +// threads overriding smem in the next loop before num-0 thread reads from it. +template +__device__ T reduceBlockWithNThreadLocalReductions(T *smem, + T threadVals[N], + const unsigned int numVals, + ReduceOp reduceOp, + T init) { + int offset = threadIdx.x * N; + T local = offset < numVals ? threadVals[0] : init; + + #pragma unroll + for (int i = 1; i < N; ++i) { + ++offset; + T next = offset < numVals ? threadVals[i] : init; + local = reduceOp(local, next); + } + + return reduceBlock(smem, blockDim.x < numVals ? 
blockDim.x : numVals, local, reduceOp, init); +} + +// Make sure the given tensor doesn't have too many dimensions +void THCCheckTensorDims(THCState* state, THCudaTensor* tensor, int arg); + +// Produces a grid with at least one point per tile +THC_API bool THC_getGridFromTiles(ptrdiff_t gridTiles, dim3& grid); + +#endif // THC_REDUCE_APPLY_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCScanUtils.cuh b/thirdparty/libtorch/include/THC/THCScanUtils.cuh new file mode 100644 index 0000000000..75576238ca --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCScanUtils.cuh @@ -0,0 +1,218 @@ +#ifndef THC_SCAN_UTILS_INC +#define THC_SCAN_UTILS_INC + +#include +#include +#include + +// Collection of in-kernel scan / prefix sum utilities + +// Inclusive Scan via an upsweep/downsweep mechanism. Assumes: +// +// 1. Power2ScanSize is a power of 2. This code still works for collections that +// do not exactly contain a power of 2 number of elements, simply round up to the +// nearest power of 2 and then call. +// +// 2. That there are two-elements per thread, i.e. the size of the smem storage +// is 2 * blockDim.x * sizeof(T). +// +// Consider a (+)-Scan on the following elements: +// +// Upsweep: +// +// 0 1 2 3 4 5 6 7 +// 1 5 9 13 +// 6 22 +// 28 +// +// Downsweep: +// 15 +// 3 10 21 +template +__device__ void inclusivePrefixScan(T *smem, BinaryOp binop) { + // Reduce step ("upsweep") +#pragma unroll + for (int stride = 1; stride < Power2ScanSize; stride <<= 1) { + int index = (threadIdx.x + 1) * stride * 2 - 1; + if (index < Power2ScanSize) { + smem[index] = binop(smem[index], smem[index - stride]); + } + __syncthreads(); + } + + // Post-reduce step ("downsweep") +#pragma unroll + for (int stride = Power2ScanSize / 4; stride > 0; stride >>= 1) { + int index = (threadIdx.x + 1) * stride * 2 - 1; + if ((index + stride) < Power2ScanSize) { + smem[index + stride] = binop(smem[index + stride], smem[index]); + } + __syncthreads(); + } +} + +// Generic Op that can be used to support segmented scans by re-using +// the basic inclusiveScanOp. Merely requires that the input data has both +// a flag and val component +template +struct SegmentedScanOp { + __host__ __device__ SegmentedScanOp(BinaryOp binop): _binop(binop) {} + __host__ __device__ inline T operator()(const T& a, const T& b) { + T c; + c.val = a.flag ? a.val : _binop(a.val, b.val); + c.flag = a.flag | b.flag; + return c; + } + + BinaryOp _binop; +}; + +// Extends the above Inclusive Scan to support segments. It has the same properties +// but also takes a flag array that indicates the starts of "segments", i.e. individual +// units to scan. For example, consider the following (+)-scan that is segmented: +// +// Input: [1, 3, 2, 4, 1, 2, 3, 2, 1, 4] +// Flags: [1, 0, 0, 1, 0, 1, 1, 0, 1, 0] +// Output: 1 4 6 4 5 2 3 5 1 5 +// +// So we see that each "flag" resets the scan to that index. +template +__device__ void segmentedInclusivePrefixScan(T *smem, bool *bmem, BinaryOp binop) { + // Reduce step ("upsweep") +#pragma unroll + for (int stride = 1; stride < Power2ScanSize; stride <<= 1) { + int index = (threadIdx.x + 1) * stride * 2 - 1; + if (index < Power2ScanSize) { + smem[index] = bmem[index] ? 
smem[index] : binop(smem[index], smem[index - stride]); + bmem[index] = bmem[index] | bmem[index - stride]; + } + __syncthreads(); + } + + // Post-reduce step ("downsweep") +#pragma unroll + for (int stride = Power2ScanSize / 4; stride > 0; stride >>= 1) { + int index = (threadIdx.x + 1) * stride * 2 - 1; + if ((index + stride) < Power2ScanSize) { + smem[index + stride] = bmem[index + stride] ? smem[index + stride] : binop(smem[index + stride], smem[index]); + bmem[index + stride] = bmem[index + stride] | bmem[index]; + } + __syncthreads(); + } +} + +// Inclusive prefix sum using shared memory +template +__device__ void inclusivePrefixScan(T* smem, T in, T* out, BinaryFunction binop) { + // FIXME: this is a slow, simple implementation; need up/down sweep, + // prevent smem conflicts + smem[threadIdx.x] = in; + + __syncthreads(); + + for (int offset = 1; offset < blockDim.x; offset *= 2) { + T val = 0; + + if (threadIdx.x >= offset) { + val = binop(smem[threadIdx.x - offset], smem[threadIdx.x]); + } + + __syncthreads(); + if (threadIdx.x >= offset) { + smem[threadIdx.x] = val; + } + + __syncthreads(); + } + + *out = smem[threadIdx.x]; + + // Prevent write-after-read dependencies on smem usage above if necessary + if (KillWARDependency) { + __syncthreads(); + } +} + +// Exclusive prefix sum using shared memory +template +__device__ void exclusivePrefixScan(T* smem, T in, T* out, T* carry, BinaryFunction binop) { + // FIXME: crappy implementation + // We kill write-after-read dependencies separately below, hence the `false` + inclusivePrefixScan(smem, in, out, binop); + + *out -= in; + *carry = smem[blockDim.x - 1]; + + // Prevent write-after-read dependencies on smem usage above if necessary + if (KillWARDependency) { + __syncthreads(); + } +} + +// Inclusive prefix sum for binary vars using intra-warp voting + +// shared memory +template +__device__ void inclusiveBinaryPrefixScan(T* smem, bool in, T* out, BinaryFunction binop) { + // Within-warp, we use warp voting. +#if defined (__HIP_PLATFORM_HCC__) + unsigned long long int vote = WARP_BALLOT(in); + T index = __popcll(getLaneMaskLe() & vote); + T carry = __popcll(vote); +#else + T vote = WARP_BALLOT(in); + T index = __popc(getLaneMaskLe() & vote); + T carry = __popc(vote); +#endif + + int warp = threadIdx.x / C10_WARP_SIZE; + + // Per each warp, write out a value + if (getLaneId() == 0) { + smem[warp] = carry; + } + + __syncthreads(); + + // Sum across warps in one thread. 
This appears to be faster than a + // warp shuffle scan for CC 3.0+ + if (threadIdx.x == 0) { + int current = 0; + for (int i = 0; i < blockDim.x / C10_WARP_SIZE; ++i) { + T v = smem[i]; + smem[i] = binop(smem[i], current); + current = binop(current, v); + } + } + + __syncthreads(); + + // load the carry from the preceding warp + if (warp >= 1) { + index = binop(index, smem[warp - 1]); + } + + *out = index; + + if (KillWARDependency) { + __syncthreads(); + } +} + +// Exclusive prefix sum for binary vars using intra-warp voting + +// shared memory +template +__device__ void exclusiveBinaryPrefixScan(T* smem, bool in, T* out, T* carry, BinaryFunction binop) { + inclusiveBinaryPrefixScan(smem, in, out, binop); + + // Inclusive to exclusive + *out -= (T) in; + + // The outgoing carry for all threads is the last warp's sum + *carry = smem[THCCeilDiv(blockDim.x, C10_WARP_SIZE) - 1]; + + if (KillWARDependency) { + __syncthreads(); + } +} + +#endif // THC_SCAN_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCSleep.h b/thirdparty/libtorch/include/THC/THCSleep.h new file mode 100644 index 0000000000..b3f20a3934 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCSleep.h @@ -0,0 +1,10 @@ +#ifndef THC_SPIN_INC +#define THC_SPIN_INC + +#include +#include + +// enqueues a kernel that spins for the specified number of cycles +THC_API void THC_sleep(THCState* state, int64_t cycles); + +#endif diff --git a/thirdparty/libtorch/include/THC/THCSortUtils.cuh b/thirdparty/libtorch/include/THC/THCSortUtils.cuh new file mode 100644 index 0000000000..7d39b3be53 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCSortUtils.cuh @@ -0,0 +1,229 @@ +#ifndef THC_SORT_UTILS_INC +#define THC_SORT_UTILS_INC + +#include +#include +#include +#include + +// Collection of kernel sort routines +template +struct LTComp { + __device__ inline bool operator()(const T& a, const T& b) const { + return (handleNaN && THCNumerics::isnan(b) && !THCNumerics::isnan(a)) || THCNumerics::lt(a, b); + } +}; + +template +struct GTComp { + __device__ inline bool operator()(const T& a, const T& b) const { + return (handleNaN && THCNumerics::isnan(a) && !THCNumerics::isnan(b)) || THCNumerics::gt(a, b); + } +}; + +template +__device__ inline void swapVars(T& t1, T& t2) { + T tmp = t1; + t1 = t2; + t2 = tmp; +} + +template +__device__ inline void bitonicSwap(K& kA, V& vA, bool& validA, + K& kB, V& vB, bool& validB, + bool dir, + const Comparator& comp) { + // Invalid entries always sort to the end + bool swap = (comp(kA, kB) && validA) || !validB; + if (swap == dir) { + swapVars(kA, kB); + swapVars(vA, vB); + swapVars(validA, validB); + } +}; + +template +__device__ inline void bitonicSwapKeys(K& kA, bool& validA, + K& kB, bool& validB, + bool dir, + const Comparator& comp) { + bool swap = (comp(kA, kB) && validA) || !validB; + if (swap == dir) { + swapVars(kA, kB); + swapVars(validA, validB); + } +} + +template +__device__ inline void bitonicSort(K keys[Power2SortSize], + V values[Power2SortSize], + bool valid[Power2SortSize], + const Comparator& comp) { +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int size = 2; size < Power2SortSize; size *= 2) { + bool flag = ((threadIdx.x & (size / 2)) != 0); + +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int stride = size / 2; stride > 0; stride /= 2) { + + __syncthreads(); + + unsigned int pos = 2 * threadIdx.x - (threadIdx.x & (stride - 1)); + bitonicSwap( + keys[pos], values[pos], valid[pos], + keys[pos + stride], values[pos + stride], valid[pos 
+ stride], + flag, comp); + } + } + +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int stride = Power2SortSize / 2; stride > 0; stride /= 2) { + + __syncthreads(); + + unsigned int pos = 2 * threadIdx.x - (threadIdx.x & (stride - 1)); + bitonicSwap( + keys[pos], values[pos], valid[pos], + keys[pos + stride], values[pos + stride], valid[pos + stride], + false, comp); + } + + __syncthreads(); + +} + +template +__device__ inline void bitonicSortKeys(K keys[Power2SortSize], + bool valid[Power2SortSize], + const Comparator& comp) { +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int size = 2; size < Power2SortSize; size *= 2) { + bool flag = ((threadIdx.x & (size / 2)) != 0); + +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int stride = size / 2; stride > 0; stride /= 2) { + + __syncthreads(); + + unsigned int pos = 2 * threadIdx.x - (threadIdx.x & (stride - 1)); + bitonicSwapKeys( + keys[pos], valid[pos], + keys[pos + stride], valid[pos + stride], + flag, comp); + } + } + +#ifndef __HIP_PLATFORM_HCC__ +#pragma unroll +#endif + for (unsigned int stride = Power2SortSize / 2; stride > 0; stride /= 2) { + __syncthreads(); + + unsigned int pos = 2 * threadIdx.x - (threadIdx.x & (stride - 1)); + bitonicSwapKeys( + keys[pos], valid[pos], + keys[pos + stride], valid[pos + stride], + false, comp); + } + + __syncthreads(); + +} + +// Sorts (key, value) pairs (in different tensors) in-place; i.e., +// modifies the input `keys` and `values` +template +C10_LAUNCH_BOUNDS_1(1024) +__global__ void +bitonicSortKVInPlace(TensorInfo keys, + IndexType keySlices, + IndexType keySliceSize, + IndexType keySliceStride, + TensorInfo values, + IndexType valueSliceStride, + Comparator comp) { + // Find the slice of the tensor that we are sorting + const IndexType linearIndex = getLinearBlockId(); + // Tiling the slices could have us be out of bounds, if there are a + // lot of slices to sort + if (linearIndex >= keySlices) { + return; + } + + __shared__ K sharedKeys[Power2SortSize]; + __shared__ V sharedValues[Power2SortSize]; + __shared__ bool sharedValid[Power2SortSize]; + + const IndexType keyStartOffset = + IndexToOffset::get(linearIndex, keys); + const IndexType valueStartOffset = + IndexToOffset::get(linearIndex, values); + + // If the sort size is 1, the data is already sorted + if (Power2SortSize == 1) { + return; + } else { + // Otherwise, each thread is responsible for loading and storing 2 + // elements. The sort size is guaranteed to be >= 2 + const int elem1 = threadIdx.x; + const int elem2 = threadIdx.x + (Power2SortSize / 2); + + bool valid1 = (elem1 < keySliceSize); + K k1 = valid1 ? + keys.data[keyStartOffset + elem1 * keySliceStride] : ScalarConvert::to(0); + V v1 = valid1 ? + values.data[valueStartOffset + elem1 * valueSliceStride] : ScalarConvert::to(0); + + sharedKeys[elem1] = k1; + sharedValues[elem1] = v1; + sharedValid[elem1] = valid1; + + bool valid2 = (elem2 < keySliceSize); + K k2 = valid2 ? + keys.data[keyStartOffset + elem2 * keySliceStride] : ScalarConvert::to(0); + V v2 = valid2 ? + values.data[valueStartOffset + elem2 * valueSliceStride] : ScalarConvert::to(0); + + sharedKeys[elem2] = k2; + sharedValues[elem2] = v2; + sharedValid[elem2] = valid2; + + // Sort! 
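      // Editor's note: the call below sorts all Power2SortSize shared entries;
      // slots marked invalid (element index >= keySliceSize) always sort to the
      // end regardless of the comparator, which is why only valid slots are
      // written back afterwards.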
+ bitonicSort( + sharedKeys, sharedValues, sharedValid, comp); + + // elem1 and elem2 values might be out-of-range, if the data size we are + // sorting is smaller than half the power2 size + if (valid1) { + keys.data[keyStartOffset + elem1 * keySliceStride] = + sharedKeys[elem1]; + values.data[valueStartOffset + elem1 * valueSliceStride] = + sharedValues[elem1]; + } + + if (valid2) { + keys.data[keyStartOffset + elem2 * keySliceStride] = + sharedKeys[elem2]; + values.data[valueStartOffset + elem2 * valueSliceStride] = + sharedValues[elem2]; + } + } +} + +uint64_t nextHighestPowerOf2(uint64_t n); + +#endif // THC_SORT_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCStorage.h b/thirdparty/libtorch/include/THC/THCStorage.h new file mode 100644 index 0000000000..4d4d9abf09 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCStorage.h @@ -0,0 +1,18 @@ +#ifndef THC_STORAGE_INC +#define THC_STORAGE_INC + +#include +#include + +#define THCStorage_(NAME) TH_CONCAT_4(TH,CReal,Storage_,NAME) + +#include +#include + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCStorage.hpp b/thirdparty/libtorch/include/THC/THCStorage.hpp new file mode 100644 index 0000000000..62a1d950a4 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCStorage.hpp @@ -0,0 +1,26 @@ +#pragma once + +// STOP!!! Thinking of including this header directly? Please +// read Note [TH abstraction violation] + +#include +// Should work with THStorageClass +#include + +#include + +#include +#include +#include + +THC_API THCStorage* THCStorage_new(THCState* state, caffe2::TypeMeta); + +THC_API void THCStorage_retain(THCState *state, THCStorage *storage); + +THC_API void THCStorage_resize(THCState *state, THCStorage *storage, ptrdiff_t size); +THC_API int THCStorage_getDevice(THCState* state, const THCStorage* storage); + +THC_API THCStorage* THCStorage_newWithDataAndAllocator( + THCState *state, at::ScalarType scalar_type, + at::DataPtr&& data, ptrdiff_t size, + at::Allocator* allocator); diff --git a/thirdparty/libtorch/include/THC/THCStorageCopy.h b/thirdparty/libtorch/include/THC/THCStorageCopy.h new file mode 100644 index 0000000000..3a6dc0c7f6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCStorageCopy.h @@ -0,0 +1,17 @@ +#ifndef THC_STORAGE_COPY_INC +#define THC_STORAGE_COPY_INC + +#include +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCTensor.h b/thirdparty/libtorch/include/THC/THCTensor.h new file mode 100644 index 0000000000..c8ebd5d3d6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensor.h @@ -0,0 +1,26 @@ +#ifndef THC_TENSOR_INC +#define THC_TENSOR_INC + +#include +#include +#include + +#define THCTensor_(NAME) TH_CONCAT_4(TH,CReal,Tensor_,NAME) + +#define THC_DESC_BUFF_LEN 64 + +typedef struct THC_CLASS THCDescBuff +{ + char str[THC_DESC_BUFF_LEN]; +} THCDescBuff; + +#include +#include + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCTensor.hpp b/thirdparty/libtorch/include/THC/THCTensor.hpp new file mode 100644 index 0000000000..b543c0af25 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensor.hpp @@ -0,0 +1,64 @@ +#pragma once + +// STOP!!! Thinking of including this header directly? 
Please +// read Note [TH abstraction violation] + +#include +#include +#include +#include + +#include +#include + +// See [NOTE: nDimension vs nDimensionLegacyNoScalars vs nDimensionLegacyAll] +THC_API int THCTensor_nDimension(THCState *state, const THCTensor *self); +THC_API int THCTensor_nDimensionLegacyNoScalars(THCState *state, const THCTensor *self); +THC_API int THCTensor_nDimensionLegacyAll(THCState *state, const THCTensor *self); + +THC_API int64_t THCTensor_size(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_sizeLegacyNoScalars(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_stride(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_strideLegacyNoScalars(THCState *state, const THCTensor *self, int dim); + +THC_API THCTensor *THCTensor_new(THCState *state, caffe2::TypeMeta type_meta); + +THC_API void THCTensor_resize(THCState *state, THCTensor *tensor, at::IntArrayRef size, at::IntArrayRef stride); +THC_API void THCTensor_resizeNd(THCState *state, THCTensor *tensor, int nDimension, const int64_t *size, const int64_t *stride); +THC_API void THCTensor_resizeAs(THCState *state, THCTensor *tensor, THCTensor *src); + +THC_API void THCTensor_set(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_setStorage(THCState *state, THCTensor *self, THCStorage *storage_, ptrdiff_t storageOffset_, at::IntArrayRef size_, at::IntArrayRef stride_); +THC_API void THCTensor_setStorageNd(THCState *state, THCTensor *self, THCStorage *storage, ptrdiff_t storageOffset, int nDimension, const int64_t *size, const int64_t *stride); + +THC_API void THCTensor_squeeze1d(THCState *state, THCTensor *self, THCTensor *src, int dimension_); +THC_API void THCTensor_unsqueeze1d(THCState *state, THCTensor *self, THCTensor *src, int dimension_); + +THC_API bool THCTensor_allContiguous(THCState *state, THCTensor **inputs, int numInputs); +THC_API ptrdiff_t THCTensor_nElement(THCState *state, const THCTensor *self); + +THC_API void THCTensor_retain(THCState *state, THCTensor *self); +THC_API void THCTensor_free(THCState *state, THCTensor *self); + +THC_API int THCTensor_getDevice(THCState* state, const THCTensor* tensor); +THC_API bool THCTensor_allSameDevice(THCState* state, THCTensor ** inputs, int numInputs); + +/* Can we use 32 bit math for indexing? */ +THC_API bool THCTensor_canUse32BitIndexMath(THCState* state, const THCTensor* t, ptrdiff_t max_elem=INT32_MAX); +/* Are all tensors 32-bit indexable? */ +THC_API bool THCTensor_all32BitIndexable(THCState* state, THCTensor** inputs, int numInputs); +THC_API void THCTensor_preserveReduceDimSemantics(THCState *state, THCTensor *tensor, int in_dims, + int64_t dimension, int keepdim); +/* Returns false if there is no possibility that the tensor */ +/* has more than one index that references the same datapoint, */ +/* true otherwise. 
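   (For example, a tensor produced by expand() has stride 0 along the broadcast
   dimension, so many indices alias the same element.)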
*/ +THC_API bool THCTensor_maybeOverlappingIndices(THCState* state, const THCTensor* t); + +#include +#include + +#include +#include + +#include +#include diff --git a/thirdparty/libtorch/include/THC/THCTensorCopy.h b/thirdparty/libtorch/include/THC/THCTensorCopy.h new file mode 100644 index 0000000000..9366c37b04 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorCopy.h @@ -0,0 +1,18 @@ +#ifndef TH_CUDA_TENSOR_COPY_INC +#define TH_CUDA_TENSOR_COPY_INC + +#include +#include +#include + + +#include +#include + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCTensorCopy.hpp b/thirdparty/libtorch/include/THC/THCTensorCopy.hpp new file mode 100644 index 0000000000..ff01449ba4 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorCopy.hpp @@ -0,0 +1,18 @@ +#pragma once + +#include + +template +void THC_copyTensor(THCState* state, THCTensor* dst, THCTensor* src); + +template +THCTensor *THCTensor_newClone(THCState *state, THCTensor *self); + +template +THCTensor *THCTensor_newContiguous(THCState *state, THCTensor *self); + +template +void THCTensor_freeCopyTo(THCState *state, THCTensor *self, THCTensor *dst); + +template +void THCTensor_copyIgnoringOverlaps(THCState* state, THCTensor* dst, THCTensor* src); diff --git a/thirdparty/libtorch/include/THC/THCTensorInfo.cuh b/thirdparty/libtorch/include/THC/THCTensorInfo.cuh new file mode 100644 index 0000000000..4ad61c6ad7 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorInfo.cuh @@ -0,0 +1,260 @@ +#ifndef THC_TENSOR_INFO_INC +#define THC_TENSOR_INFO_INC + +#include +#include +#include +#include +#include + +// Maximum number of dimensions allowed for cutorch +#define MAX_CUTORCH_DIMS 25 + +// Warning string for tensor arguments that are too large or have too +// many dimensions +#define CUTORCH_STR(X) #X +#define CUTORCH_DIM_WARNING "tensor too large or too many (>" \ + CUTORCH_STR(MAX_CUTORCH_DIMS) ") dimensions" + +// CUDA kernel argument that defines tensor layout +template +struct TensorInfo { + TensorInfo(T* p, + int dim, + IndexType sz[MAX_CUTORCH_DIMS], + IndexType st[MAX_CUTORCH_DIMS]); + + // Set the size of the given dimension to 1, as if it were a + // reduction dim (allows you to calculate offsets of the reduction + // slice) + void reduceDim(int dim); + + /* + Updates the TensorInfo's dims, sizes, and strides to reflect a "collapse" of + the info, possibly excluding the optional excludeDim. A "collapsed" version + of the info is the fewest dims that order the tensor's elements in the same + way as the original info. If excludeDim is specified, the collapse is the + fewest dims that order the tensor's elements as the original and preserve the + excluded dimension, unless the tensor collapses to a point. + + Returns the (new) index of the preserved dimension if excludeDim is + specified. Returns 0 if the tensor is collapsed to a point. Returns -1 + otherwise. 
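  For example, a contiguous tensor with sizes {4, 5, 6} and strides {30, 6, 1}
  collapses to a single dimension of size 120 with stride 1.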
+ */ + int collapseDims(const int excludeDim = -1); + + // Contiguous tensors of more than one dimension are collapsed down + // to one tensor + __host__ __device__ inline bool isContiguous() const { + return (dims == 1 && strides[0] == 1); + } + + T* data; + IndexType sizes[MAX_CUTORCH_DIMS]; + IndexType strides[MAX_CUTORCH_DIMS]; + int dims; +}; + +template +TensorInfo::TensorInfo(T* p, + int dim, + IndexType sz[MAX_CUTORCH_DIMS], + IndexType st[MAX_CUTORCH_DIMS]) { + data = p; + dims = dim; + assert(dims > 0 && dims < MAX_CUTORCH_DIMS); + + for (int i = 0; i < dim; ++i) { + sizes[i] = sz[i]; + strides[i] = st[i]; + } +} + +template +void +TensorInfo::reduceDim(int dim) { + assert(dim < dims && dim >= 0); + sizes[dim] = 1; +} + +template +int +TensorInfo::collapseDims(const int excludeDim) { + + assert(excludeDim >= -1 && excludeDim < dims); + + int stopDim = (excludeDim == -1) ? dims : excludeDim; + int newIndex = -1; + int oldIndex = 0; + int remappedExcludedDim = -1; + + while (oldIndex < dims) { + // Finds a dimension to collapse into + for (; oldIndex < stopDim; ++oldIndex) { + if (sizes[oldIndex] == 1) { + continue; + } + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + ++oldIndex; + break; + } + + // Collapses dims + for (; oldIndex < stopDim; ++oldIndex) { + if (sizes[oldIndex] == 1) { + continue; + } + + if (strides[newIndex] == sizes[oldIndex] * strides[oldIndex]) { + sizes[newIndex] *= sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + } else { + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + } + } + + // Handles excludeDim being set (oldIndex == excludeDim) + if (oldIndex != dims) { + + // Preserves excluded dimension + ++newIndex; + sizes[newIndex] = sizes[oldIndex]; + strides[newIndex] = strides[oldIndex]; + remappedExcludedDim = newIndex; + + // Restarts iteration after excludeDim + ++oldIndex; + stopDim = dims; + } + } + + // Handles special case of all dims size 1 + if (newIndex == -1 || (newIndex == 0 && sizes[0] == 1)) { + dims = 1; + sizes[0] = 1; + strides[0] = 1; + + return 0; + } + + dims = newIndex + 1; + return remappedExcludedDim; +} + +// Translate a linear index for the apply to a T* offset; +// specialized on `Dims` to reduce nvcc compilation time +template +struct IndexToOffset { + static __host__ __device__ IndexType get( + IndexType linearId, + const TensorInfo& info) { + + IndexType offset = 0; + + // Uses static dims + for (int i = Dims - 1; i > 0; --i) { + IndexType curDimIndex = linearId % info.sizes[i]; + IndexType curDimOffset = curDimIndex * info.strides[i]; + offset += curDimOffset; + linearId /= info.sizes[i]; + } + + return offset + linearId * info.strides[0]; + } +}; + +template +struct IndexToOffset { + static inline __host__ __device__ IndexType get( + IndexType linearId, + const TensorInfo& info) { + + IndexType offset = 0; + + // Uses dynamic dims + for (int i = info.dims - 1; i > 0; --i) { + IndexType curDimIndex = linearId % info.sizes[i]; + IndexType curDimOffset = curDimIndex * info.strides[i]; + offset += curDimOffset; + linearId /= info.sizes[i]; + } + + return offset + linearId * info.strides[0]; + } +}; + +// OffsetInfo is a faster implementation of IndexToOffset that uses faster +// integer division: we transform each division into integer multiplication by a +// pre-computed constant. (See IntDivider for details.) 
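// Editor's sketch (hypothetical host-side reference, not part of the original
// header): the plain divide/modulo translation that IndexToOffset performs for
// runtime dims, and that OffsetInfo accelerates by replacing % and / with
// IntDivider's multiply-based division. For sizes {2, 3, 4} and contiguous
// strides {12, 4, 1}, linearId = 17 maps to coordinates (1, 1, 1) and thus to
// offset 1*12 + 1*4 + 1*1 = 17.
inline long indexToOffsetReference(long linearId, int dims,
                                   const long* sizes, const long* strides) {
  long offset = 0;
  for (int i = dims - 1; i > 0; --i) {
    offset += (linearId % sizes[i]) * strides[i];  // coordinate along dim i
    linearId /= sizes[i];
  }
  return offset + linearId * strides[0];           // remaining index is dim 0
}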
+template +struct OffsetInfo { + explicit OffsetInfo(const TensorInfo& tinfo) { + assert(tinfo.dims == Dims); + data = tinfo.data; + + for (int i = 0; i < Dims; ++i) { + sizes[i] = IntDivider(tinfo.sizes[i]); + strides[i] = tinfo.strides[i]; + } + } + + __host__ __device__ T* get(IndexType linearIndex) const { + IndexType offset = 0; + + for (int i = Dims - 1; i > 0; --i) { + DivMod divmod = sizes[i].divmod(linearIndex); + linearIndex = divmod.div; + offset += divmod.mod * strides[i]; + } + + return &data[offset + linearIndex * strides[0]]; + } + + T* data; + IntDivider sizes[Dims]; + IndexType strides[Dims]; +}; + +// For 1D tensors the offset equals linear index * stride. +template +struct OffsetInfo { + explicit OffsetInfo(const TensorInfo& tinfo) + : data{tinfo.data}, stride{tinfo.strides[0]} {} + + __host__ __device__ T* get(IndexType linearIndex) const { + return &data[linearIndex * stride]; + } + + T* data; + const IndexType stride; +}; + +// Dims=-1 is used when the dimension is unknown at compile time. +// +// Unfortunately, pre-computation does not work here, because of a bug in nvcc +// (tested on CUDA 8.0): if a kernel argument contains an array that is +// dynamically accessed, the whole array is first copied into the local memory. +// (That is, every kernel thread makes its own copy of the argument, even if it +// is never updated.) Pre-computation makes it worse because now we have more +// data to copy. +// +// So let's fall back to vanilla division approach. + +template +struct OffsetInfo { + explicit OffsetInfo(const TensorInfo& tinfo) + : tinfo(tinfo) { } + + __host__ __device__ T* get(IndexType linearIndex) const { + IndexType offset = IndexToOffset::get(linearIndex, tinfo); + return &tinfo.data[offset]; + } + + TensorInfo tinfo; +}; + +#endif // THC_TENSOR_INFO_INC diff --git a/thirdparty/libtorch/include/THC/THCTensorMath.h b/thirdparty/libtorch/include/THC/THCTensorMath.h new file mode 100644 index 0000000000..8c9aa6f837 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorMath.h @@ -0,0 +1,82 @@ +#ifndef TH_CUDA_TENSOR_MATH_INC +#define TH_CUDA_TENSOR_MATH_INC + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#include +#include + +#endif diff --git a/thirdparty/libtorch/include/THC/THCTensorMathMagma.cuh b/thirdparty/libtorch/include/THC/THCTensorMathMagma.cuh new file mode 100644 index 0000000000..08124d3d4c --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorMathMagma.cuh @@ -0,0 +1,22 @@ +#ifndef THC_TENSOR_MATH_MAGMA_CUH +#define THC_TENSOR_MATH_MAGMA_CUH + +#ifdef USE_MAGMA +#include +#else +#include +#endif + +#ifdef USE_MAGMA +template +static inline T* th_magma_malloc_pinned(size_t n) +{ + void* ptr; + if (MAGMA_SUCCESS != magma_malloc_pinned(&ptr, n * sizeof(T))) + THError("$ Torch: not enough memory: you tried to allocate %dGB. 
Buy new RAM!", n/268435456); + return reinterpret_cast(ptr); +} + +#endif + +#endif // THC_TENSOR_MATH_MAGMA_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorMathPointwise.cuh b/thirdparty/libtorch/include/THC/THCTensorMathPointwise.cuh new file mode 100644 index 0000000000..ece97e715c --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorMathPointwise.cuh @@ -0,0 +1,363 @@ +#ifndef THC_TENSORMATH_POINTWISE_CUH +#define THC_TENSORMATH_POINTWISE_CUH + +#include +#include +#include +#include +#include +#include +#include +#include + +template +struct TensorCAddOp { + TensorCAddOp(T v) : val(v) {} + + __device__ __forceinline__ void operator()(T* out, T* in) { + *out += val * *in; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + *out = *in1 + val * *in2; + } + + T val; +}; + +template +struct TensorMulOp { + __device__ __forceinline__ void operator()(T* out, T* in) { + *out *= *in; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + *out = *in1 * *in2; + } +}; + +template +static __device__ __forceinline__ +typename std::enable_if::value, bool>::type +modulo_wrap(T a, T b) { + return (a != 0) && (a < 0) != (b < 0); +} + +template +static __device__ __forceinline__ +typename std::enable_if::value, bool>::type +modulo_wrap(T a, T b) { + return false; +} + +template +struct TensorCRemainderOp { + __device__ __forceinline__ void operator()(T* out, T* in) { + T val = *out % *in; + if (modulo_wrap(val, *in)) { + val += *in; + } + *out = val; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + T val = *in1 % *in2; + if (modulo_wrap(val, *in2)) { + val += *in2; + } + *out = val; + } +}; + +template <> +struct TensorCRemainderOp { + __device__ __forceinline__ void operator()(float* out, float* in) { + *out = *in != 0.f ? *out - *in * floorf(*out / *in) : NAN; + } + + __device__ __forceinline__ void operator()(float* out, float* in1, float* in2) { + *out = *in2 != 0.f ? *in1 - *in2 * floorf(*in1 / *in2) : NAN; + } +}; + +template <> +struct TensorCRemainderOp { + __device__ __forceinline__ void operator()(double* out, double* in) { + *out = *in != 0. ? *out - *in * floor(*out / *in) : NAN; + } + + __device__ __forceinline__ void operator()(double* out, double* in1, double* in2) { + *out = *in2 != 0. ? *in1 - *in2 * floor(*in1 / *in2) : NAN; + } +}; + +template <> +struct TensorCRemainderOp { + __device__ __forceinline__ void operator()(at::Half* out, at::Half* in) { + *out = *in != 0.f ? *out - *in * floorf(*out / *in) : NAN; + } + + __device__ __forceinline__ void operator()(at::Half* out, at::Half* in1, at::Half* in2) { + *out = *in2 != 0.f ? 
*in1 - *in2 * floorf(*in1 / *in2) : NAN; + } +}; + +template +struct TensorCFmodOp { + __device__ __forceinline__ void operator()(T* out, T* in) { + *out = *out % *in; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + *out = *in1 % *in2; + } +}; + +template <> +struct TensorCFmodOp { + __device__ __forceinline__ void operator()(float* out, float* in) { + *out = fmodf(*out, *in); + } + + __device__ __forceinline__ void operator()(float* out, float* in1, float* in2) { + *out = fmodf(*in1, *in2); + } +}; + +template <> +struct TensorCFmodOp { + __device__ __forceinline__ void operator()(double* out, double* in) { + *out = fmod(*out, *in); + } + + __device__ __forceinline__ void operator()(double* out, double* in1, double* in2) { + *out = fmod(*in1, *in2); + } +}; + +template <> +struct TensorCFmodOp { + __device__ __forceinline__ void operator()(at::Half* out, at::Half* in) { + *out = fmodf(*out, *in); + } + + __device__ __forceinline__ void operator()(at::Half* out, at::Half* in1, at::Half* in2) { + *out = fmodf(*in1, *in2); + } +}; + +template +struct TensorClampOp { + TensorClampOp(T min, T max) : minValue(min), maxValue(max) {} + __device__ __forceinline__ void operator()(T* out, T* in) { + T val = THCNumerics::lt(*in, minValue) ? minValue : *in; + *out = THCNumerics::gt(val, maxValue) ? maxValue : val; + } + + __device__ __forceinline__ void operator()(T* v) { + T val = THCNumerics::lt(*v, minValue) ? minValue : *v; + *v = THCNumerics::gt(val, maxValue) ? maxValue : val; + } + + const T minValue; + const T maxValue; +}; + +template +struct TensorCrossOp { + TensorCrossOp(int64_t sx, int64_t sy, int64_t so) : sx(sx), sy(sy), so(so) {} + + __device__ __forceinline__ void operator()(T* out, T* x, T*y) { + T val0 = THCNumerics::sub( + THCNumerics::mul(x[1 * sx], y[2 * sy]), + THCNumerics::mul(x[2 * sx], y[1 * sy]) + ); + + T val1 = THCNumerics::sub( + THCNumerics::mul(x[2 * sx], y[0 * sy]), + THCNumerics::mul(x[0 * sx], y[2 * sy]) + ); + + T val2 = THCNumerics::sub( + THCNumerics::mul(x[0 * sx], y[1 * sy]), + THCNumerics::mul(x[1 * sx], y[0 * sy]) + ); + + out[0 * so] = val0; + out[1 * so] = val1; + out[2 * so] = val2; + } + + const int64_t sx, sy, so; +}; + +template +struct TensorMaxOp { + __device__ __forceinline__ void operator()(T* out, T* in) { + *out = THCNumerics::gt(*out, *in) ? *out : *in; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + *out = THCNumerics::gt(*in1, *in2) ? *in1 : *in2; + } +}; + +template +struct TensorMinOp { + __device__ __forceinline__ void operator()(T* out, T* in) { + *out = THCNumerics::lt(*out, *in) ? *out : *in; + } + + __device__ __forceinline__ void operator()(T* out, T* in1, T* in2) { + *out = THCNumerics::lt(*in1, *in2) ? *in1 : *in2; + } +}; + +template +struct TensorMaxValueOp { + TensorMaxValueOp(T v) : val(v) {} + + __device__ __forceinline__ void operator()(T* out) { + *out = THCNumerics::lt(*out, val) ? val : *out; // this order propagates NaN + } + + __device__ __forceinline__ void operator()(T* out, T* in) { + *out = THCNumerics::lt(*in, val) ? val : *in; // this order propagates NaN + } + + T val; +}; + +template +struct TensorMinValueOp { + TensorMinValueOp(T v) : val(v) {} + + __device__ __forceinline__ void operator()(T* out) { + *out = THCNumerics::gt(*out, val) ? val : *out; // this order propagates NaN + } + + __device__ __forceinline__ void operator()(T* out, T* in) { + *out = THCNumerics::gt(*in, val) ? 
val : *in; // this order propagates NaN + } + + T val; +}; + +template +struct TensorLShiftOp { + __device__ __forceinline__ void + operator()(T* out, T* in) { + *out <<= *in; + } + + __device__ __forceinline__ void + operator()(T* out, T* in1, T* in2) { + *out = *in1 << *in2; + } +}; + +template <> +struct TensorLShiftOp { + __device__ __forceinline__ void + operator()(float* out, float* in) { + *out *= powf(2.0f, *in); + } + + __device__ __forceinline__ void + operator()(float* out, float* in1, float* in2) { + *out = *in1 * powf(2.0f, *in2); + } +}; + +template <> +struct TensorLShiftOp { + __device__ __forceinline__ void + operator()(double* out, double* in) { + *out *= pow(2.0, *in); + } + + __device__ __forceinline__ void + operator()(double* out, double* in1, double* in2) { + *out = *in1 * pow(2.0, *in2); + } +}; + +template +struct TensorRShiftOp { + __device__ __forceinline__ void + operator()(T* out, T* in) { + *out >>= *in; + } + + __device__ __forceinline__ void + operator()(T* out, T* in1, T* in2) { + *out = *in1 >> *in2; + } +}; + +template <> +struct TensorRShiftOp { + __device__ __forceinline__ void + operator()(float* out, float* in) { + *out /= powf(2.0f, *in); + } + + __device__ __forceinline__ void + operator()(float* out, float* in1, float* in2) { + *out = *in1 / powf(2.0f, *in2); + } +}; + +template <> +struct TensorRShiftOp { + __device__ __forceinline__ void + operator()(double* out, double* in) { + *out /= pow(2.0, *in); + } + + __device__ __forceinline__ void + operator()(double* out, double* in1, double* in2) { + *out = *in1 / pow(2.0, *in2); + } +}; + +template +struct TensorBitAndOp { + __device__ __forceinline__ void + operator()(T* out, T* in) { + *out &= *in; + } + + __device__ __forceinline__ void + operator()(T* out, T* in1, T* in2) { + *out = *in1 & *in2; + } +}; + +template +struct TensorBitOrOp { + __device__ __forceinline__ void + operator()(T* out, T* in) { + *out |= *in; + } + + __device__ __forceinline__ void + operator()(T* out, T* in1, T* in2) { + *out = *in1 | *in2; + } +}; + +template +struct TensorBitXorOp { + __device__ __forceinline__ void + operator()(T* out, T* in) { + *out ^= *in; + } + + __device__ __forceinline__ void + operator()(T* out, T* in1, T* in2) { + *out = *in1 ^ *in2; + } +}; + +#endif // THC_TENSORMATH_POINTWISE_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorMathReduce.cuh b/thirdparty/libtorch/include/THC/THCTensorMathReduce.cuh new file mode 100644 index 0000000000..a0602ebfb6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorMathReduce.cuh @@ -0,0 +1,606 @@ +#ifndef THC_TENSORMATH_REDUCE_CUH +#define THC_TENSORMATH_REDUCE_CUH + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#if CUDA_VERSION >= 7000 || defined __HIP_PLATFORM_HCC__ +#include +#endif + +/* +Reductions that (only) operate on accumulate types. +*/ + +template +struct WelfordData { + T mean_; + T m_2_n_; + int count_; // do we need int64_t? + + __host__ __device__ WelfordData() { + } + + // stripping initialization from default constructor to avoid dynamic + // initialization warning thrown from using this data structure in CUDA kernel + // as static shared memory. 
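  // For reference, ReduceWelford below merges two partial aggregates
  // (mean_, m_2_n_, count_) with the usual parallel-variance update: with
  // n = count_a + count_b,
  //   mean  = (count_a * mean_a + count_b * mean_b) / n
  //   m_2_n = m_2_n_a + m_2_n_b + (count_a * count_b / n) * (mean_a - mean_b)^2
  // and VarianceWelford then divides m_2_n by n or n - 1 and optionally takes the sqrt.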
+ __host__ __device__ void reset() { + mean_ = T(0); + m_2_n_ = T(0); + count_ = 0; + } + + __host__ __device__ WelfordData(const U data_) { + mean_ = static_cast(data_); + m_2_n_ = static_cast(0); + count_ = 1; + } + + __host__ __device__ WelfordData(const WelfordData &t) : + mean_(t.mean_), + m_2_n_(t.m_2_n_), + count_(t.count_) + { + } + + __host__ __device__ WelfordData(const volatile WelfordData &t) : + mean_(t.mean_), + m_2_n_(t.m_2_n_), + count_(t.count_) + { + } + + __host__ __device__ volatile WelfordData& operator = (const volatile WelfordData &t) volatile { + mean_ = t.mean_; + m_2_n_ = t.m_2_n_; + count_ = t.count_; + return *this; + } + + __host__ __device__ WelfordData& operator = (const WelfordData &t) { + mean_ = t.mean_; + m_2_n_ = t.m_2_n_; + count_ = t.count_; + return *this; + } + +}; + + +template +struct ModifyWelford { + inline __device__ T operator()(const T &a) const { + return a; + } +}; + +template +struct ReduceWelford { + inline __device__ WelfordData operator()(const WelfordData &a, const WelfordData &b) const { + WelfordData c; + c.count_ = THCNumerics::add(a.count_, b.count_); + T factor = THCNumerics::div(1.0, max(1, c.count_)); + c.mean_ = THCNumerics::mul(THCNumerics::add(THCNumerics::mul(a.mean_, a.count_), THCNumerics::mul(b.mean_, b.count_)), factor); + c.m_2_n_ = THCNumerics::add(a.m_2_n_, THCNumerics::add(b.m_2_n_, THCNumerics::mul(factor, THCNumerics::mul(a.count_, THCNumerics::mul(b.count_, THCNumerics::pow(THCNumerics::sub(a.mean_, b.mean_), 2) ))))); + return c; + } +}; + +template +struct VarianceWelford { + VarianceWelford(const int _unbiased, const bool _apply_sqrt): unbiased{_unbiased}, apply_sqrt(_apply_sqrt) {} + + inline __device__ T operator()(const WelfordData &a) const { + T res = THCNumerics::div(a.m_2_n_, unbiased ? a.count_ : a.count_-1); + if (apply_sqrt) { + return THCNumerics::sqrt(res); + } + return res; + } + + const int unbiased; + const bool apply_sqrt; +}; + +template +struct ReduceAdd { + inline __device__ T operator()(const T a, const T b) const { + return THCNumerics::add(a, b); + } +}; + +template +struct ReduceMultiply { + inline __device__ T operator()(const T a, const T b) const { + return THCNumerics::mul(a, b); + } +}; + +template +struct ReduceDivide { + ReduceDivide(const T _divisor): divisor{_divisor} {} + + inline __device__ T operator()(const T x) const { + return THCNumerics::div(x, divisor); + } + + const T divisor; +}; + +template +struct ReducePow { + ReducePow(const T _exponent): exponent{_exponent} {} + + inline __device__ T operator()(const T x) const { + return THCNumerics::pow(x, exponent); + } + + const T exponent; +}; + +template +struct SquareFunctor { + SquareFunctor(const T _mean): mean{_mean} {} + + inline __device__ T operator()(const T x) const { + return THCNumerics::mul( + THCNumerics::sub(x, mean), + THCNumerics::sub(x, mean) + ); + } + + const T mean; +}; + +template +struct ReduceMin { + inline __device__ T operator()(T a, T b) const { + return (THCNumerics::lt(a, b) || THCNumerics::isnan(a)) ? a : b; + } +}; + +template +struct ReduceMax { + inline __device__ T operator()(T a, T b) const { + return (THCNumerics::gt(a, b) || THCNumerics::isnan(a)) ? 
a : b; + } +}; + +struct LogicalAll { + inline __device__ unsigned char operator()(const unsigned char x, + const unsigned char y) const { + return (x && y); + } +}; + +struct LogicalAny { + inline __device__ unsigned char operator()(const unsigned char x, + const unsigned char y) const { + return (x || y); + } +}; + +template +inline __device__ T THCMax(const T a, const T b) { + return THCNumerics::gt(a, b) ? a : b; +} + +template +__global__ void THCTensor_kernel_renorm(T *data, + const AccT value, + const ptrdiff_t size, + const AccT maxnorm) { + __shared__ AccT buffer[32]; + int64_t tx = threadIdx.x; + int64_t bx = blockIdx.x; + int64_t step = blockDim.x; + T *row = data + size * bx; + + buffer[tx] = scalar_cast(0); + AccT norm; + + if (THCNumerics::eq(value, scalar_cast(INFINITY))) { + // get norm of axis + for (ptrdiff_t i = tx; i < size; i += step) { + const AccT val = scalar_cast(row[i]); + buffer[tx] = THCMax(buffer[tx], static_cast(std::abs(val))); + } + // add (reduce) + for (unsigned int stride = blockDim.x >> 1; stride > 0; stride >>= 1) { + __syncthreads(); + if (tx < stride) + buffer[tx] = THCMax(buffer[tx], buffer[tx+stride]); + } + // clip norms + __syncthreads(); + norm = buffer[0]; + } else { + // get norm of axis + for (ptrdiff_t i = tx; i < size; i += step) { + const AccT val = scalar_cast(row[i]); + buffer[tx] = THCNumerics::add( + buffer[tx], + THCNumerics::pow(static_cast(std::abs(val)), value) + ); + } + // add (reduce) + for (unsigned int stride = blockDim.x >> 1; stride > 0; stride >>= 1) { + __syncthreads(); + if (tx < stride) + buffer[tx] = THCNumerics::add(buffer[tx], buffer[tx+stride]); + } + // clip norms + __syncthreads(); + norm = THCNumerics::pow(buffer[0], THCNumerics::cinv(value)); + } + + if (THCNumerics::gt(norm, maxnorm)) { + norm = THCNumerics::div( + maxnorm, + THCNumerics::add(norm, scalar_cast(1e-7)) + ); + // renormalize + for (ptrdiff_t i = tx; i < size; i += step) { + const AccT val = scalar_cast(row[i]); + row[i] = scalar_cast(THCNumerics::mul(val, norm)); + } + } +} + +template +struct TensorNonZeroOp { + TensorNonZeroOp() {} + + __host__ __device__ T operator()(const T lhs) const { + const T zero = scalar_cast(0); + if (THCNumerics::eq(lhs, zero)) return zero; + + return scalar_cast(1); + } +}; + +template +struct TensorNormOp { + TensorNormOp(T _exponent) : exponent{_exponent} {} + + __host__ __device__ T operator()(const T x) const { + switch (StaticExp) { + case 1: return static_cast(std::abs(x)); + case 2: return THCNumerics::mul(x, x); + default: return THCNumerics::pow(static_cast(std::abs(x)), exponent); + } + } + + const T exponent; +}; + +/* + Fuses conversions and a TensorDistOp. Needed for Thrust. 
+*/ +template +struct ThrustTensorDistOp { + ThrustTensorDistOp(AccT _exponent) : exponent{_exponent} {} + + __host__ __device__ AccT operator()(T _x, T _y) const { + const AccT x = scalar_cast(_x); + const AccT y = scalar_cast(_y); + if (THCNumerics::eq(exponent, scalar_cast(0))) { + const AccT zero = scalar_cast(0); + if (THCNumerics::eq(THCNumerics::sub(x, y), zero))return zero; + return scalar_cast(1); + } + if (THCNumerics::eq(exponent, scalar_cast(1))) { + return static_cast(std::abs(THCNumerics::sub(x, y))); + } else if (THCNumerics::eq(exponent, scalar_cast(2))) { + return THCNumerics::pow( + THCNumerics::sub(x, y), exponent); + } else { + return THCNumerics::pow( + static_cast(std::abs(THCNumerics::sub(x, y))), + exponent); + } + } + + const AccT exponent; +}; + +#include + +// Given the sum of values and the sum of squares, compute the variance or standard deviation. +template +__forceinline__ __device__ T THCTensor_computeVar( + T sum, + T sum2, + const unsigned row_size) { + + T rs2 = scalar_cast(row_size); + T rs2m = scalar_cast(row_size - 1); + T zero = scalar_cast(0); + + if (flag) { + sum = THCNumerics::div(sum, rs2); + sum2 = THCNumerics::div(sum2, rs2); + sum2 = THCNumerics::sub(sum2, THCNumerics::mul(sum, sum)); + sum2 = (THCNumerics::lt(sum2, zero) ? zero : sum2); + } else { + sum = THCNumerics::div(sum, rs2); + sum2 = THCNumerics::div(sum2, rs2m); + sum2 = THCNumerics::sub(sum2, + THCNumerics::mul( + THCNumerics::div(rs2 ,rs2m), + THCNumerics::mul(sum, sum))); + sum2 = (THCNumerics::lt(sum2, zero) ? zero : sum2); + } + + if (apply_sqrt) + return THCNumerics::sqrt(sum2); + + return sum2; +} + +/* A set of reduction kernels that take in binary ops on thrust pairs (of value, index). + These are useful when you not only have to do a reduction, but you might have + to preserve the location of contention (for example min/max operations). + The structure of the kernels follows the structure of the reduction kernels. 
+*/ +template +__global__ void +kernelTransformReduceOuterDimIndex(K *tgt1, + Index *tgt2, + K *src_, + unsigned num_orows, + unsigned num_irows, + unsigned row_size, + thrust::pair init, + BinaryFunction binary_op) { + for (unsigned orow = blockIdx.x; orow < num_orows; orow += gridDim.x) { + for (unsigned irow = blockIdx.y * blockDim.x + threadIdx.x; + irow < num_irows; + irow += gridDim.y * blockDim.x) { + K *src = src_ + orow * row_size * num_irows + irow; + thrust::pair acc = init; + + for (unsigned col = 0; col < row_size; ++col) { + // +1 for Lua index + acc = binary_op(acc, + thrust::make_pair(*src, col)); + src += num_irows; + } + + tgt1[orow * num_irows + irow] = acc.first; + tgt2[orow * num_irows + irow] = acc.second; + } + } +} + +template +__host__ void +THC_transformReduceOuterDimIndex(THCState *state, + TensorTypeK *tgt1, + TensorTypeIndex *tgt2, + TensorTypeK *src, + int64_t rdim, + const thrust::pair& init, + BinaryFunction binary_op) { + unsigned ndim = THCTensor_nDimensionLegacyAll(state, src); + unsigned num_orows = 1; + for (int64_t dim = 0; dim < rdim; dim++) { + num_orows *= THCTensor_sizeLegacyNoScalars(state, src, dim); + } + unsigned row_size = THCTensor_sizeLegacyNoScalars(state, src, rdim); + unsigned num_irows = 1; + for (unsigned dim = rdim + 1; dim < ndim; dim++) { + num_irows *= THCTensor_sizeLegacyNoScalars(state, src, dim); + } + + dim3 threads(min(512, num_irows)); + unsigned maxGridDim = 1024; + dim3 grid(min(maxGridDim, num_orows), + min(maxGridDim, THCCeilDiv(num_irows, threads.x))); + + kernelTransformReduceOuterDimIndex + <<>>( + tgt1->template data(), + tgt2->template data(), + src->template data(), + num_orows, num_irows, row_size, init, binary_op); + + THCudaCheck(cudaGetLastError()); +} + +/* Reduce the innermost dimension of a tensor (on thrust::pair functors which are (value, index)) + * + * For an n-d tensor (n <= 4) where the reduction is along the innermost dimension: + * + * - block.x is the innermost dimension, i.e. dimension 0; + * - block.y and grid.y make up dimension 1; and + * - grid.x and grid z are the remaining two outer dimensions (if any) + * + * Reduction along other dimensions is handled in a separate kernel. + */ +template +__global__ void +kernelTransformReduceInnermostDimIndex(K *tgt1, + Index* tgt2, + K *src_, + unsigned num_rows, + unsigned row_size, + thrust::pair init, + BinaryFunction binary_op) { + __shared__ K sbuf[32][16 + 1]; // avoid bank conflict + __shared__ Index ibuf[32][16 + 1]; // avoid bank conflict + + for (unsigned block_row = blockIdx.x * blockDim.y; + block_row < num_rows; + block_row += blockDim.y * gridDim.x) { + unsigned row = block_row + threadIdx.y; + thrust::pair acc = init; + if (row < num_rows) { + K *src = src_ + row * row_size; + // Sequential reduction within a thread. + for (unsigned col = threadIdx.x; col < row_size; col += blockDim.x) { + acc = binary_op(acc, thrust::make_pair(src[col], col)); + } + } + + sbuf[threadIdx.y][threadIdx.x] = acc.first; + ibuf[threadIdx.y][threadIdx.x] = acc.second; + + __syncthreads(); + + // Reduce intermediate values to single value. 
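    // With the 16-lane rows set up by the launcher below (dim3 threads(16, 32)), the
    // stride starts at 8 and halves each step: lane i combines with lane i + 8, then
    // i + 4, i + 2, i + 1, leaving the row's reduced (value, index) pair in lane 0.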
+ K* sline = &sbuf[threadIdx.y][0]; + Index* iline = &ibuf[threadIdx.y][0]; + for (unsigned s = 8; s > 0; s >>= 1) { + if (row < num_rows && threadIdx.x < s) { + thrust::pair arg1 = + thrust::make_pair(sline[threadIdx.x], iline[threadIdx.x]); + thrust::pair arg2 = + thrust::make_pair(sline[threadIdx.x + s], iline[threadIdx.x + s]); + thrust::pair res = binary_op(arg1, arg2); + + sline[threadIdx.x] = res.first; + iline[threadIdx.x] = res.second; + } + __syncthreads(); + } + + if (row < num_rows && threadIdx.x == 0) { + tgt1[row] = sline[0]; + tgt2[row] = iline[0]; + } + __syncthreads(); + } +} + +template +__host__ void +THC_transformReduceInnermostDimIndex(THCState *state, + TensorTypeK *tgt1, + TensorTypeIndex *tgt2, + TensorTypeK *src, + const thrust::pair& init, + BinaryFunction binary_op) { + unsigned ndim = THCTensor_nDimensionLegacyAll(state, src); + unsigned num_rows = 1; + for (unsigned dim = 0; dim < ndim - 1; dim++) { + num_rows *= THCTensor_sizeLegacyNoScalars(state, src, dim); + } + unsigned row_size = THCTensor_sizeLegacyNoScalars(state, src, ndim - 1); + + dim3 threads(16, 32); + dim3 grid(min(1024, THCCeilDiv(num_rows, threads.y))); + + kernelTransformReduceInnermostDimIndex + <<>>( + tgt1->template data(), + tgt2->template data(), + src->template data(), + num_rows, row_size, init, binary_op); + + THCudaCheck(cudaGetLastError()); +} + +template +void +THC_reduceDimIndex(THCState *state, + TensorTypeK *tgt1_, + TensorTypeIndex *tgt2_, + TensorTypeK *src, + int64_t dimension, + int keepdim, + const thrust::pair& init, + BinaryFunction binary_op) +{ + THArgCheck(dimension >= 0 && + dimension < THCTensor_nDimensionLegacyAll(state, src), + 3, "dimension out of range"); + + + // Unsqueeze tgt1_/tgt_2 if necessary so that their contiguity traits + // are preserved if they are the same size as the correct reduction output. + int src_dims = THCTensor_nDimensionLegacyAll(state, src); + THCTensor_preserveReduceDimSemantics( + state, tgt1_, src_dims, dimension, keepdim); + THCTensor_preserveReduceDimSemantics( + state, tgt2_, src_dims, dimension, keepdim); + + std::vector dim = THTensor_sizesLegacyNoScalars(src); + dim[dimension] = 1; + THCTensor_resize(state, tgt1_, dim, {}); + THCTensor_resize(state, tgt2_, dim, {}); + + TensorTypeK *tgt1 = (TensorTypeK*)THCTensor_newContiguous(state, tgt1_); + TensorTypeIndex *tgt2 = (TensorTypeIndex*)THCTensor_newContiguous(state, tgt2_); + src = (TensorTypeK*)THCTensor_newContiguous(state, src); + + if (dimension == THCTensor_nDimensionLegacyAll(state, src) - 1) { + THC_transformReduceInnermostDimIndex(state, tgt1, tgt2, src, init, binary_op); + } else { + THC_transformReduceOuterDimIndex(state, tgt1, tgt2, src, dimension, init, binary_op); + } + + THCTensor_free(state, src); + THCTensor_freeCopyTo(state, tgt1, tgt1_); + THCTensor_freeCopyTo(state, tgt2, tgt2_); + if (!keepdim) { + THCTensor_squeeze1d(state, tgt1_, tgt1_, dimension); + THCTensor_squeeze1d(state, tgt2_, tgt2_, dimension); + } +} + +template +struct MaxValuePair { + __host__ __device__ + thrust::pair operator()(const thrust::pair& a, + const thrust::pair& b) { + return (THCNumerics::ge(a.first, b.first) || + THCNumerics::isnan(a.first)) ? a : b; + } +}; + +template +struct MinValuePair { + __host__ __device__ + thrust::pair operator()(const thrust::pair& a, + const thrust::pair& b) { + return (THCNumerics::le(a.first, b.first) || + THCNumerics::isnan(a.first)) ? 
a : b; + } +}; + +template +struct AddOp { + __device__ __forceinline__ T operator()(T const &lhs, T const &rhs) { + return THCNumerics::add(lhs, rhs); + } +}; + +template +struct MulOp { + __device__ __forceinline__ T operator()(T const &lhs, T const &rhs) { + return THCNumerics::mul(lhs, rhs); + } +}; + +#endif // THC_TENSORMATH_REDUCE_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorMode.cuh b/thirdparty/libtorch/include/THC/THCTensorMode.cuh new file mode 100644 index 0000000000..ef9126860b --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorMode.cuh @@ -0,0 +1,282 @@ +#ifndef THC_TENSOR_MODE_CUH +#define THC_TENSOR_MODE_CUH + +#include +#include +#include + +struct ThrustHalfLess +{ + __host__ __device__ inline bool operator()(const at::Half& lhs, const at::Half& rhs) { + return THCNumerics::lt(lhs, rhs); + } +}; + +struct ThrustHalfNotEqualTo +{ + __host__ __device__ inline bool operator()(const at::Half& lhs, const at::Half& rhs) { + return THCNumerics::ne(lhs, rhs); + } +}; + +struct ThrustHalfEqualTo +{ + __host__ __device__ inline bool operator()(const at::Half& lhs, const at::Half& rhs) { + return THCNumerics::eq(lhs, rhs); + } +}; + +struct ThrustHalfEqualToPredicate +{ + ThrustHalfEqualToPredicate(at::Half val): val_(val) {} + __host__ __device__ inline bool operator()(at::Half x) { + return THCNumerics::eq(val_, x); + } + + at::Half val_; +}; + +template +struct BinaryAddOp { + __host__ __device__ inline T operator()(const T a, const T b) { + return THCNumerics::add(a, b); + } +}; + +template <> +struct BinaryAddOp { + __host__ __device__ inline unsigned int operator()(const unsigned int a, const unsigned int b) { + return a + b; + } +}; + +// Used for a segmented reduction +struct ModeUnsignedBoolPair { + unsigned int val; + bool flag; +}; + +// In the kernel below, we have a common pattern of reducing (unsigned int, unsigned int) +// pairs of data +struct ModeUnsignedPair { + unsigned int val; + unsigned int index; +}; + +template +struct MaxReduceOp { + __host__ __device__ inline T operator()(const T& a, const T& b) { + return b.val > a.val ? b : a; + } +}; + +template +struct MatchReduceOp { + __host__ __device__ inline T operator()(const T& a, const T& b) { + return b.flag ? b : a; + } +}; + +// The mode kernel has the following characteristics: It uses internal shared memory +// buffers of Power2Size, which must be greater than the number of elements. Additionally, +// there is one block for every slice to calculate the mode for, and in each block there +// is one thread for every two elements. +// +// Both sorted and positions are assumed to be contiguous Tensors with the mode dimension +// as the innermost dim, such that we can get the particular slice for a Tensor via its +// linear block dimension * the slice size. +template +__global__ void computeMode( + T *input, + TensorInfo values, + TensorInfo indices, + int64_t sliceSize) +{ + int tidx = threadIdx.x; + int stidx = blockDim.x + threadIdx.x; // Second index this thread responsible for + + // First, we need to calculate the offset into the sorted Tensor that represents + // the start of the slice for this block to calculate the mode for. This offset + // is a combination of the gridIndices, and the number of elements in the slice. + unsigned int blockId = getLinearBlockId(); + unsigned int linearOffset = blockId * sliceSize; + + // shmem is a dynamically sized buffer we will use throughout the kernel to + // handle computation efficiently. 
The size of this shmem must be + // sizeof(T) * Power2Size + (2 * sizeof(unsigned int) * Power2Size) + // + // Initially, the buffer will be organized as follows: + // + // [smem (slice elements) | bmem (valid indices) | ] + extern __shared__ char shmem[]; + + // smem represents a proportion of the shared memory buffer that is used to store + // the elements from the slice: + T *smem = reinterpret_cast(shmem); + + // Each thread loads up to two elements from the Tensor into shared memory + if (tidx < sliceSize) { + smem[tidx] = input[linearOffset + tidx]; + } + if (stidx < sliceSize) { + smem[stidx] = input[linearOffset + stidx]; + } + + // Next, we initialize a boolean region of the buffer, offset by the loaded element + // smem region + bool *bmem = reinterpret_cast(&smem[Power2Size]); + + // The first use of this region stores bmem[i] = i < sliceSize to mark the valid + // components in the smem buffer + bmem[tidx] = tidx < sliceSize; + bmem[stidx] = stidx < sliceSize; + __syncthreads(); // barrier for smem, bmem initialization + + // First, sort the input slice in ascending order. smem contains the input + // elements, and bmem marks the valid indices + bitonicSortKeys, T, unsigned int, Power2Size>(smem, bmem, LTComp()); + __syncthreads(); // make no assumptions that the sort syncs at end + + // The next step of our algorithm is performing a block-wide comparison of + // neighboring elements. In particular, given an sorted input slice A, we + // produce an output slice B, such that B[i] = 1 if A[i-i] != A[i], otherwise 0. + // + // Given the input A = [0, 0, 1, 1, 2, 2, 2, 4, 5, 6, 6, 7, 8] + // B = [1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1] + // + // In particular, we can think of B[i] true indicating the start of a sequence of + // equal values in the sorted list. Similarly, we will also store the negation of B, + // which we'll call C. In particular, we can think of C[i] = true iff A[i-1] == A[i] + // in our original sorted slice. + // + // C = [0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0] + + // We overwrite bmem, and treat the rest of shared memory as a buffer of (index, flag) pairs + // where the index represents values from C, and the flag represents values from B. + // + // [smem (sorted slice) | ubpmem (index, flag pairs)] + + struct ModeUnsignedBoolPair *ubpmem = reinterpret_cast( + &smem[Power2Size]); + + if (tidx == 0) { + ubpmem[0].flag = true; + ubpmem[0].val = 0; + } + + // Compares elements (0, 1), (2, 3), ... and sets 1, 3, ... + ubpmem[tidx * 2 + 1].flag = THCNumerics::ne(smem[tidx * 2], smem[tidx * 2 + 1]); // (0, 1), (1, 2), etc. + ubpmem[tidx * 2 + 1].val = !ubpmem[tidx * 2 + 1].flag; + + // Compares elements (1, 2), (3, 4), ... and sets 2, 4, ... + if (((tidx + 1) * 2) < Power2Size) { + ubpmem[(tidx + 1) * 2].flag = THCNumerics::ne(smem[((tidx + 1) * 2) - 1], smem[(tidx + 1) * 2]); + ubpmem[(tidx + 1) * 2].val = !ubpmem[(tidx + 1) * 2].flag; + } + __syncthreads(); // barrier for ubpmem initialization + + // Next, we perform a segmented prefix sum on the neighboring elements, where + // the presence of a one indicates the start of a segment. In this case B acts + // as the segment start flags, and C is the buffer to be summed: + // + // Input (C) = [0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0, 0] + // Flag (B) = [1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 1] + // Output (C) = [0, 1, 0, 1, 0, 1, 2, 0, 0, 0, 1, 0, 0] + // + // Afterwards, the (index) components of the ubpmem buffer contain the lengths of the + // segments (minus 1), i.e. the counts of each element in the original input. 
+ + inclusivePrefixScan< + struct ModeUnsignedBoolPair, + struct SegmentedScanOp >, + Power2Size>( + ubpmem, + SegmentedScanOp >(BinaryAddOp())); + // assumes scan syncs at the end + + // Next, we reinterpret the ubpmem buffer as pairs of unsigned integers (i.e. we treat the + // boolean flag regions as integers). We initialize these to represent indices, and we'll call + // this buffer I + struct ModeUnsignedPair *uupmem = reinterpret_cast(ubpmem); + + // At this point, we need to find the maximum element in lengths buffer C. + // This element will represent the count (-1) of the mode. Because of the + // way we have set up the problem, the index where this mode occurs will + // also be the location of the mode value in the sorted array, e.g. + // + // smem = [0, 0, 1, 1, 1, 2] + // C = [0, 1, 0, 1, 2, 0] + // I = [0, 1, 2, 3, 4, 5] + // ^ + // maximum value, also aligned with mode = 1 + // + // We perform a block wide max-reduction of the C buffer, but we also need the + // indices to come along with it, so we utilize the uupmem construction. + // + // At the end we need to return the ModeUnsignedPair containing index = 4, val = 2, + // which represents the max + + // In practice, we will make each thread locally reduce 2 values in its registers prior + // to the global block-wide reduction. Note that instead of tidx/stidx, we utilize tidx * 2, + // tidx * 2 + 1, so each thread deals with adjacent elements. This is because the reduce + // code below relies on thread elements to be adjacent. + struct ModeUnsignedPair uup[2]; + uup[0].index = tidx * 2; + uup[0].val = ubpmem[tidx * 2].val; + uup[1].index = tidx * 2 + 1; + uup[1].val = ubpmem[tidx * 2 + 1].val; + __syncthreads(); + + struct ModeUnsignedPair max = {0, 0}; + + max = reduceBlockWithNThreadLocalReductions, 2> + (uupmem, uup, sliceSize, MaxReduceOp(), max); + + // Store the mode in shared memory for use in finding the mode in the input slice + __shared__ T mode; + + // Given the above constraints, the mode is the value at the reduced index in the + // original sorted element buffer + if (tidx == 0) { + mode = smem[max.index]; + } + __syncthreads(); // broadcast mode + + // Finally, we need to find the "an" index of the mode in the input Tensor. The API does + // not constrain which index we pick, so it can be any of the indices that contain the mode. + // We will do a reduction to find the index. We go back to using the (index, flag) buffer + // arrangement. First, we mark indices that are equal to the mode, i.e B[i] = true if + // input[i] == mode, and initialize C[i] to be the index + // + // Again we reduce 2 elements in the thread's registers prior to the block-wide reduction + struct ModeUnsignedBoolPair ubpp[2]; + if (tidx * 2 < sliceSize) { + ubpp[0].flag = THCNumerics::eq(input[linearOffset + (tidx * 2)], mode); + ubpp[0].val = tidx * 2; + } + if (tidx * 2 + 1 < sliceSize) { + ubpp[1].flag = THCNumerics::eq(input[linearOffset + (tidx * 2 + 1)], mode); + ubpp[1].val = tidx * 2 + 1; + } + + // Then we perform a similar reduction to the one above, except this time we update + // the element if the element at the base position is not equal to the mode and + // the element at the offset position is. At the end, C[0] will contain an index + // with the mode. + struct ModeUnsignedBoolPair match = {0, false}; + + match = reduceBlockWithNThreadLocalReductions, 2> + (ubpmem, ubpp, sliceSize, MatchReduceOp(), match); + + // Finally, we have the mode, and an index where it occurs. 
We use a single thread + // to place this in the appropriate output position + if (tidx == 0) { + int64_t index = match.val; + + unsigned int outputOffset = IndexToOffset::get(blockId, values); + values.data[outputOffset] = mode; + indices.data[outputOffset] = index; + } +} + +#endif // THC_TENSOR_MODE_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorRandom.cuh b/thirdparty/libtorch/include/THC/THCTensorRandom.cuh new file mode 100644 index 0000000000..d38cb96801 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorRandom.cuh @@ -0,0 +1,90 @@ +#ifndef THC_TENSOR_RANDOM_CUH +#define THC_TENSOR_RANDOM_CUH + +#include +#include +#include + +#include + +#define MAX_NUM_BLOCKS 200 +#define BLOCK_SIZE 256 + +template +__global__ void +multinomialAliasDrawKernel(int size, int64_t *output, int64_t *J, T *q, int64_t K, T *uniform, T *bernoulli){ + int64_t idx = blockIdx.x * BLOCK_SIZE + threadIdx.x; + if (idx < size) { + int64_t rand_ind = ScalarConvert::to(uniform[idx]); + T bern_uniform = bernoulli[idx]; + int _mask = (int) THCNumerics::lt(bern_uniform, q[rand_ind]); + output[idx] = J[rand_ind]*(1 -_mask) + rand_ind * _mask; + } +} + +template +__global__ void +aliasMultinomialFilter(T *q, T *probs, int64_t *smaller, int64_t *larger, int64_t *J_data, int64_t *larger_short_data, int64_t *smaller_short_data, T one, int64_t inputsize){ + int64_t idx = blockIdx.x * BLOCK_SIZE + threadIdx.x; + if (idx < inputsize) { + larger_short_data[idx] = 0; + smaller_short_data[idx] = 0; + J_data[idx]= -1; + T val = THCNumerics::mul(probs[idx], ScalarConvert::to(inputsize)); + if (THCNumerics::lt(val, one)) { + smaller[idx] = idx+1; + larger[idx] = 0; + } else { + larger[idx] = idx+1; + smaller[idx] = 0; + } + q[idx] = val; + } +} + +template +__global__ void +condDiv(T *q, int64_t *J, int64_t inputsize, T q_max) { + int64_t idx = blockIdx.x * BLOCK_SIZE + threadIdx.x; + T one = ScalarConvert::to(1); + if (idx < inputsize) { + if (J[idx] < 0) { + q[idx] = one; + } else { + if (THCNumerics::gt(q_max, one)) { + q[idx] = THCNumerics::div(q[idx], q_max); + } + } + } +} + + +#undef MAX_NUM_BLOCKS +#undef BLOCK_SIZE + +template +__global__ void +aliasMultinomialSetup(int64_t *J, T*q, int64_t inputsize, int64_t * smaller, int64_t *larger, int small_c, int large_c) { + T one = ScalarConvert::to(1); + // Loop through and create little binary mixtures that + // appropriately allocate the larger outcomes over the + // overall uniform mixture. 
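  // Illustrative walk-through of the classic alias construction (0-based buckets
  // here, ignoring the 1-based bookkeeping used by the filter kernel above): for
  // probs = {0.1, 0.5, 0.4} and inputsize = 3, q starts as {0.3, 1.5, 1.2}; pairing
  // bucket 0 with bucket 2 sets J[0] = 2 and leaves q[2] = 0.5, which moves bucket 2
  // to the small pile; pairing 2 with 1 sets J[2] = 1 and q[1] = 1.0. Drawing then
  // picks a bucket uniformly and keeps it with probability q[bucket], otherwise
  // emits J[bucket], reproducing P = {0.1, 0.5, 0.4}.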
+ int64_t large = 0; + int64_t small = 0; + while (small_c > 0 && large_c > 0) { + large = larger[large_c-1]; + small = smaller[small_c-1]; + J[small] = large; + T q_sum = THCNumerics::add(q[large], q[small]); + q[large] = THCNumerics::sub(q_sum, one); + if (THCNumerics::lt(q[large], one)) { + smaller[small_c-1] = large; + large_c -= 1; + } else { + larger[large_c-1] = large; + small_c -= 1; + } + } +} + +#endif // THC_TENSOR_RANDOM_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorRandom.h b/thirdparty/libtorch/include/THC/THCTensorRandom.h new file mode 100644 index 0000000000..8422f3fafa --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorRandom.h @@ -0,0 +1,17 @@ +#ifndef TH_CUDA_TENSOR_RANDOM_INC +#define TH_CUDA_TENSOR_RANDOM_INC + +#include + +#include +#include + +#include +#include + +#include + +THC_API void THCRandom_getRNGState(at::Generator *gen_, THByteTensor *rng_state); +THC_API void THCRandom_setRNGState(at::Generator *gen_, THByteTensor *rng_state); + +#endif diff --git a/thirdparty/libtorch/include/THC/THCTensorSort.cuh b/thirdparty/libtorch/include/THC/THCTensorSort.cuh new file mode 100644 index 0000000000..ffda23a000 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorSort.cuh @@ -0,0 +1,103 @@ +#ifndef THC_TENSORSORT_CUH +#define THC_TENSORSORT_CUH + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#if CUDA_VERSION >= 7000 || defined(__HIP_PLATFORM_HCC__) +#include +#endif + +template +struct ThrustGTOp { + __device__ bool operator()(const T& lhs, const T& rhs) const { + return (handleNaN && THCNumerics::isnan(lhs) && !THCNumerics::isnan(rhs)) || THCNumerics::gt(lhs, rhs); + } +}; + +template +struct ThrustLTOp { + __device__ bool operator()(const T& lhs, const T& rhs) const { + return (handleNaN && THCNumerics::isnan(rhs) && !THCNumerics::isnan(lhs)) || THCNumerics::lt(lhs, rhs); + } +}; + +template +struct ThrustSliceGTOp { +ThrustSliceGTOp(int64_t size) : sliceSize(size) {} + __device__ bool operator()(const thrust::tuple& lhs, const thrust::tuple& rhs) const { + IndT segA = (IndT)thrust::get<0>(lhs) / sliceSize; + IndT segB = (IndT)thrust::get<0>(rhs) / sliceSize; + if (segA != segB) + return segA < segB; + else + return (handleNaN && THCNumerics::isnan(thrust::get<1>(lhs)) && !THCNumerics::isnan(thrust::get<1>(rhs))) || THCNumerics::gt(thrust::get<1>(lhs), thrust::get<1>(rhs)); + } + const IndT sliceSize; +}; + +template +struct ThrustSliceLTOp { +ThrustSliceLTOp(int64_t size) : sliceSize(size) {} + __device__ bool operator()(const thrust::tuple& lhs, const thrust::tuple& rhs) const { + IndT segA = (IndT)thrust::get<0>(lhs) / sliceSize; + IndT segB = (IndT)thrust::get<0>(rhs) / sliceSize; + if (segA != segB) + return segA < segB; + else + return (handleNaN && THCNumerics::isnan(thrust::get<1>(rhs)) && !THCNumerics::isnan(thrust::get<1>(lhs))) || THCNumerics::lt(thrust::get<1>(lhs), thrust::get<1>(rhs)); + } + const IndT sliceSize; +}; + + + + +// `base` is the base address of a tensor +// For each slice (defined as a linear point of `out`, from 0 -> +// (sliceSize - 1) * sliceStride, we fill that slice from `0` to +// `sliceSize - 1`. 
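Taken together with the slice comparators above and the index-filling kernel that follows, the pattern is: give every element a within-slice position, then sort (global index, value) tuples with the slice id as the primary key. A rough host-side analogue, with illustrative names and no NaN handling, might look like this:

#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

int main() {
  // Two slices of length 4 stored back to back; pair each value with its global index.
  const std::size_t sliceSize = 4;
  std::vector<std::pair<std::size_t, float>> v = {
      {0, 3.f}, {1, 1.f}, {2, 2.f}, {3, 0.f},   // slice 0
      {4, 9.f}, {5, 7.f}, {6, 8.f}, {7, 6.f}};  // slice 1

  // Primary key: which slice the element belongs to; secondary key: the value.
  std::sort(v.begin(), v.end(), [&](const auto& a, const auto& b) {
    const std::size_t segA = a.first / sliceSize;
    const std::size_t segB = b.first / sliceSize;
    return segA != segB ? segA < segB : a.second < b.second;
  });

  for (const auto& e : v) std::printf("%.0f ", e.second);  // prints: 0 1 2 3 6 7 8 9
  std::printf("\n");
  return 0;
}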
+template +__global__ void +fillSliceWithIndex(TensorInfo out, + IndexType totalSlices, + IndexType sliceSize, + IndexType sliceStride) { + IndexType slice = getLinearBlockId(); + + if (slice >= totalSlices) { + return; + } + + const uint64_t offset = + IndexToOffset::get(slice, out); + int64_t* base = &out.data[offset]; + + for (int64_t i = threadIdx.x; i < sliceSize; i += blockDim.x) { + // Torch indices are 1-based (hence the +1) + base[i * sliceStride] = i; + } +} + +// For sorting in Thurst; extracts a within-slice index from a linear index +struct GlobalIndexToPerSliceIndex { + GlobalIndexToPerSliceIndex(int64_t size) : sliceSize(size) {} + + __device__ inline void operator()(int64_t& v) const { + v = v % sliceSize; + } + + const int64_t sliceSize; +}; + +void THCudaLongTensor_fillSliceWithIndex(THCState* state, + THCudaLongTensor* t, + int dim); +#endif // THC_TENSORSORT_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorTopK.cuh b/thirdparty/libtorch/include/THC/THCTensorTopK.cuh new file mode 100644 index 0000000000..4863cf90aa --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorTopK.cuh @@ -0,0 +1,144 @@ +#ifndef THC_TENSOR_TOPK_CUH +#define THC_TENSOR_TOPK_CUH + +#include +#include + +using namespace at::native; + +template +C10_LAUNCH_BOUNDS_1(1024) +__global__ void gatherTopK(TensorInfo input, + IndexType inputSliceSize, + IndexType outputSliceSize, // aka `k` + + IndexType numInputSlices, + IndexType inputWithinSliceStride, + + TensorInfo topK, + IndexType numTopKSlices, + IndexType topKWithinSliceStride, + + TensorInfo indices, + IndexType indicesWithinSliceStride) { + // Indices are limited to integer fp precision, so counts can fit in + // int32, regardless of IndexType +#ifdef __HIP_PLATFORM_HCC__ + __shared__ int smem[64]; +#else + __shared__ int smem[32]; // one per each warp, up to warp limit +#endif + + IndexType slice = getLinearBlockId(); + if (slice >= numInputSlices) { + return; + } + + // Find the start offset for our slice + IndexType sliceStartIndex = + IndexToOffset::get(slice, input); + IndexType topKSliceStartIndex = + IndexToOffset::get(slice, topK); + IndexType indicesSliceStartIndex = + IndexToOffset::get(slice, indices); + + T* inputSliceStart = &input.data[sliceStartIndex]; + T* topKSliceStart = &topK.data[topKSliceStartIndex]; + int64_t* indicesSliceStart = &indices.data[indicesSliceStartIndex]; + + // Find the k-th highest element in our input + T topKValue = ScalarConvert::to(0); + radixSelect::RadixType, IndexType, Order>( + inputSliceStart, outputSliceSize, + inputSliceSize, inputWithinSliceStride, + smem, &topKValue); + + // Every value that is strictly less/greater than `pattern` + // (depending on sort dir) in sorted int format is in the top-K. + // The top-K value itself might not be unique. + // + // Since there are a variable number of elements that we see that + // are within the top-k, we don't know at what index to write out + // the resulting values. + // In order to get this, we perform an exclusive prefix sum of + // `hasTopK`. This will return the resulting index into which we + // need to write the result, if a thread has a result. + + // All threads need to participate in the loop and the prefix sum, + // but not necessarily in the load; hence loop bounds being rounded + // up to a multiple of the block dim. 
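  // Small worked example of the write-index computation: if threads 0..4 of one pass
  // see hasTopK = {1, 0, 1, 1, 0}, the exclusive prefix sum yields index = {0, 1, 1, 2, 3}
  // with carry = 3, so the three hits write at writeIndexStart + 0, + 1 and + 2, and
  // every thread then advances writeIndexStart by 3 for the next pass.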
+ IndexType numIterations = THCRoundUp(inputSliceSize, (IndexType) blockDim.x); + IndexType writeIndexStart = 0; + + for (IndexType i = threadIdx.x; i < numIterations; i += blockDim.x) { + bool inRange = (i < inputSliceSize); + T v = + inRange ? doLdg(&inputSliceStart[i * inputWithinSliceStride]) : ScalarConvert::to(0); + bool hasTopK; + if (Order) { + hasTopK = inRange && (THCNumerics::gt(v, topKValue)); + } else { + hasTopK = inRange && (THCNumerics::lt(v, topKValue)); + } + + int index; + int carry; + exclusiveBinaryPrefixScan(smem, hasTopK, &index, &carry, AddOp()); + + if (hasTopK) { + int writeIndex = writeIndexStart + index; + assert(writeIndex < outputSliceSize); + + IndexType topKOffset = writeIndex * topKWithinSliceStride; + IndexType indexOffset = writeIndex * indicesWithinSliceStride; + + topKSliceStart[topKOffset] = v; + indicesSliceStart[indexOffset] = i; + } + + writeIndexStart += carry; + } + + // We need to fill in the rest with actual == top-K values. + // The number that we need is outputSliceSize - + // writeIndexStart. There might be more than that number available, + // in which case we have to choose the first seen set. We do this + // via a prefix sum to calculate indices for writing results. + assert(outputSliceSize >= writeIndexStart); + IndexType topKRemaining = (outputSliceSize - writeIndexStart); + + for (IndexType i = threadIdx.x; i < numIterations; i += blockDim.x) { + bool inRange = (i < inputSliceSize); + T v = + inRange ? doLdg(&inputSliceStart[i * inputWithinSliceStride]) : ScalarConvert::to(0); + bool hasTopK = inRange && (THCNumerics::eq(v, topKValue)); + + int index; + int carry; + exclusiveBinaryPrefixScan(smem, hasTopK, &index, &carry, AddOp()); + + if (hasTopK && index < topKRemaining) { + int writeIndex = writeIndexStart + index; + assert(writeIndex < outputSliceSize); + + IndexType topKOffset = writeIndex * topKWithinSliceStride; + IndexType indexOffset = writeIndex * indicesWithinSliceStride; + + topKSliceStart[topKOffset] = v; + indicesSliceStart[indexOffset] = i; + } + + if (carry >= topKRemaining) { + break; + } + + topKRemaining -= carry; + writeIndexStart += carry; + } +} + +#undef RADIX_BITS +#undef RADIX_SIZE +#undef RADIX_MASK + +#endif // THC_TENSOR_TOPK_CUH diff --git a/thirdparty/libtorch/include/THC/THCTensorTypeUtils.cuh b/thirdparty/libtorch/include/THC/THCTensorTypeUtils.cuh new file mode 100644 index 0000000000..23d7c8c232 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCTensorTypeUtils.cuh @@ -0,0 +1,83 @@ +#ifndef THC_TENSOR_TYPE_UTILS_INC +#define THC_TENSOR_TYPE_UTILS_INC + +#include +#include +#include +#include +#include +#include +#include + +/// A utility for accessing THCuda*Tensor types in a generic manner + +/// Equivalent to C++11's type_traits std::is_same; used for comparing +/// equality of types. Don't assume the existence of C++11 +template +struct SameType { + static const bool same = false; +}; + +template +struct SameType { + static const bool same = true; +}; + +template +bool isSameType() { + return SameType::same; +} + +// Utility function for constructing TensorInfo structs. In this case, the +// two template parameters are: +// +// 1. The TensorType, e.g. THCTensor in generic functions, or THCudaTensor, +// THCudaLongTensor etc. +// +// 2. The IndexType. This is always going to be an unsigned integral value, +// but depending on the size of the Tensor you may select uint16_t +// uint32_t, uint64_t etc. 
+// +// Internally we use the TensorUtils static functions to get the necessary +// dims, sizes, stride etc. +// +// For example, suppose we have a THCudaTensor t, with dim = 2, size = [3, 4], +// stride = [4, 1], offset = 8, and we set our index type to be unsigned int. +// Then we yield a TensorInfo struct templatized with float, unsigned int and +// the following fields: +// +// data is a float* to the underlying storage at position 8 +// dims is 2 +// sizes is a MAX_CUTORCH_DIMS element array with [3, 4] in its first two positions +// strides is a MAX_CUTORCH_DIMS element array with [4, 1] in its first two positions +// +// TensorInfos can then be passed to CUDA kernels, but we can use the static functions +// defined above to perform Tensor Operations that are appropriate for each +// TensorType. +template +TensorInfo +getTensorInfo(THCState* state, TensorType* t) { + IndexType sz[MAX_CUTORCH_DIMS]; + IndexType st[MAX_CUTORCH_DIMS]; + + int dims = THCTensor_nDimensionLegacyNoScalars(state, t); + for (int i = 0; i < dims; ++i) { + sz[i] = THTensor_sizeLegacyNoScalars(t, i); + st[i] = THTensor_strideLegacyNoScalars(t, i); + } + + return TensorInfo( + t->template data(), dims, sz, st); +} + +template +struct ScalarNegate { + static __host__ __device__ T to(const T v) { return -v; } +}; + +template +struct ScalarInv { + static __host__ __device__ T to(const T v) { return ((T) 1) / v; } +}; + +#endif // THC_TENSOR_TYPE_UTILS_INC diff --git a/thirdparty/libtorch/include/THC/THCThrustAllocator.cuh b/thirdparty/libtorch/include/THC/THCThrustAllocator.cuh new file mode 100644 index 0000000000..0e753222d6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/THCThrustAllocator.cuh @@ -0,0 +1,31 @@ +#ifndef THC_THRUST_ALLOCATOR_INC +#define THC_THRUST_ALLOCATOR_INC + +#include + +/// Allocator for Thrust to re-route its internal device allocations +/// to the THC allocator +class THCThrustAllocator { + public: + typedef char value_type; + + THCThrustAllocator(THCState* state) + : state_(state) { + } + + ~THCThrustAllocator() { + } + + char* allocate(std::ptrdiff_t size) { + return static_cast(THCudaMalloc(state_, size)); + } + + void deallocate(char* p, size_t size) { + THCudaFree(state_, p); + } + + private: + THCState* state_; +}; + +#endif // THC_THRUST_ALLOCATOR_INC diff --git a/thirdparty/libtorch/include/THC/generic/THCStorage.h b/thirdparty/libtorch/include/THC/generic/THCStorage.h new file mode 100644 index 0000000000..cbcdaf5f3e --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCStorage.h @@ -0,0 +1,53 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCStorage.h" +#else + +#define THCStorage THStorage + +// These used to be distinct types; for some measure of backwards compatibility and documentation +// alias these to the single THCStorage type. 
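// This is one of the TH-style "generic" headers: the THCGenerate*Type headers are
// expected to define scalar_t and the token-pasting THCStorage_(NAME) macro before
// re-including this file once per dtype, so each declaration below expands to a
// dtype-specific symbol (for example, THCStorage_(data) becomes something like
// THCudaStorage_data in the float instantiation).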
+#define THCudaStorage THCStorage +#define THCudaDoubleStorage THCStorage +#define THCudaHalfStorage THCStorage +#define THCudaByteStorage THCStorage +#define THCudaCharStorage THCStorage +#define THCudaShortStorage THCStorage +#define THCudaIntStorage THCStorage +#define THCudaLongStorage THCStorage +#define THCudaBoolStorage THCStorage +#define THCudaBFloat16Storage THCStorage + +THC_API scalar_t* THCStorage_(data)(THCState *state, const THCStorage*); +THC_API ptrdiff_t THCStorage_(size)(THCState *state, const THCStorage*); +THC_API int THCStorage_(elementSize)(THCState *state); + +/* slow access -- checks everything */ +THC_API void THCStorage_(set)(THCState *state, THCStorage*, ptrdiff_t, scalar_t); +THC_API scalar_t THCStorage_(get)(THCState *state, const THCStorage*, ptrdiff_t); + +THC_API THCStorage* THCStorage_(new)(THCState *state); +THC_API THCStorage* THCStorage_(newWithSize)(THCState *state, ptrdiff_t size); +THC_API THCStorage* THCStorage_(newWithSize1)(THCState *state, scalar_t); +THC_API THCStorage* THCStorage_(newWithSize2)(THCState *state, scalar_t, scalar_t); +THC_API THCStorage* THCStorage_(newWithSize3)(THCState *state, scalar_t, scalar_t, scalar_t); +THC_API THCStorage* THCStorage_(newWithSize4)(THCState *state, scalar_t, scalar_t, scalar_t, scalar_t); +THC_API THCStorage* THCStorage_(newWithMapping)(THCState *state, const char *filename, ptrdiff_t size, int shared); + +THC_API THCStorage* THCStorage_(newWithAllocator)( + THCState *state, ptrdiff_t size, + at::Allocator* allocator); +THC_API THCStorage* THCStorage_(newWithDataAndAllocator)( + THCState *state, at::DataPtr&& data, ptrdiff_t size, + at::Allocator* allocator); + +THC_API void THCStorage_(setFlag)(THCState *state, THCStorage *storage, const char flag); +THC_API void THCStorage_(clearFlag)(THCState *state, THCStorage *storage, const char flag); +THC_API void THCStorage_(retain)(THCState *state, THCStorage *storage); + +THC_API void THCStorage_(free)(THCState *state, THCStorage *storage); +THC_API void THCStorage_(resize)(THCState *state, THCStorage *storage, ptrdiff_t size); +THC_API void THCStorage_(fill)(THCState *state, THCStorage *storage, scalar_t value); + +THC_API int THCStorage_(getDevice)(THCState* state, const THCStorage* storage); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCStorageCopy.h b/thirdparty/libtorch/include/THC/generic/THCStorageCopy.h new file mode 100644 index 0000000000..16c5afd147 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCStorageCopy.h @@ -0,0 +1,45 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCStorageCopy.h" +#else + +/* Support for copy between different Storage types */ + +THC_API void THCStorage_(copy)(THCState *state, THCStorage *storage, THCStorage *src); +THC_API void THCStorage_(copyByte)(THCState *state, THCStorage *storage, struct THByteStorage *src); +THC_API void THCStorage_(copyChar)(THCState *state, THCStorage *storage, struct THCharStorage *src); +THC_API void THCStorage_(copyShort)(THCState *state, THCStorage *storage, struct THShortStorage *src); +THC_API void THCStorage_(copyInt)(THCState *state, THCStorage *storage, struct THIntStorage *src); +THC_API void THCStorage_(copyLong)(THCState *state, THCStorage *storage, struct THLongStorage *src); +THC_API void THCStorage_(copyFloat)(THCState *state, THCStorage *storage, struct THFloatStorage *src); +THC_API void THCStorage_(copyDouble)(THCState *state, THCStorage *storage, struct THDoubleStorage *src); +THC_API void THCStorage_(copyHalf)(THCState 
*state, THCStorage *storage, struct THHalfStorage *src); +THC_API void THCStorage_(copyBool)(THCState *state, THCStorage *storage, struct THBoolStorage *src); +THC_API void THCStorage_(copyBFloat16)(THCState *state, THCStorage *storage, struct THBFloat16Storage *src); + +THC_API void THCStorage_(copyCudaByte)(THCState *state, THCStorage *storage, struct THCudaByteStorage *src); +THC_API void THCStorage_(copyCudaChar)(THCState *state, THCStorage *storage, struct THCudaCharStorage *src); +THC_API void THCStorage_(copyCudaShort)(THCState *state, THCStorage *storage, struct THCudaShortStorage *src); +THC_API void THCStorage_(copyCudaInt)(THCState *state, THCStorage *storage, struct THCudaIntStorage *src); +THC_API void THCStorage_(copyCudaLong)(THCState *state, THCStorage *storage, struct THCudaLongStorage *src); +THC_API void THCStorage_(copyCudaFloat)(THCState *state, THCStorage *storage, struct THCudaStorage *src); +THC_API void THCStorage_(copyCudaDouble)(THCState *state, THCStorage *storage, struct THCudaDoubleStorage *src); +THC_API void THCStorage_(copyCudaHalf)(THCState *state, THCStorage *storage, struct THCudaHalfStorage *src); +THC_API void THCStorage_(copyCudaBool)(THCState *state, THCStorage *storage, struct THCudaBoolStorage *src); +THC_API void THCStorage_(copyCudaBFloat16)(THCState *state, THCStorage *storage, struct THCudaBFloat16Storage *src); + +THC_API void TH_CONCAT_2(THByteStorage_copyCuda , Real)(THCState *state, THByteStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THCharStorage_copyCuda , Real)(THCState *state, THCharStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THShortStorage_copyCuda , Real)(THCState *state, THShortStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THIntStorage_copyCuda , Real)(THCState *state, THIntStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THLongStorage_copyCuda , Real)(THCState *state, THLongStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THFloatStorage_copyCuda , Real)(THCState *state, THFloatStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THDoubleStorage_copyCuda, Real)(THCState *state, THDoubleStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THHalfStorage_copyCuda, Real)(THCState *state, THHalfStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THBoolStorage_copyCuda, Real)(THCState *state, THBoolStorage *self, struct THCStorage *src); +THC_API void TH_CONCAT_2(THBFloat16Storage_copyCuda, Real)(THCState *state, THBFloat16Storage *self, struct THCStorage *src); + +THC_API void THStorage_(copyCuda)(THCState *state, THStorage *self, THCStorage *src); +THC_API void THCStorage_(copyCuda)(THCState *state, THCStorage *self, THCStorage *src); +THC_API void THCStorage_(copyCPU)(THCState *state, THCStorage *self, THStorage *src); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensor.h b/thirdparty/libtorch/include/THC/generic/THCTensor.h new file mode 100644 index 0000000000..79e5b14d09 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensor.h @@ -0,0 +1,134 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensor.h" +#else + +#define THCTensor THTensor + +// These used to be distinct types; for some measure of backwards compatibility and documentation +// alias these to the single THCTensor type. 
+#define THCudaTensor THCTensor +#define THCudaDoubleTensor THCTensor +#define THCudaHalfTensor THCTensor +#define THCudaByteTensor THCTensor +#define THCudaCharTensor THCTensor +#define THCudaShortTensor THCTensor +#define THCudaIntTensor THCTensor +#define THCudaLongTensor THCTensor +#define THCudaBoolTensor THCTensor +#define THCudaBFloat16Tensor THCTensor + +/**** access methods ****/ +THC_API THCStorage* THCTensor_(storage)(THCState *state, const THCTensor *self); +THC_API ptrdiff_t THCTensor_(storageOffset)(THCState *state, const THCTensor *self); + +// See [NOTE: nDimension vs nDimensionLegacyNoScalars vs nDimensionLegacyAll] +THC_API int THCTensor_(nDimension)(THCState *state, const THCTensor *self); +THC_API int THCTensor_(nDimensionLegacyNoScalars)(THCState *state, const THCTensor *self); +THC_API int THCTensor_(nDimensionLegacyAll)(THCState *state, const THCTensor *self); + +THC_API int64_t THCTensor_(size)(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_(sizeLegacyNoScalars)(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_(stride)(THCState *state, const THCTensor *self, int dim); +THC_API int64_t THCTensor_(strideLegacyNoScalars)(THCState *state, const THCTensor *self, int dim); +THC_API scalar_t *THCTensor_(data)(THCState *state, const THCTensor *self); + +THC_API void THCTensor_(setFlag)(THCState *state, THCTensor *self, const char flag); +THC_API void THCTensor_(clearFlag)(THCState *state, THCTensor *self, const char flag); + + +/**** creation methods ****/ +THC_API THCTensor *THCTensor_(new)(THCState *state); +THC_API THCTensor *THCTensor_(newWithTensor)(THCState *state, THCTensor *tensor); +THC_API THCTensor *THCTensor_(newWithStorage1d)(THCState *state, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_); +THC_API THCTensor *THCTensor_(newWithStorage2d)(THCState *state, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_); +THC_API THCTensor *THCTensor_(newWithStorage3d)(THCState *state, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_); +THC_API THCTensor *THCTensor_(newWithStorage4d)(THCState *state, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_, + int64_t size3_, int64_t stride3_); + +/* stride might be NULL */ +THC_API THCTensor *THCTensor_(newWithSize1d)(THCState *state, int64_t size0_); +THC_API THCTensor *THCTensor_(newWithSize2d)(THCState *state, int64_t size0_, int64_t size1_); +THC_API THCTensor *THCTensor_(newWithSize3d)(THCState *state, int64_t size0_, int64_t size1_, int64_t size2_); +THC_API THCTensor *THCTensor_(newWithSize4d)(THCState *state, int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_); + +THC_API THCTensor *THCTensor_(newClone)(THCState *state, THCTensor *self); +THC_API THCTensor *THCTensor_(newContiguous)(THCState *state, THCTensor *tensor); +THC_API THCTensor *THCTensor_(newSelect)(THCState *state, THCTensor *tensor, int dimension_, int64_t sliceIndex_); +THC_API THCTensor *THCTensor_(newNarrow)(THCState *state, THCTensor *tensor, int dimension_, int64_t firstIndex_, int64_t size_); +THC_API THCTensor *THCTensor_(newTranspose)(THCState *state, THCTensor *tensor, int dimension1_, int dimension2_); +THC_API THCTensor *THCTensor_(newFoldBatchDim)(THCState *state, 
THCTensor *input); + +// resize* methods simply resize the storage. So they may not retain the current data at current indices. +// This is especially likely to happen when the tensor is not contiguous. In general, if you still need the +// values, unless you are doing some size and stride tricks, do not use resize*. +THC_API void THCTensor_(resizeNd)(THCState *state, THCTensor *tensor, int nDimension, const int64_t *size, const int64_t *stride); +THC_API void THCTensor_(resizeAs)(THCState *state, THCTensor *tensor, THCTensor *src); +THC_API void THCTensor_(resize0d)(THCState *state, THCTensor *tensor); +THC_API void THCTensor_(resize1d)(THCState *state, THCTensor *tensor, int64_t size0_); +THC_API void THCTensor_(resize2d)(THCState *state, THCTensor *tensor, int64_t size0_, int64_t size1_); +THC_API void THCTensor_(resize3d)(THCState *state, THCTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_); +THC_API void THCTensor_(resize4d)(THCState *state, THCTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_); +THC_API void THCTensor_(resize5d)(THCState *state, THCTensor *tensor, int64_t size0_, int64_t size1_, int64_t size2_, int64_t size3_, int64_t size4_); + +THC_API void THCTensor_(set)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(setStorageNd)(THCState *state, THCTensor *self, THCStorage *storage, ptrdiff_t storageOffset, int nDimension, const int64_t *size, const int64_t *stride); +THC_API void THCTensor_(setStorage1d)(THCState *state, THCTensor *self, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_); +THC_API void THCTensor_(setStorage2d)(THCState *state, THCTensor *self, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_); +THC_API void THCTensor_(setStorage3d)(THCState *state, THCTensor *self, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_); +THC_API void THCTensor_(setStorage4d)(THCState *state, THCTensor *self, THCStorage *storage_, ptrdiff_t storageOffset_, + int64_t size0_, int64_t stride0_, + int64_t size1_, int64_t stride1_, + int64_t size2_, int64_t stride2_, + int64_t size3_, int64_t stride3_); + +THC_API void THCTensor_(narrow)(THCState *state, THCTensor *self, THCTensor *src, int dimension_, int64_t firstIndex_, int64_t size_); +THC_API void THCTensor_(select)(THCState *state, THCTensor *self, THCTensor *src, int dimension_, int64_t sliceIndex_); +THC_API void THCTensor_(transpose)(THCState *state, THCTensor *self, THCTensor *src, int dimension1_, int dimension2_); + +THC_API void THCTensor_(squeeze1d)(THCState *state, THCTensor *self, THCTensor *src, int dimension_); +THC_API void THCTensor_(unsqueeze1d)(THCState *state, THCTensor *self, THCTensor *src, int dimension_); + +THC_API int THCTensor_(isContiguous)(THCState *state, const THCTensor *self); +THC_API int THCTensor_(isSameSizeAs)(THCState *state, const THCTensor *self, const THCTensor *src); +THC_API ptrdiff_t THCTensor_(nElement)(THCState *state, const THCTensor *self); + +THC_API void THCTensor_(retain)(THCState *state, THCTensor *self); +THC_API void THCTensor_(free)(THCState *state, THCTensor *self); +THC_API void THCTensor_(freeCopyTo)(THCState *state, THCTensor *self, THCTensor *dst); + +/* Slow access methods [check everything] */ +THC_API void THCTensor_(set0d)(THCState *state, THCTensor *tensor, scalar_t value); +THC_API void 
THCTensor_(set1d)(THCState *state, THCTensor *tensor, int64_t x0, scalar_t value); +THC_API void THCTensor_(set2d)(THCState *state, THCTensor *tensor, int64_t x0, int64_t x1, scalar_t value); +THC_API void THCTensor_(set3d)(THCState *state, THCTensor *tensor, int64_t x0, int64_t x1, int64_t x2, scalar_t value); +THC_API void THCTensor_(set4d)(THCState *state, THCTensor *tensor, int64_t x0, int64_t x1, int64_t x2, int64_t x3, scalar_t value); + +THC_API scalar_t THCTensor_(get0d)(THCState *state, const THCTensor *tensor); +THC_API scalar_t THCTensor_(get1d)(THCState *state, const THCTensor *tensor, int64_t x0); +THC_API scalar_t THCTensor_(get2d)(THCState *state, const THCTensor *tensor, int64_t x0, int64_t x1); +THC_API scalar_t THCTensor_(get3d)(THCState *state, const THCTensor *tensor, int64_t x0, int64_t x1, int64_t x2); +THC_API scalar_t THCTensor_(get4d)(THCState *state, const THCTensor *tensor, int64_t x0, int64_t x1, int64_t x2, int64_t x3); + +/* CUDA-specific functions */ +THC_API int THCTensor_(getDevice)(THCState *state, const THCTensor *self); +THC_API int THCTensor_(checkGPU)(THCState *state, unsigned int nTensors, ...); + +/* debug methods */ +THC_API THCDescBuff THCTensor_(sizeDesc)(THCState *state, const THCTensor *tensor); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorCopy.h b/thirdparty/libtorch/include/THC/generic/THCTensorCopy.h new file mode 100644 index 0000000000..896b2635d7 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorCopy.h @@ -0,0 +1,11 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorCopy.h" +#else + +THC_API void THCTensor_(copy)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(copyIgnoringOverlaps)(THCState *state, THCTensor *self, THCTensor *src); + +THC_API void THCTensor_(copyAsyncCPU)(THCState *state, THCTensor *self, THTensor *src); +THC_API void THTensor_(copyAsyncCuda)(THCState *state, THTensor *self, THCTensor *src); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorIndex.h b/thirdparty/libtorch/include/THC/generic/THCTensorIndex.h new file mode 100644 index 0000000000..c97e3b373a --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorIndex.h @@ -0,0 +1,15 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorIndex.h" +#else + +THC_API void THCTensor_(indexCopy)(THCState *state, THCTensor *res_, int dim, THCudaLongTensor *indices, THCTensor *src); +THC_API void THCTensor_(indexFill)(THCState *state, THCTensor *tensor, int dim, THCudaLongTensor *index, scalar_t val); +THC_API void THCTensor_(indexSelect)(THCState *state, THCTensor *tensor, THCTensor *src, int dim, THCudaLongTensor *index); +THC_API void THCTensor_(take)(THCState *state, THCTensor *res_, THCTensor *src, THCudaLongTensor *index); +THC_API void THCTensor_(put)(THCState *state, THCTensor *res_, THCudaLongTensor *indices, THCTensor *src, int accumulate); + +#if !defined(THC_REAL_IS_BOOL) /* non bool only part */ +THC_API void THCTensor_(indexAdd)(THCState *state, THCTensor *res_, int dim, THCudaLongTensor *indices, THCTensor *src); +#endif + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMasked.h b/thirdparty/libtorch/include/THC/generic/THCTensorMasked.h new file mode 100644 index 0000000000..57eac2b4f6 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMasked.h @@ -0,0 +1,54 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMasked.h" +#else + +THC_API void 
THCTensor_(maskedFill)(THCState *state, + THCTensor *tensor, + THCudaByteTensor *mask, + scalar_t value); + + +THC_API void THCTensor_(maskedFillBool)(THCState *state, + THCTensor *tensor, + THCudaBoolTensor *mask, + scalar_t value); + +// FIXME: remove now that we have THCudaByteTensor? +THC_API void THCTensor_(maskedFillByte)(THCState *state, + THCTensor *tensor, + THByteTensor *mask, + scalar_t value); + +THC_API void THCTensor_(maskedCopy)(THCState *state, + THCTensor *tensor, + THCudaByteTensor *mask, + THCTensor *src); + +THC_API void THCTensor_(maskedCopyBool)(THCState *state, + THCTensor *tensor, + THCudaBoolTensor *mask, + THCTensor *src); + +// FIXME: remove now that we have THCudaByteTensor? +THC_API void THCTensor_(maskedCopyByte)(THCState *state, + THCTensor *tensor, + THByteTensor *mask, + THCTensor *src); + +THC_API void THCTensor_(maskedSelect)(THCState *state, + THCTensor *tensor, + THCTensor *src, + THCudaByteTensor *mask); + +THC_API void THCTensor_(maskedSelectBool)(THCState *state, + THCTensor *tensor, + THCTensor *src, + THCudaBoolTensor *mask); + +// FIXME: remove now that we have THCudaByteTensor? +THC_API void THCTensor_(maskedSelectByte)(THCState *state, + THCTensor *tensor, + THCTensor *src, + THByteTensor *mask); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMath.h b/thirdparty/libtorch/include/THC/generic/THCTensorMath.h new file mode 100644 index 0000000000..a565cec208 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMath.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMath.h" +#else + +THC_API void THCTensor_(fill)(THCState *state, THCTensor *self, scalar_t value); +THC_API void THCTensor_(zero)(THCState *state, THCTensor *self); +THC_API void THCTensor_(cat)(THCState *state, THCTensor *result, THCTensor *ta, THCTensor *tb, int dimension); +THC_API void THCTensor_(catArray)(THCState *state, THCTensor *result, THCTensor **inputs, int numInputs, int dimension); +THC_API void THCTensor_(nonzero)(THCState* state, THCudaLongTensor *tensor, THCTensor *self); +THC_API ptrdiff_t THCTensor_(numel)(THCState *state, THCTensor *t); + +#if !defined(THC_REAL_IS_BOOL) /* non bool only part */ + +THC_API void THCTensor_(triu)(THCState *state, THCTensor *self, THCTensor *src, int64_t k); +THC_API void THCTensor_(diag)(THCState *state, THCTensor *self, THCTensor *src, int64_t k); +THC_API accreal THCTensor_(trace)(THCState *state, THCTensor *self); + +#endif + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathBlas.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathBlas.h new file mode 100644 index 0000000000..98608b016b --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathBlas.h @@ -0,0 +1,12 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathBlas.h" +#else + +THC_API accreal THCTensor_(dot)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(addmv)(THCState *state, THCTensor *self, THCTensor *t, THCTensor *mat, THCTensor *vec, scalar_t beta, scalar_t alpha); +THC_API void THCTensor_(addmm)(THCState *state, THCTensor *self, THCTensor *t, THCTensor *mat1, THCTensor *mat2, scalar_t beta, scalar_t alpha); +THC_API void THCTensor_(addr)(THCState *state, THCTensor *self, THCTensor *t, THCTensor *vec1, THCTensor *vec2, scalar_t beta, scalar_t alpha); +THC_API void THCTensor_(addbmm)(THCState *state, THCTensor *result, THCTensor *t, THCTensor *batch1, THCTensor *batch2, scalar_t beta, 
scalar_t alpha); +THC_API void THCTensor_(baddbmm)(THCState *state, THCTensor *result, THCTensor *t, THCTensor *batch1, THCTensor *batch2, scalar_t beta, scalar_t alpha); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathMagma.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathMagma.h new file mode 100644 index 0000000000..ae46a62c9e --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathMagma.h @@ -0,0 +1,15 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathMagma.h" +#else + +#if defined(THC_REAL_IS_FLOAT) || defined(THC_REAL_IS_DOUBLE) + +// MAGMA (i.e. CUDA implementation of LAPACK functions) +THC_API void THCTensor_(gels)(THCState *state, THCTensor *rb_, THCTensor *ra_, THCTensor *b_, THCTensor *a_); +THC_API void THCTensor_(geev)(THCState *state, THCTensor *re_, THCTensor *rv_, THCTensor *a_, bool eigenvectors); +THC_API void THCTensor_(potri)(THCState *state, THCTensor *ra_, THCTensor *a, bool upper); +THC_API void THCTensor_(geqrf)(THCState *state, THCTensor *ra_, THCTensor *rtau_, THCTensor *a_); + +#endif // defined(THC_REAL_IS_FLOAT) || defined(THC_REAL_IS_DOUBLE) + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathPairwise.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathPairwise.h new file mode 100644 index 0000000000..246be6be70 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathPairwise.h @@ -0,0 +1,21 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathPairwise.h" +#else + +THC_API int THCTensor_(equal)(THCState *state, THCTensor *self, THCTensor *src); + +THC_API void THCTensor_(bitand)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(bitor)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); + +#if !defined(THC_REAL_IS_BOOL) + +THC_API void THCTensor_(mul)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(div)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(lshift)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(rshift)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(fmod)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(remainder)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); + +#endif + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathPointwise.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathPointwise.h new file mode 100644 index 0000000000..5eb631b90c --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathPointwise.h @@ -0,0 +1,43 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathPointwise.h" +#else + +THC_API void THCTensor_(cbitand)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cbitor)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); + +THC_API void THCTensor_(cmax)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cmin)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cmaxValue)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); +THC_API void THCTensor_(cminValue)(THCState *state, THCTensor *self, THCTensor *src, scalar_t value); + +#if !defined(THC_REAL_IS_BOOL) + 
+#if defined(THC_REAL_IS_FLOAT) || defined(THC_REAL_IS_DOUBLE) || defined(THC_REAL_IS_HALF) + +THC_API void THCTensor_(exp)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(cos)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(cosh)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(tan)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(atan)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(tanh)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(erf)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(erfc)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(sqrt)(THCState *state, THCTensor *self, THCTensor *src); +THC_API void THCTensor_(cinv)(THCState *state, THCTensor *self, THCTensor *src); + +#endif + +THC_API void THCTensor_(clamp)(THCState *state, THCTensor *self, THCTensor *src, scalar_t min_value, scalar_t max_value); +THC_API void THCTensor_(crossKernel)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2, int dimension); + +THC_API void THCTensor_(cadd)(THCState *state, THCTensor *self, THCTensor *src1, scalar_t value, THCTensor *src2); +THC_API void THCTensor_(csub)(THCState *state, THCTensor *self, THCTensor *src1, scalar_t value, THCTensor *src2); +THC_API void THCTensor_(cmul)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cdiv)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(clshift)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(crshift)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cfmod)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); +THC_API void THCTensor_(cremainder)(THCState *state, THCTensor *self, THCTensor *src1, THCTensor *src2); + +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathReduce.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathReduce.h new file mode 100644 index 0000000000..dd0e188a47 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathReduce.h @@ -0,0 +1,50 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathReduce.h" +#else + +THC_API accreal THCTensor_(sumall)(THCState *state, THCTensor *self); + +THC_API void THCTensor_(min)(THCState *state, + THCTensor *values, + THCudaLongTensor *indices, + THCTensor *src, int dim, int keepdim); +THC_API void THCTensor_(max)(THCState *state, + THCTensor *values, + THCudaLongTensor *indices, + THCTensor *src, int dim, int keepdim); + +THC_API scalar_t THCTensor_(minall)(THCState *state, THCTensor *self); +THC_API scalar_t THCTensor_(maxall)(THCState *state, THCTensor *self); + +#if !defined(THC_REAL_IS_BOOL) + +#if defined(THC_REAL_IS_FLOAT) || defined(THC_REAL_IS_DOUBLE) || defined(THC_REAL_IS_HALF) + +THC_API void THCTensor_(renorm)(THCState *state, THCTensor* self, THCTensor* src, scalar_t value, int dimension, scalar_t max_norm); +THC_API void THCTensor_(std_single)(THCState *state, THCTensor *self, THCTensor *src, int dim, bool unbiased, int keepdim); +THC_API void THCTensor_(norm)(THCState *state, THCTensor* self, THCTensor* src, scalar_t value, int dimension, int keepdim); +THC_API void THCTensor_(var_single)(THCState *state, THCTensor *self, THCTensor *src, int dim, bool unbiased, int keepdim); + 
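// (The *_single reductions above operate over a single dimension `dim`; the *_all
// variants below reduce over every element of the tensor and return a scalar.)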
+THC_API accreal THCTensor_(std_all)(THCState *state, THCTensor *self, bool unbiased); +THC_API accreal THCTensor_(normall)(THCState *state, THCTensor *self, scalar_t value); +THC_API accreal THCTensor_(var_all)(THCState *state, THCTensor *self, bool unbiased); + +#endif + +THC_API void THCTensor_(prod)(THCState *state, THCTensor *self, THCTensor *src, int dim, int keepdim); + +THC_API accreal THCTensor_(meanall)(THCState *state, THCTensor *self); + +THC_API scalar_t THCTensor_(medianall)(THCState *state, THCTensor *self); + +THC_API void THCTensor_(median)(THCState *state, + THCTensor *values, + THCudaLongTensor *indices, + THCTensor *src, int dim, int keepdim); + +THC_API accreal THCTensor_(dist)(THCState *state, THCTensor *self, THCTensor *src, + scalar_t value); + +#endif + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMathScan.h b/thirdparty/libtorch/include/THC/generic/THCTensorMathScan.h new file mode 100644 index 0000000000..9346dbce71 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMathScan.h @@ -0,0 +1,8 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMathScan.h" +#else + +THC_API void THCTensor_(cumsum)(THCState *state, THCTensor *self, THCTensor *src, int dim); +THC_API void THCTensor_(cumprod)(THCState *state, THCTensor *self, THCTensor *src, int dim); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorMode.h b/thirdparty/libtorch/include/THC/generic/THCTensorMode.h new file mode 100644 index 0000000000..796eb66e53 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorMode.h @@ -0,0 +1,14 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorMode.h" +#else + +/* Returns the mode, and index of the mode, for the set of values + * along a given dimension in the input tensor. 
*/ +THC_API void THCTensor_(mode)(THCState *state, + THCTensor *values, + THCudaLongTensor *indices, + THCTensor *input, + int dimension, + int keepdim); + +#endif // THC_GENERIC_FILE diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorRandom.h b/thirdparty/libtorch/include/THC/generic/THCTensorRandom.h new file mode 100644 index 0000000000..cb359001af --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorRandom.h @@ -0,0 +1,13 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorRandom.h" +#else + +#include "ATen/core/Generator.h" + +#if defined(THC_REAL_IS_FLOAT) || defined(THC_REAL_IS_DOUBLE) || defined(THC_REAL_IS_HALF) + +THC_API void THCTensor_(multinomialAliasSetup)(struct THCState *state, THCTensor *probs, THCudaLongTensor *J, THCTensor *q); +THC_API void THCTensor_(multinomialAliasDraw)(THCState *state, THCudaLongTensor *self, THCTensor *_q, THCudaLongTensor *_J, int n_sample, at::Generator* gen_); + +#endif +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorScatterGather.h b/thirdparty/libtorch/include/THC/generic/THCTensorScatterGather.h new file mode 100644 index 0000000000..fb87cfc3e4 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorScatterGather.h @@ -0,0 +1,10 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorScatterGather.h" +#else + +THC_API void THCTensor_(gather)(THCState* state, THCTensor *tensor, THCTensor *src, int dim, THCudaLongTensor *index); +THC_API void THCTensor_(scatter)(THCState* state, THCTensor *tensor, int dim, THCudaLongTensor *index, THCTensor *src); +THC_API void THCTensor_(scatterAdd)(THCState* state, THCTensor *tensor, int dim, THCudaLongTensor *index, THCTensor *src); +THC_API void THCTensor_(scatterFill)(THCState* state, THCTensor *tensor, int dim, THCudaLongTensor *index, scalar_t value); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorSort.h b/thirdparty/libtorch/include/THC/generic/THCTensorSort.h new file mode 100644 index 0000000000..d6569a3dd7 --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorSort.h @@ -0,0 +1,20 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorSort.h" +#else + +/* Performs an in-place sort of (keys, values). 
Only works for slice sizes + <= 2048 at the moment (slice size == size of keys/values dim `dim`) */ +THC_API void THCTensor_(sortKeyValueInplace)(THCState* state, + THCTensor* keys, + THCudaLongTensor* values, + int dim, bool dir); + +/* Performs an out-of-place sort of `input`, returning the per-slice indices + in `indices` and the sorted values in `sorted` */ +THC_API void THCTensor_(sort)(THCState* state, + THCTensor* sorted, + THCudaLongTensor* indices, + THCTensor* input, + int dim, int order); + +#endif diff --git a/thirdparty/libtorch/include/THC/generic/THCTensorTopK.h b/thirdparty/libtorch/include/THC/generic/THCTensorTopK.h new file mode 100644 index 0000000000..ffe6e959ea --- /dev/null +++ b/thirdparty/libtorch/include/THC/generic/THCTensorTopK.h @@ -0,0 +1,13 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THC/generic/THCTensorTopK.h" +#else + +/* Returns the set of all kth smallest (or largest) elements, depending */ +/* on `dir` */ +THC_API void THCTensor_(topk)(THCState* state, + THCTensor* topK, + THCudaLongTensor* indices, + THCTensor* input, + int64_t k, int dim, int dir, int sorted); + +#endif // THC_GENERIC_FILE diff --git a/thirdparty/libtorch/include/THCUNN/SharedMem.cuh b/thirdparty/libtorch/include/THCUNN/SharedMem.cuh new file mode 100644 index 0000000000..8d83d9f9a9 --- /dev/null +++ b/thirdparty/libtorch/include/THCUNN/SharedMem.cuh @@ -0,0 +1,43 @@ +// Based on the simpleTemplates CUDA example + +#ifndef THCUNN_SHAREDMEM_H +#define THCUNN_SHAREDMEM_H + +template <typename T> +struct SharedMem { + __device__ T *getPointer() + { + extern __device__ void error(void); + error(); + return NULL; + } +}; + +template <> +struct SharedMem<half> +{ + __device__ half *getPointer() { + extern __shared__ half s_half[]; + return s_half; + } +}; + +template <> +struct SharedMem<float> +{ + __device__ float *getPointer() { + extern __shared__ float s_float[]; + return s_float; + } +}; + +template <> +struct SharedMem<double> +{ + __device__ double *getPointer() { + extern __shared__ double s_double[]; + return s_double; + } +}; + +#endif diff --git a/thirdparty/libtorch/include/THCUNN/THCHalfAutoNumerics.cuh b/thirdparty/libtorch/include/THCUNN/THCHalfAutoNumerics.cuh new file mode 100644 index 0000000000..05ffab5ff2 --- /dev/null +++ b/thirdparty/libtorch/include/THCUNN/THCHalfAutoNumerics.cuh @@ -0,0 +1,46 @@ +#ifndef THC_HALF_AUTO_NUMERICS_INC +#define THC_HALF_AUTO_NUMERICS_INC + +#include +#include + +// WARNING: THCNumerics is being deprecated. Read the comments and function usage +// in THCNumerics to learn about the deprecation +// +// Half numerics functions defined as free functions, so cunn code can be +// written generically, i.e. without excessive calling of THCNumerics functions. + +// these functions should move to THCNumerics + +inline __host__ __device__ THHalf fmaxType(THHalf x, THHalf y) { + return THCNumerics<THHalf>::ge(x, y) ? 

x : y; +} + +inline __host__ __device__ float fmaxType(float x, THHalf y) { + return fmaxf(x, ScalarConvert::to(y)); +} + +inline __host__ __device__ float fmaxType(float x, float y) { + return fmaxf(x, y); +} + +inline __host__ __device__ double fmaxType(double x, double y) { + return fmax(x, y); +} + + +// arithmetic functions + +inline __host__ __device__ THHalf exp(THHalf a) { + return THCNumerics::exp(a); +} + +inline __host__ __device__ THHalf pow(THHalf a, THHalf b) { + return THCNumerics::pow(a, b); +} + +inline __host__ __device__ THHalf tanh(THHalf a) { + return THCNumerics::tanh(a); +} + +#endif diff --git a/thirdparty/libtorch/include/THCUNN/generic/THCUNN.h b/thirdparty/libtorch/include/THCUNN/generic/THCUNN.h new file mode 100644 index 0000000000..fc329b6e93 --- /dev/null +++ b/thirdparty/libtorch/include/THCUNN/generic/THCUNN.h @@ -0,0 +1,326 @@ +#ifndef THC_GENERIC_FILE +#define THC_GENERIC_FILE "THCUNN/generic/THCUNN.h" +#else + +#include + +THC_API void THNN_(BCECriterion_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *target, + THCTensor *output, + int64_t reduction, + THCTensor *weights); // [OPTIONAL] + +THC_API void THNN_(BCECriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + int64_t reduction, + THCTensor *weights); // [OPTIONAL] + +THC_API void THNN_(ClassNLLCriterion_updateOutput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *output, + int64_t reduction, + THCTensor *weights, // [OPTIONAL] + THCTensor *total_weight, + int64_t ignore_index); + +THC_API void THNN_(ClassNLLCriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + int64_t reduction, + THCTensor *weights, // [OPTIONAL] + THCTensor *total_weight, + int64_t ignore_index); + +THC_API void THNN_(ELU_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + accreal alpha, + accreal scale, + accreal input_scale, + bool inplace); + +THC_API void THNN_(ELU_updateGradInput)( + THCState *state, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *output, + accreal alpha, + accreal scale, + accreal input_scale); + +THC_API void THNN_(HardTanh_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + accreal min_val, + accreal max_val, + bool inplace); + +THC_API void THNN_(HardTanh_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + accreal min_val, + accreal max_val, + bool inplace); + +THC_API void THNN_(GatedLinear_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + int dim); + +THC_API void THNN_(GatedLinear_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + int dim); + +THC_API void THNN_(LeakyReLU_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + accreal negval, + bool inplace); + +THC_API void THNN_(LeakyReLU_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + accreal negval, + bool inplace); + +THC_API void THNN_(LogSigmoid_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + THCTensor *buffer); + +THC_API void THNN_(LogSigmoid_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *buffer); + +THC_API void 
THNN_(MultiLabelMarginCriterion_updateOutput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *output, + THCTensor *is_target, + int64_t reduction); + +THC_API void THNN_(MultiLabelMarginCriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *is_target, + int64_t reduction); + +THC_API void THNN_(MultiMarginCriterion_updateOutput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *output, + int64_t reduction, + int p, + THCTensor *weights, // [OPTIONAL] + accreal margin); + +THC_API void THNN_(MultiMarginCriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + int64_t reduction, + int p, + THCTensor *weights, // [OPTIONAL] + accreal margin); + +THC_API void THNN_(SpatialClassNLLCriterion_updateOutput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *output, + int64_t reduction, + THCTensor *weights, // [OPTIONAL] + THCTensor *total_weight, + int64_t ignore_index); + +THC_API void THNN_(SpatialClassNLLCriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCIndexTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + int64_t reduction, + THCTensor *weights, // [OPTIONAL] + THCTensor *total_weight, + int64_t ignore_index); + +THC_API void THNN_(SpatialConvolutionMM_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + THCTensor *weight, + THCTensor *bias, // [OPTIONAL] + THCTensor *columns, + THCTensor *ones, + int kW, int kH, + int dW, int dH, + int padW, int padH); + +THC_API void THNN_(SpatialConvolutionMM_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *weight, + THCTensor *columns, + THCTensor *ones, + int kW, int kH, + int dW, int dH, + int padW, int padH); + +THC_API void THNN_(SpatialConvolutionMM_accGradParameters)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradWeight, + THCTensor *gradBias, // [OPTIONAL] + THCTensor *columns, + THCTensor *ones, + int kW, int kH, + int dW, int dH, + int padW, int padH, + accreal scale); + +THC_API void THNN_(SpatialDepthwiseConvolution_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + THCTensor *weight, + THCTensor *bias, // [OPTIONAL] + int kW, int kH, + int dW, int dH, + int padW, int padH, + int dilationW, int dilationH); + +THC_API void THNN_(SpatialDepthwiseConvolution_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *weight, + int kW, int kH, + int dW, int dH, + int padW, int padH, + int dilationW, int dilationH); + +THC_API void THNN_(SpatialDepthwiseConvolution_accGradParameters)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradWeight, + int kW, int kH, + int dW, int dH, + int padW, int padH, + int dilationW, int dilationH); + +THC_API void THNN_(RReLU_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + THCTensor *noise, + double lower, + double upper, + bool train, + bool inplace, + void *generator); + +THC_API void THNN_(RReLU_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *noise, + double lower, + double upper, + bool train, + bool inplace); + +THC_API void THNN_(SoftMarginCriterion_updateOutput)( 
+ THCState *state, + THCTensor *input, + THCTensor *target, + THCTensor *output, + int64_t reduction); + +THC_API void THNN_(SoftMarginCriterion_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *target, + THCTensor *gradOutput, + THCTensor *gradInput, + int64_t reduction); + +THC_API void THNN_(SoftPlus_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + accreal beta, + accreal threshold); + +THC_API void THNN_(SoftPlus_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *output, + accreal beta, + accreal threshold); + +THC_API void THNN_(SoftShrink_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output, + accreal lambda); + +THC_API void THNN_(SoftShrink_updateGradInput)( + THCState *state, + THCTensor *input, + THCTensor *gradOutput, + THCTensor *gradInput, + accreal lambda); + +THC_API void THNN_(Tanh_updateOutput)( + THCState *state, + THCTensor *input, + THCTensor *output); + +THC_API void THNN_(Tanh_updateGradInput)( + THCState *state, + THCTensor *gradOutput, + THCTensor *gradInput, + THCTensor *output); + +#endif diff --git a/thirdparty/libtorch/include/THNN/generic/THNN.h b/thirdparty/libtorch/include/THNN/generic/THNN.h new file mode 100644 index 0000000000..7113805ec5 --- /dev/null +++ b/thirdparty/libtorch/include/THNN/generic/THNN.h @@ -0,0 +1,172 @@ +#ifndef TH_GENERIC_FILE +#define TH_GENERIC_FILE "THNN/generic/THNN.h" +#else + +#include +#include +#include + +#if !defined(TH_REAL_IS_LONG) + +TH_API void THNN_(BCECriterion_updateOutput)( + THNNState *state, + THTensor *input, + THTensor *target, + THTensor *output, + int64_t reduction, + THTensor *weights); // [OPTIONAL] +TH_API void THNN_(BCECriterion_updateGradInput)( + THNNState *state, + THTensor *input, + THTensor *target, + THTensor *gradOutput, + THTensor *gradInput, + int64_t reduction, + THTensor *weights); // [OPTIONAL] + +TH_API void THNN_(ELU_updateOutput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *output, // [OUT] ELU output + accreal alpha, // an ELU parameter (as in paper) + accreal scale, // scaling factor for output + accreal input_scale, // scaling factor for input + bool inplace); // if true, modifies gradOutput and sets gradInput onto it (no additional memory is allocated) +TH_API void THNN_(ELU_updateGradInput)( + THNNState *state, // library's state + THTensor *gradOutput, // gradient w.r.t. output + THTensor *gradInput, // [OUT] gradient w.r.t. input + THTensor *output, // output from a forward pass + accreal alpha, // an ELU parameter (as in paper) + accreal scale, + accreal input_scale); + +TH_API void THNN_(GatedLinear_updateOutput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *output, // [OUT] output tensor, half size of input along dimension dim + int dim); // dimension for halving operation +TH_API void THNN_(GatedLinear_updateGradInput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *gradOutput, // gradient w.r.t module's output + THTensor *gradInput, // [OUT] gradient w.r.t input + int dim); // dimension for halving operation + +// HardTanh clamps the values to the interval [min_val; max_val]. 
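// That is, output = min(max(input, min_val), max_val); in the backward pass the
// gradient is propagated only for inputs that lie inside the interval.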
+TH_API void THNN_(HardTanh_updateOutput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *output, // [OUT] output tensor + accreal min_val, // lower threshold + accreal max_val, // upper threshold + bool inplace); +TH_API void THNN_(HardTanh_updateGradInput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *gradOutput, // gradient w.r.t. module's output + THTensor *gradInput, // [OUT] gradient w.r.t. the input + accreal min_val, // lower threshold + accreal max_val, // upper threshold + bool inplace); + +TH_API void THNN_(LeakyReLU_updateOutput)( + THNNState *state, // library's state + THTensor *input, // [MODIFIED] input tensor + THTensor *output, // [OUT] output tensor + accreal negval, // negative part slope + bool inplace); // if true, modifies the input tensor and sets the output tensor on it (no additional memory is allocated) +TH_API void THNN_(LeakyReLU_updateGradInput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *gradOutput, // [MODIFIED] gradient w.r.t. module's output + THTensor *gradInput, // [OUT] gradient w.r.t. the input + accreal negval, // negative part slope + bool inplace); // if true, modifies gradOutput and sets gradInput onto it (no additional memory is allocated) + +TH_API void THNN_(LogSigmoid_updateOutput)( + THNNState *state, // library's state + THTensor *input, // input tensor + THTensor *output, // output tensor + THTensor *buffer); // [BUFFER] +TH_API void THNN_(LogSigmoid_updateGradInput)( + THNNState *state, // library's state + THTensor *input, // input + THTensor *gradOutput, // gradient w.r.t. module's output + THTensor *gradInput, // [OUT] gradient w.r.t. input + THTensor *buffer); // [BUFFER] + +TH_API void THNN_(SoftMarginCriterion_updateOutput)( + THNNState *state, + THTensor *input, + THTensor *target, + THTensor *output, + int64_t reduction); + +TH_API void THNN_(SoftMarginCriterion_updateGradInput)( + THNNState *state, + THTensor *input, + THTensor *target, + THTensor *gradOutput, + THTensor *gradInput, + int64_t reduction); + +TH_API void THNN_(RReLU_updateOutput)( + THNNState *state, + THTensor *input, + THTensor *output, + THTensor *noise, + accreal lower, + accreal upper, + bool train, + bool inplace, + at::Generator *generator); +TH_API void THNN_(RReLU_updateGradInput)( + THNNState *state, + THTensor *input, + THTensor *gradOutput, + THTensor *gradInput, + THTensor *noise, + accreal lower, + accreal upper, + bool train, + bool inplace); + +TH_API void THNN_(SoftPlus_updateOutput)( + THNNState *state, + THTensor *input, THTensor *output, + accreal beta, + accreal threshold); +TH_API void THNN_(SoftPlus_updateGradInput)( + THNNState *state, + THTensor *input, + THTensor *gradOutput, + THTensor *gradInput, + THTensor *output, + accreal beta, + accreal threshold); + +TH_API void THNN_(SoftShrink_updateOutput)( + THNNState *state, + THTensor *input, + THTensor *output, + accreal lambda); +TH_API void THNN_(SoftShrink_updateGradInput)( + THNNState *state, + THTensor *input, + THTensor *gradOutput, + THTensor *gradInput, + accreal lambda); + +TH_API void THNN_(Tanh_updateOutput)( + THNNState *state, + THTensor *input, + THTensor *output); +TH_API void THNN_(Tanh_updateGradInput)( + THNNState *state, + THTensor *gradOutput, + THTensor *gradInput, + THTensor *output); + +#endif +#endif diff --git a/thirdparty/libtorch/include/c10/core/Allocator.h b/thirdparty/libtorch/include/c10/core/Allocator.h new file mode 100644 index 
0000000000..06b77c7b95 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Allocator.h @@ -0,0 +1,217 @@ +#pragma once + +#include +#include + +#include +#include +#include + +namespace c10 { + +// A DataPtr is a unique pointer (with an attached deleter and some +// context for the deleter) to some memory, which also records what +// device is for its data. +// +// nullptr DataPtrs can still have a nontrivial device; this allows +// us to treat zero-size allocations uniformly with non-zero allocations. +// +class C10_API DataPtr { + private: + c10::detail::UniqueVoidPtr ptr_; + Device device_; + + public: + // Choice of CPU here is arbitrary; if there's an "undefined" device + // we could use that too + DataPtr() : ptr_(), device_(DeviceType::CPU) {} + DataPtr(void* data, Device device) : ptr_(data), device_(device) {} + DataPtr(void* data, void* ctx, DeleterFnPtr ctx_deleter, Device device) + : ptr_(data, ctx, ctx_deleter), device_(device) {} + void* operator->() const { + return ptr_.get(); + } + void clear() { + ptr_.clear(); + } + void* get() const { + return ptr_.get(); + } + void* get_context() const { + return ptr_.get_context(); + } + void* release_context() { + return ptr_.release_context(); + } + std::unique_ptr&& move_context() { + return ptr_.move_context(); + } + operator bool() const { + return static_cast(ptr_); + } + template + T* cast_context(DeleterFnPtr expected_deleter) const { + return ptr_.cast_context(expected_deleter); + } + DeleterFnPtr get_deleter() const { + return ptr_.get_deleter(); + } + /** + * Compare the deleter in a DataPtr to expected_deleter. + * If it matches, replace the deleter with new_deleter + * and return true; otherwise, does nothing and returns + * false. + * + * In general, it is not safe to unconditionally set the + * deleter on a DataPtr, because you don't know what + * the deleter is, and thus will have a hard time properly + * disposing of the deleter without storing the original + * deleter (this is difficult to do, because DeleterFnPtr + * is not a closure, and because the context on DataPtr is + * only a single word, you generally don't have enough + * space to store both the original deleter and its context). + * However, in some cases, you know /exactly/ what the deleter + * is, and you have a new deleter that manually wraps + * the old one. In this case, you can safely swap the deleter + * after asserting that the deleters line up. + * + * What are the requirements on new_deleter? It must still + * properly dispose of the void* pointer passed in as its argument, + * where void* is whatever the context of the original deleter + * is. So in general, you expect the new deleter to look something + * like this: + * + * [](void* ptr) { + * some_new_stuff(ptr); + * get_orig_allocator()->raw_deleter(ptr); + * } + * + * Note that it won't work to close over the original + * allocator; you don't have enough space to do that! Also, + * it's unsafe to assume that the passed in pointer in + * question is the memory pointer in question; it might not + * be; be sure to read the source code of the Allocator + * in question to confirm this. + */ + C10_NODISCARD bool compare_exchange_deleter(DeleterFnPtr expected_deleter, DeleterFnPtr new_deleter) { + return ptr_.compare_exchange_deleter(expected_deleter, new_deleter); + } + Device device() const { + return device_; + } + // Unsafely mutates the device on a DataPtr. Under normal use, + // you should never actually need to call this function. 
+ // We need this for the implementation of the hack detailed + // in Note [Masquerading as CUDA] + void unsafe_set_device(Device device) { + device_ = device; + } +}; + +// NB: Device is NOT tested for here; a CUDA nullptr is as much a nullptr as a +// CPU nullptr + +inline bool operator==(const DataPtr& dp, std::nullptr_t) noexcept { + return !dp; +} +inline bool operator==(std::nullptr_t, const DataPtr& dp) noexcept { + return !dp; +} +inline bool operator!=(const DataPtr& dp, std::nullptr_t) noexcept { + return dp; +} +inline bool operator!=(std::nullptr_t, const DataPtr& dp) noexcept { + return dp; +} + +// Note [raw_allocate/raw_deallocate and Thrust] +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Thrust's support for custom allocators requires us to write something +// like this: +// +// class ThrustAllocator { +// char* allocate(size_t); +// void deallocate(char*, size_t); +// }; +// +// This is not good for our unique_ptr based allocator interface, as +// there is no way to get to the context when we free. +// +// However, in some cases the context is exactly the same as +// the data pointer. In this case, we can support the "raw" +// allocate and deallocate interface. This is what +// raw_deleter signifies. By default, it returns a nullptr, which means that +// the raw interface is not implemented. Be sure to implement it whenever +// possible, or the raw interface will incorrectly reported as unsupported, +// when it is actually possible. + +struct C10_API Allocator { + virtual ~Allocator() = default; + + virtual DataPtr allocate(size_t n) const = 0; + + // If this returns a non nullptr, it means that allocate() + // is guaranteed to return a unique_ptr with this deleter attached; + // it means the rawAllocate and rawDeallocate APIs are safe to use. + // This function MUST always return the same BoundDeleter. + virtual DeleterFnPtr raw_deleter() const { + return nullptr; + } + void* raw_allocate(size_t n) { + auto dptr = allocate(n); + AT_ASSERT(dptr.get() == dptr.get_context()); + return dptr.release_context(); + } + void raw_deallocate(void* ptr) { + auto d = raw_deleter(); + AT_ASSERT(d); + d(ptr); + } +}; + +// This context is used to generate DataPtr which have arbitrary +// std::function deleters associated with them. In some user facing +// functions, we give a (user-friendly) interface for constructing +// tensors from external data which take an arbitrary std::function +// deleter. Grep for InefficientStdFunctionContext to find these +// occurrences. +// +// This context is inefficient because we have to do a dynamic +// allocation InefficientStdFunctionContext, on top of the dynamic +// allocation which is implied by std::function itself. +struct C10_API InefficientStdFunctionContext { + std::unique_ptr> ptr_; + InefficientStdFunctionContext( + std::unique_ptr>&& ptr) + : ptr_(std::move(ptr)) {} + static DataPtr makeDataPtr( + void* ptr, + const std::function& deleter, + Device device); +}; + +/** Set the allocator for DeviceType `t`. The passed in allocator pointer is + * expected to have static lifetime; this function does NOT take ownership + * of the raw pointer. (The reason for this is to prevent existing pointers + * to an allocator of a particular device from being invalidated when + * SetAllocator is called.) + * + * Also note that this is not thread-safe, and we assume this function will + * only be called during initialization. 
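 *
 * As a purely illustrative sketch (MyCpuAllocator is a hypothetical name, not
 * part of this header), a minimal Allocator built from the declarations above
 * could be installed as the CPU allocator during startup like so:
 *
 *   struct MyCpuAllocator final : c10::Allocator {
 *     static void deleter(void* p) { c10::free_cpu(p); }
 *     c10::DataPtr allocate(size_t n) const override {
 *       void* p = c10::alloc_cpu(n);  // alloc_cpu/free_cpu are declared in c10/core/CPUAllocator.h
 *       return {p, p, &deleter, c10::Device(c10::DeviceType::CPU)};
 *     }
 *     c10::DeleterFnPtr raw_deleter() const override { return &deleter; }
 *   };
 *
 *   static MyCpuAllocator g_my_cpu_allocator;  // static lifetime, as required above
 *   c10::SetAllocator(c10::DeviceType::CPU, &g_my_cpu_allocator);
 *
 * The REGISTER_ALLOCATOR macro declared below performs the same SetAllocator
 * call through a static AllocatorRegisterer.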
+ */ +C10_API void SetAllocator(DeviceType t, Allocator* alloc); +C10_API Allocator* GetAllocator(const DeviceType& t); + +template +struct AllocatorRegisterer { + explicit AllocatorRegisterer(Allocator* alloc) { + SetAllocator(t, alloc); + } +}; + +#define REGISTER_ALLOCATOR(t, f) \ + namespace { \ + static AllocatorRegisterer g_allocator_d(f); \ + } + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/Backend.h b/thirdparty/libtorch/include/c10/core/Backend.h new file mode 100644 index 0000000000..26a277374c --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Backend.h @@ -0,0 +1,289 @@ +#pragma once + +#include +#include +#include + +#include + +namespace c10 { + +/** + * This legacy enum class defines the set of backends supported by old school, + * code generated Type-based ATen. A "backend" in this sense roughly + * corresponds to the cartesian product of (device type, layout), but restricted + * only to combinations which we actually have kernels for. Backend does NOT + * include dtype. + * + * The reason we are sunsetting this enum class is because it doesn't allow for + * open registration; e.g., if you want to add SparseXLA, you'd have to + * edit this enum; you wouldn't be able to do it out of tree. TensorTypeId is + * the replacement for Backend which supports open registration. + * + * NB: The concept of 'Backend' here disagrees with the notion of backend + * exposed to users in torch.backends. Backend here is something like "CPU" + * or "SparseCUDA"; backend in torch.backends is something like "MKL" or + * "CUDNN". + */ +enum class Backend { CPU, CUDA, HIP, SparseCPU, SparseCUDA, SparseHIP, MSNPU, XLA, QuantizedCPU, ComplexCPU, ComplexCUDA, Undefined, MkldnnCPU, NumOptions }; + +static inline Backend toSparse(Backend b) { + switch (b) { + case Backend::CPU: + return Backend::SparseCPU; + case Backend::CUDA: + return Backend::SparseCUDA; + case Backend::HIP: + return Backend::SparseHIP; + case Backend::SparseCPU: + return Backend::SparseCPU; + case Backend::SparseCUDA: + return Backend::SparseCUDA; + case Backend::SparseHIP: + return Backend::SparseHIP; + default: + throw std::runtime_error("Unknown backend"); + } +} + +static inline Backend toDense(Backend b) { + switch (b) { + case Backend::CPU: + return Backend::CPU; + case Backend::CUDA: + return Backend::CUDA; + case Backend::HIP: + return Backend::HIP; + case Backend::MSNPU: + return Backend::MSNPU; + case Backend::XLA: + return Backend::XLA; + case Backend::SparseCPU: + return Backend::CPU; + case Backend::SparseCUDA: + return Backend::CUDA; + case Backend::SparseHIP: + return Backend::HIP; + case Backend::QuantizedCPU: + return Backend::QuantizedCPU; + case Backend::ComplexCPU: + return Backend::ComplexCPU; + case Backend::ComplexCUDA: + return Backend::ComplexCUDA; + default: + throw std::runtime_error("Unknown backend"); + } +} + +static inline Backend tensorTypeIdToBackend(TensorTypeId t) { + if (t == TensorTypeId::CPUTensorId) { + return Backend::CPU; + } else if (t == TensorTypeId::CUDATensorId) { + return Backend::CUDA; + } else if (t == TensorTypeId::HIPTensorId) { + return Backend::HIP; + } else if (t == TensorTypeId::MSNPUTensorId) { + return Backend::MSNPU; + } else if (t == TensorTypeId::XLATensorId) { + return Backend::XLA; + } else if (t == TensorTypeId::SparseCPUTensorId) { + return Backend::SparseCPU; + } else if (t == TensorTypeId::SparseCUDATensorId) { + return Backend::SparseCUDA; + } else if (t == TensorTypeId::SparseHIPTensorId) { + return Backend::SparseHIP; + } else if (t 
== TensorTypeId::MkldnnCPUTensorId) { + return Backend::MkldnnCPU; + } else if (t == TensorTypeId::QuantizedCPUTensorId) { + return Backend::QuantizedCPU; + } else if (t == TensorTypeId::ComplexCPUTensorId) { + return Backend::ComplexCPU; + } else if (t == TensorTypeId::ComplexCUDATensorId) { + return Backend::ComplexCUDA; + } else if (t == TensorTypeId::UndefinedTensorId) { + return Backend::Undefined; + } else { + AT_ERROR("Unrecognized tensor type ID: ", t); + } +} + +static inline TensorTypeId backendToTensorTypeId(Backend b) { + switch (b) { + case Backend::CPU: + return TensorTypeId::CPUTensorId; + case Backend::CUDA: + return TensorTypeId::CUDATensorId; + case Backend::HIP: + return TensorTypeId::HIPTensorId; + case Backend::MSNPU: + return TensorTypeId::MSNPUTensorId; + case Backend::XLA: + return TensorTypeId::XLATensorId; + case Backend::SparseCPU: + return TensorTypeId::SparseCPUTensorId; + case Backend::SparseCUDA: + return TensorTypeId::SparseCUDATensorId; + case Backend::SparseHIP: + return TensorTypeId::SparseHIPTensorId; + case Backend::MkldnnCPU: + return TensorTypeId::MkldnnCPUTensorId; + case Backend::QuantizedCPU: + return TensorTypeId::QuantizedCPUTensorId; + case Backend::ComplexCPU: + return TensorTypeId::ComplexCPUTensorId; + case Backend::ComplexCUDA: + return TensorTypeId::ComplexCUDATensorId; + case Backend::Undefined: + return TensorTypeId::UndefinedTensorId; + default: + throw std::runtime_error("Unknown backend"); + } +} + +static inline DeviceType backendToDeviceType(Backend b) { + switch (b) { + case Backend::CPU: + return DeviceType::CPU; + case Backend::CUDA: + return DeviceType::CUDA; + case Backend::HIP: + return DeviceType::HIP; + case Backend::MSNPU: + return DeviceType::MSNPU; + case Backend::XLA: + return DeviceType::XLA; + case Backend::SparseCPU: + return DeviceType::CPU; + case Backend::SparseCUDA: + return DeviceType::CUDA; + case Backend::SparseHIP: + return DeviceType::HIP; + case Backend::MkldnnCPU: + case Backend::QuantizedCPU: + case Backend::ComplexCPU: + return DeviceType::CPU; + case Backend::ComplexCUDA: + return DeviceType::CUDA; + case Backend::Undefined: + AT_ERROR("Undefined backend is not a valid device type"); + default: + AT_ERROR("Unknown backend"); + } +} + +static inline Backend backendToCPU(Backend b) { + switch (b) { + case Backend::CPU: + return Backend::CPU; + case Backend::CUDA: + return Backend::CPU; + case Backend::HIP: + return Backend::CPU; + case Backend::SparseCPU: + return Backend::SparseCPU; + case Backend::SparseCUDA: + return Backend::SparseCPU; + case Backend::SparseHIP: + return Backend::SparseCPU; + case Backend::MSNPU: + case Backend::XLA: + return Backend::CPU; + case Backend::MkldnnCPU: + return Backend::MkldnnCPU; + case Backend::QuantizedCPU: + return Backend::QuantizedCPU; + case Backend::ComplexCPU: + case Backend::ComplexCUDA: + return Backend::ComplexCPU; + case Backend::Undefined: + return Backend::Undefined; + default: + AT_ERROR("Unknown backend"); + } +} + +static inline Backend backendToCUDA(Backend b) { + switch (b) { + case Backend::CPU: + case Backend::CUDA: + case Backend::HIP: + case Backend::MSNPU: + case Backend::XLA: + return Backend::CUDA; + case Backend::SparseCPU: + case Backend::SparseCUDA: + case Backend::SparseHIP: + return Backend::SparseCUDA; + case Backend::ComplexCPU: + case Backend::ComplexCUDA: + return Backend::ComplexCUDA; + case Backend::Undefined: + return Backend::Undefined; + default: + AT_ERROR("Unknown backend"); + } +} + +static inline Backend backendToHIP(Backend b) 
{ + switch (b) { + case Backend::CPU: + case Backend::CUDA: + case Backend::HIP: + case Backend::MSNPU: + case Backend::XLA: + return Backend::HIP; + case Backend::SparseCPU: + case Backend::SparseCUDA: + case Backend::SparseHIP: + return Backend::SparseHIP; + case Backend::Undefined: + return Backend::Undefined; + default: + AT_ERROR("Unknown backend"); + } +} + +// TODO: This probably shouldn't actually be static inline +static inline const char* toString(Backend b) { + switch (b) { + case Backend::CPU: + return "CPU"; + case Backend::CUDA: + return "CUDA"; + case Backend::HIP: + return "HIP"; + case Backend::MSNPU: + return "MSNPU"; + case Backend::XLA: + return "XLA"; + case Backend::SparseCPU: + return "SparseCPU"; + case Backend::SparseCUDA: + return "SparseCUDA"; + case Backend::SparseHIP: + return "SparseHIP"; + case Backend::MkldnnCPU: + return "MkldnnCPU"; + case Backend::QuantizedCPU: + return "QuantizedCPU"; + case Backend::ComplexCPU: + return "ComplexCPU"; + case Backend::ComplexCUDA: + return "ComplexCUDA"; + default: + return "UNKNOWN_BACKEND"; + } +} + +static inline bool isSparse(Backend b) { + switch (b) { + case Backend::SparseCPU: + case Backend::SparseCUDA: + case Backend::SparseHIP: + return true; + default: + return false; + } +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/CPUAllocator.h b/thirdparty/libtorch/include/c10/core/CPUAllocator.h new file mode 100644 index 0000000000..c0fe490a07 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/CPUAllocator.h @@ -0,0 +1,42 @@ +#pragma once + +#include +#include + +#include +#include +#include + +// TODO: rename to c10 +C10_DECLARE_bool(caffe2_report_cpu_memory_usage); +C10_DECLARE_bool(caffe2_cpu_allocator_do_zero_fill); +C10_DECLARE_bool(caffe2_cpu_allocator_do_junk_fill); + +namespace c10 { + +// Use 64-byte alignment should be enough for computation up to AVX512. +constexpr size_t gAlignment = 64; + +using MemoryDeleter = void (*)(void*); + +// A helper function that is basically doing nothing. +C10_API void NoDelete(void*); + +// Fill the data memory region of num bytes with a particular garbage pattern. +// The garbage value is chosen to be NaN if interpreted as floating point value, +// or a very large integer. +C10_API void memset_junk(void* data, size_t num); + +C10_API void* alloc_cpu(size_t nbytes); +C10_API void free_cpu(void* data); + +// Get the CPU Allocator. +C10_API at::Allocator* GetCPUAllocator(); +// Sets the CPU allocator to the given allocator: the caller gives away the +// ownership of the pointer. +C10_API void SetCPUAllocator(at::Allocator* alloc); + +// Get the Default CPU Allocator +C10_API at::Allocator* GetDefaultCPUAllocator(); + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/CopyBytes.h b/thirdparty/libtorch/include/c10/core/CopyBytes.h new file mode 100644 index 0000000000..c49763f69d --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/CopyBytes.h @@ -0,0 +1,44 @@ +#pragma once + +#include + +namespace c10 { + +using CopyBytesFunction = void (*)( + size_t nbytes, + const void* src, + Device src_device, + void* dst, + Device dst_device); + +struct C10_API _CopyBytesFunctionRegisterer { + _CopyBytesFunctionRegisterer( + DeviceType from, + DeviceType to, + CopyBytesFunction func_sync, + CopyBytesFunction func_async = nullptr); +}; + +#define REGISTER_COPY_BYTES_FUNCTION(from, to, ...) 
\ + namespace { \ + static _CopyBytesFunctionRegisterer C10_ANONYMOUS_VARIABLE( \ + g_copy_function)(from, to, __VA_ARGS__); \ + } + +/* + * WARNING: Implementations for this function are currently registered from + * ATen and caffe2, not yet from c10. Don't use this if not either ATen + * or caffe2 is present as well. + * We can't move them yet, because the CUDA implementations aren't unified yet + * between ATen and caffe2. + * We're planning to move the implementations into c10/backend/xxx + * to make c10 self contained again. + */ +C10_API void CopyBytes( + size_t nbytes, + const void* src, + Device src_device, + void* dst, + Device dst_device, + bool async); +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/DefaultDtype.h b/thirdparty/libtorch/include/c10/core/DefaultDtype.h new file mode 100644 index 0000000000..ad0f3cb4ef --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/DefaultDtype.h @@ -0,0 +1,12 @@ +#pragma once + +#include + +namespace caffe2 { +class TypeMeta; +} // namespace caffe2 + +namespace c10 { +C10_API void set_default_dtype(caffe2::TypeMeta dtype); +C10_API const caffe2::TypeMeta& get_default_dtype(); +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/DefaultTensorOptions.h b/thirdparty/libtorch/include/c10/core/DefaultTensorOptions.h new file mode 100644 index 0000000000..7bf96525c1 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/DefaultTensorOptions.h @@ -0,0 +1,36 @@ +#pragma once + +#include +#include +#include +#include + +namespace c10 { + +struct TensorOptions; + +/// Like TensorOptions, but all fields are guaranteed to be filled. +struct DefaultTensorOptions { + DefaultTensorOptions() = default; + + caffe2::TypeMeta dtype() const noexcept { return dtype_; } + Device device() const noexcept { return device_; } + Layout layout() const noexcept { return layout_; } + bool requires_grad() const noexcept { return requires_grad_; } + + // Defined in TensorOptions.h + inline DefaultTensorOptions& merge(const TensorOptions& options); + + private: + caffe2::TypeMeta dtype_ = caffe2::TypeMeta::Make(); // 64-bit + Device device_ = at::kCPU; // 32-bit + Layout layout_ = at::kStrided; // 8-bit + bool requires_grad_ = false; // 8-bit +}; + +inline const DefaultTensorOptions& getDefaultTensorOptions() { + static const auto options = DefaultTensorOptions(); + return options; +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/Device.h b/thirdparty/libtorch/include/c10/core/Device.h new file mode 100644 index 0000000000..f1249e865f --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Device.h @@ -0,0 +1,132 @@ +#pragma once + +#include +#include +#include + +#include +#include +#include +#include + +namespace c10 { + +/// An index representing a specific device; e.g., the 1 in GPU 1. +/// A DeviceIndex is not independently meaningful without knowing +/// the DeviceType it is associated; try to use Device rather than +/// DeviceIndex directly. +using DeviceIndex = int16_t; + +/// Represents a a compute device on which a tensor is located. A device is +/// uniquely identified by a type, which specifies the type of machine it is +/// (e.g. CPU or CUDA GPU), and a device index or ordinal, which identifies the +/// specific compute device when there is more than one of a certain type. The +/// device index is optional, and in its defaulted state represents (abstractly) +/// "the current device". 
Further, there are two constraints on the value of the +/// device index, if one is explicitly stored: +/// 1. A negative index represents the current device, a non-negative index +/// represents a specific, concrete device, +/// 2. When the device type is CPU, the device index must be zero. +struct C10_API Device final { + using Type = DeviceType; + + /// Constructs a new `Device` from a `DeviceType` and an optional device + /// index. + /* implicit */ Device(DeviceType type, DeviceIndex index = -1) + : type_(type), index_(index) { + validate(); + } + + /// Constructs a `Device` from a string description, for convenience. + /// The string supplied must follow the following schema: + /// `(cpu|cuda)[:]` + /// where `cpu` or `cuda` specifies the device type, and + /// `:` optionally specifies a device index. + /* implicit */ Device(const std::string& device_string); + + /// Returns true if the type and index of this `Device` matches that of + /// `other`. + bool operator==(const Device& other) const noexcept { + return this->type_ == other.type_ && this->index_ == other.index_; + } + + /// Returns true if the type or index of this `Device` differs from that of + /// `other`. + bool operator!=(const Device& other) const noexcept { + return !(*this == other); + } + + /// Sets the device index. + void set_index(DeviceIndex index) { + index_ = index; + } + + /// Returns the type of device this is. + DeviceType type() const noexcept { + return type_; + } + + /// Returns the optional index. + DeviceIndex index() const noexcept { + return index_; + } + + /// Returns true if the device has a non-default index. + bool has_index() const noexcept { + return index_ != -1; + } + + /// Return true if the device is of CUDA type. + bool is_cuda() const noexcept { + return type_ == DeviceType::CUDA; + } + + /// Return true if the device is of CPU type. + bool is_cpu() const noexcept { + return type_ == DeviceType::CPU; + } + + /// Same string as returned from operator<<. + std::string str() const; + + private: + DeviceType type_; + DeviceIndex index_ = -1; + void validate() { + TORCH_CHECK(index_ == -1 || index_ >= 0, + "Device index must be -1 or non-negative, got ", index_); + TORCH_CHECK(!is_cpu() || index_ <= 0, + "CPU device index must be -1 or zero, got ", index_); + } +}; + +C10_API std::ostream& operator<<( + std::ostream& stream, + const Device& device); + +} // namespace c10 + +namespace std { +template <> +struct hash { + size_t operator()(c10::Device d) const noexcept { + // Are you here because this static assert failed? Make sure you ensure + // that the bitmasking code below is updated accordingly! + static_assert(sizeof(c10::DeviceType) == 2, "DeviceType is not 16-bit"); + static_assert(sizeof(c10::DeviceIndex) == 2, "DeviceIndex is not 16-bit"); + // Note [Hazard when concatenating signed integers] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // We must first convert to a same-sized unsigned type, before promoting to + // the result type, to prevent sign extension when any of the values is -1. + // If sign extension occurs, you'll clobber all of the values in the MSB + // half of the resulting integer. + // + // Technically, by C/C++ integer promotion rules, we only need one of the + // uint32_t casts to the result type, but we put in both for explicitness's sake. 
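+    // Illustration (editor's note, not upstream code): the packed layout is
+    // [type : upper 16 bits][index : lower 16 bits]. For example,
+    // Device(DeviceType::CUDA, 1) (CUDA == 1) hashes the value
+    //   (uint32_t(1) << 16) | uint32_t(uint16_t(1)) == 0x00010001.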
+ uint32_t bits = + static_cast(static_cast(d.type())) << 16 + | static_cast(static_cast(d.index())); + return std::hash{}(bits); + } +}; +} // namespace std diff --git a/thirdparty/libtorch/include/c10/core/DeviceGuard.h b/thirdparty/libtorch/include/c10/core/DeviceGuard.h new file mode 100644 index 0000000000..852d6366eb --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/DeviceGuard.h @@ -0,0 +1,184 @@ +#pragma once + +#include + +namespace c10 { + +/// RAII guard that sets a certain default device in its constructor, and +/// changes it back to the device that was originally active upon destruction. +/// +/// The device is always reset to the one that was active at the time of +/// construction of the guard. Even if you `set_device` after construction, the +/// destructor will still reset the device to the one that was active at +/// construction time. +/// +/// This device guard does NOT have an uninitialized state; it is guaranteed +/// to reset a device on exit. If you are in a situation where you *might* +/// want to setup a guard (i.e., are looking for the moral equivalent +/// of optional), see OptionalDeviceGuard. +class DeviceGuard { +public: + /// No default constructor; see Note [Omitted default constructor from RAII] + explicit DeviceGuard() = delete; + + /// Set the current device to the passed Device. + explicit DeviceGuard(Device device) : guard_(device) {} + + /// This constructor is for testing only. + explicit DeviceGuard(Device device, const impl::DeviceGuardImplInterface* impl) : guard_(device, impl) {} + + /// Copy is disallowed + DeviceGuard(const DeviceGuard&) = delete; + DeviceGuard& operator=(const DeviceGuard&) = delete; + + /// Move is disallowed, as DeviceGuard does not have an uninitialized state, + /// which is required for moves on types with nontrivial destructors. + DeviceGuard(DeviceGuard&& other) = delete; + DeviceGuard& operator=(DeviceGuard&& other) = delete; + + /// Sets the device to the given one. The specified device must be consistent + /// with the device type originally specified during guard construction. + /// + /// TODO: The consistency check here is inconsistent with StreamGuard's + /// behavior with set_stream, where a stream on a different device than + /// the original one isn't an error; we just reset the stream and then + /// switch devices. + void reset_device(at::Device device) { + guard_.reset_device(device); + } + + /// This method is for testing only. + void reset_device(at::Device device, const impl::DeviceGuardImplInterface* impl) { + guard_.reset_device(device, impl); + } + + /// Sets the device index to the given one. The device type is inferred + /// from the original device type the guard was constructed with. + void set_index(DeviceIndex index) { + guard_.set_index(index); + } + + /// Returns the device that was set at the time the guard was constructed. + Device original_device() const { + return guard_.original_device(); + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device. + Device current_device() const { + return guard_.current_device(); + } + +private: + impl::InlineDeviceGuard guard_; +}; + +/** + * A OptionalDeviceGuard is an RAII class that sets a device to some value on + * initialization, and resets the device to its original value on destruction. + * Morally, a OptionalDeviceGuard is equivalent to optional, but + * with extra constructors and methods as appropriate. 
+ * + * Besides its obvious use (optionally applying a DeviceGuard), OptionalDeviceGuard + * is often also used for the following idiom: + * + * OptionalDeviceGuard g; + * for (const auto& t : tensors) { + * g.set_device(t.device()); + * do_something_with(t); + * } + * + * This usage is marginally more efficient than constructing a DeviceGuard every + * iteration of the for loop, as it avoids an unnecessary device reset. + * + * Unlike DeviceGuard, a OptionalDeviceGuard may be uninitialized. This occurs + * when you use the nullary constructor, or pass a nullopt to the constructor. + * Uninitialized OptionalDeviceGuards do *nothing*; they do not know what the + * original device was and they do not reset on destruction. This is why + * original_device() and current_device() return optional rather than + * Device (as they do in DeviceGuard), and also is why we didn't just + * provide OptionalDeviceGuard by default and hide DeviceGuard from users. + * + * The semantics of an OptionalDeviceGuard are exactly explained by thinking + * of it as an optional. In particular, an initialized + * OptionalDeviceGuard doesn't restore device to its value at construction; it + * restores device to its value *at initialization*. So if you have the + * program: + * + * setDevice(1); + * OptionalDeviceGuard g; + * setDevice(2); + * g.reset_device(Device(DeviceType::CUDA, 3)); // initializes! + * + * On destruction, g will reset device to 2, rather than 1. + * + * An uninitialized OptionalDeviceGuard is distinct from a (initialized) + * DeviceGuard whose original_device_ and current_device_ match, since the + * DeviceGuard will still reset the device to original_device_. + */ +class OptionalDeviceGuard { +public: + /// Create an uninitialized guard. Set the guard later using reset_device. + explicit OptionalDeviceGuard() : guard_() {} + + /// Initialize the guard, setting the current device to the passed Device. + explicit OptionalDeviceGuard(Device device) : guard_(device) {} + + /// Initialize the guard if a Device is passed; otherwise leave the + /// guard uninitialized. + explicit OptionalDeviceGuard(optional device) : guard_(device) {} + + /// Constructor for testing only. + explicit OptionalDeviceGuard(Device device, const impl::DeviceGuardImplInterface* impl) : guard_(device, impl) {} + + /// Copy is disallowed + OptionalDeviceGuard(const OptionalDeviceGuard&) = delete; + OptionalDeviceGuard& operator=(const OptionalDeviceGuard&) = delete; + + /// Move is disallowed + /// See Note [Explicit initialization of optional fields] + /// and // Note [Move construction for RAII guards is tricky] + /// for rationale. + OptionalDeviceGuard(OptionalDeviceGuard&& other) = delete; + OptionalDeviceGuard& operator=(OptionalDeviceGuard&& other) = delete; + + /// Sets the device to the given one. The specified device must be consistent + /// with the device type originally specified during guard construction. + void reset_device(at::Device device) { + guard_.reset_device(device); + } + + /// For testing only + void reset_device(at::Device device, const impl::DeviceGuardImplInterface* impl) { + guard_.reset_device(device, impl); + } + + /// Returns the device that was set at the time the guard was constructed. + optional original_device() const { + return guard_.original_device(); + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via reset_device. 
+ optional current_device() const { + return guard_.current_device(); + } + +private: + impl::InlineOptionalDeviceGuard guard_; +}; + +// Note [Whither the DeviceGuard boilerplate] +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Design note: in principle, we could avoid these wrappers using: +// +// using DeviceGuard = impl::InlineDeviceGuard; +// using OptionalDeviceGuard = impl::InlineOptionalDeviceGuard; +// +// But the error messages are worse, and our users can't just look at the +// header file to find out what's going on. Furthermore, for specializations +// like CUDAStreamGuard, it can be profitable to replace some interfaces with +// refined types (e.g., return CUDAStream instead of Stream). So, we eat +// the boilerplate and write out the API explicitly. + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/DeviceType.h b/thirdparty/libtorch/include/c10/core/DeviceType.h new file mode 100644 index 0000000000..9f759666d7 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/DeviceType.h @@ -0,0 +1,70 @@ +#pragma once + +// This is directly synchronized with caffe2/proto/caffe2.proto, but +// doesn't require me to figure out how to get Protobuf headers into +// ATen/core (which would require a lot more build system hacking.) +// If you modify me, keep me synchronized with that file. + +#include + +#include +#include + +namespace c10 { + +enum class DeviceType : int16_t { + CPU = 0, + CUDA = 1, // CUDA. + MKLDNN = 2, // Reserved for explicit MKLDNN + OPENGL = 3, // OpenGL + OPENCL = 4, // OpenCL + IDEEP = 5, // IDEEP. + HIP = 6, // AMD HIP + FPGA = 7, // FPGA + MSNPU = 8, // MSNPU + XLA = 9, // XLA / TPU + // NB: If you add more devices: + // - Change the implementations of DeviceTypeName and isValidDeviceType + // in DeviceType.cpp + // - Change the number below + COMPILE_TIME_MAX_DEVICE_TYPES = 10, + ONLY_FOR_TEST = 20901, // This device type is only for test. +}; + +constexpr DeviceType kCPU = DeviceType::CPU; +constexpr DeviceType kCUDA = DeviceType::CUDA; +constexpr DeviceType kHIP = DeviceType::HIP; +constexpr DeviceType kMSNPU = DeviceType::MSNPU; +constexpr DeviceType kXLA = DeviceType::XLA; + +// define explicit int constant +constexpr int COMPILE_TIME_MAX_DEVICE_TYPES = + static_cast(DeviceType::COMPILE_TIME_MAX_DEVICE_TYPES); + +static_assert(COMPILE_TIME_MAX_DEVICE_TYPES <= 16, + "Hey! You seem to be adding a lot of new DeviceTypes. The intent was " + "for this constant to reflect the actual number of DeviceTypes we support " + "in PyTorch; it's important that this number is not too large as we " + "use this to allocate stack arrays in some places in our code. If you " + "are indeed just adding the 17th device type, feel free to change " + "the check to 32; but if you are adding some sort of extensible device " + "types registration, please be aware that you are affecting code that " + "this number is small. 
Try auditing uses of this constant."); + +C10_API std::string DeviceTypeName( + DeviceType d, + bool lower_case = false); + +C10_API bool isValidDeviceType(DeviceType d); + +C10_API std::ostream& operator<<(std::ostream& stream, DeviceType type); + +} // namespace c10 + +namespace std { +template <> struct hash { + std::size_t operator()(c10::DeviceType k) const { + return std::hash()(static_cast(k)); + } +}; +} // namespace std diff --git a/thirdparty/libtorch/include/c10/core/Event.h b/thirdparty/libtorch/include/c10/core/Event.h new file mode 100644 index 0000000000..9b4d7ddf12 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Event.h @@ -0,0 +1,113 @@ +#pragma once + +#include "c10/core/impl/InlineEvent.h" +#include "c10/core/impl/VirtualGuardImpl.h" + +namespace c10 { + +/** + * A backend-generic movable, not copyable, not thread-safe event. + * + * The design of this event follows that of CUDA and HIP events. These events + * are recorded and waited on by streams and can be rerecorded to, + * each rerecording essentially creating a new version of the event. + * For example, if (in CPU time), stream X is asked to record E, + * stream Y waits on E, and stream X is asked to record E again, then Y will + * wait for X to finish the first call to record and not the second, because + * it's waiting on the first version of event E, not the second. + * Querying an event only returns the status of its most recent version. + * + * Backend-generic events are implemented by this class and + * impl::InlineEvent. In addition to these events there are also + * some backend-specific events, like ATen's CUDAEvent. Each of these + * classes has its own use. + * + * impl::InlineEvent<...> or a backend-specific event should be + * preferred when the backend is known at compile time and known to + * be compiled. Backend-specific events may have additional functionality. + * + * This Event should be used if a particular backend may not be available, + * or the backend required is not known at compile time. + * + * These generic events are built on top of DeviceGuardImpls, analogous + * to DeviceGuard and InlineDeviceGuard. The name "DeviceGuardImpls," + * is no longer entirely accurate, as these classes implement the + * backend-specific logic for a generic backend interface. + * + * See DeviceGuardImplInterface.h for a list of all supported flags. + */ + +struct Event final { + // Constructors + Event() = delete; + Event( + const DeviceType _device_type, + const EventFlag _flag = EventFlag::PYTORCH_DEFAULT) + : impl_{_device_type, _flag} { } + + // Copy constructor and copy assignment operator (deleted) + Event(const Event&) = delete; + Event& operator=(const Event&) = delete; + + // Move constructor and move assignment operator + Event(Event&& other) : impl_{std::move(other.impl_)} { } + Event& operator=(Event&& other) { + impl_.swap(std::move(other.impl_)); + return *this; + } + + // Destructor + ~Event() = default; + + // Getters + DeviceType device_type() const noexcept { return impl_.device_type(); } + DeviceIndex device_index() const noexcept { return impl_.device_index(); } + EventFlag flag() const noexcept { return impl_.flag(); } + bool was_marked_for_recording() const noexcept { return impl_.was_marked_for_recording(); } + +/** + * Calls record() if and only if record() has never been called for this event. + * Note: because Event is not thread-safe recordOnce() may call record() + * multiple times if called from multiple threads. 
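+ * (Editor's illustration.) A typical single-threaded pattern is lazy,
+ * one-time recording: the first consumer that needs to synchronize calls
+ * recordOnce(stream); once the event has been marked for recording, later
+ * recordOnce calls return without re-recording.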
+ */ + void recordOnce(const Stream& stream) { + impl_.recordOnce(stream); + } + +/** + * Increments the event's version and enqueues a job with this version + * in the stream's work queue. When the stream process that job + * it nofifies all streams waiting on / blocked by that version of the + * event to continue and marks that version as recorded. + * */ + void record(const Stream& stream) { + impl_.record(stream); + } + +/** + * Does nothing if the event has not been scheduled to be recorded. + * If the event was previously enqueued to be recorded, a command + * to wait for the version of the event that exists at the time of this call + * is inserted in the stream's work queue. + * When the stream reaches this command it will stop processing + * additional commands until that version of the event is marked as recorded. + */ + void block(const Stream& stream) const { + impl_.block(stream); + } + +/** + * Returns true if (and only if) + * (1) the event has never been scheduled to be recorded + * (2) the current version is marked as recorded. + * Returns false otherwise. + */ + bool query() const { + return impl_.query(); + } + +private: + impl::InlineEvent impl_; +}; + +} // c10 diff --git a/thirdparty/libtorch/include/c10/core/Layout.h b/thirdparty/libtorch/include/c10/core/Layout.h new file mode 100644 index 0000000000..c5ecc8908a --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Layout.h @@ -0,0 +1,41 @@ +#pragma once + +#include +#include + +#include + +namespace c10 { +enum class Layout : int8_t { Strided, Sparse, Mkldnn }; + +constexpr auto kStrided = Layout::Strided; +constexpr auto kSparse = Layout::Sparse; +constexpr auto kMkldnn = Layout::Mkldnn; + +inline Layout layout_from_backend(Backend backend) { + switch (backend) { + case Backend::SparseCPU: + case Backend::SparseCUDA: + case Backend::SparseHIP: + return Layout::Sparse; + case Backend::MkldnnCPU: + return Layout::Mkldnn; + default: + return Layout::Strided; + } +} + +inline std::ostream& operator<<(std::ostream& stream, at::Layout layout) { + switch (layout) { + case at::kStrided: + return stream << "Strided"; + case at::kSparse: + return stream << "Sparse"; + case at::kMkldnn: + return stream << "Mkldnn"; + default: + AT_ERROR("Unknown layout"); + } +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/MemoryFormat.h b/thirdparty/libtorch/include/c10/core/MemoryFormat.h new file mode 100644 index 0000000000..87f5c590eb --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/MemoryFormat.h @@ -0,0 +1,63 @@ +#pragma once + +#include +#include +#include + +#include + +// Memory format is not the property of a Tensor. It is the way to tell an +// operator how the result should be organized in memory and nothing more. That +// means memory format should never be used as return value for any tensor state +// interrogation functions (internally and externally). +// +// Possible options are: +// Preserve: +// If any of the input tensors is in channels_last format, operator output +// should be in channels_last format +// +// Contiguous: +// Regardless of input tensors format, the output should be contiguous Tensor. +// +// ChannelsLast: +// Regardless of input tensors format, the output should be in channels_last format. 
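+//
+// Illustration (editor's sketch, not upstream documentation): for a 4-d NCHW
+// tensor with sizes {N=2, C=3, H=4, W=5}, the channels_last strides produced
+// by get_channels_last_strides() below are {60, 1, 15, 3}:
+//   strides[1] = 1;                // C varies fastest
+//   strides[3] = C         = 3;    // W
+//   strides[2] = C * W     = 15;   // H
+//   strides[0] = C * W * H = 60;   // N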
+ + +namespace c10 { +enum class MemoryFormat : int8_t { Contiguous, Preserve, ChannelsLast }; + +// If you are seeing this, it means that this call site was not checked if +// the memory format could be preserved, and it was switched to old default +// behaviour of contiguous +#define LEGACY_CONTIGUOUS_MEMORY_FORMAT c10::get_contiguous_memory_format() + +C10_DEPRECATED inline MemoryFormat get_contiguous_memory_format() { + return MemoryFormat::Contiguous; +} + +inline std::ostream& operator<<( + std::ostream& stream, + at::MemoryFormat memory_format) { + switch (memory_format) { + case MemoryFormat::Preserve: + return stream << "Preserve"; + case MemoryFormat::Contiguous: + return stream << "Contiguous"; + case MemoryFormat::ChannelsLast: + return stream << "ChannelsLast"; + default: + AT_ERROR("Unknown memory format"); + } +} + +inline std::vector get_channels_last_strides(IntArrayRef sizes) { + AT_ASSERT(sizes.size() == 4); + std::vector strides(sizes.size()); + strides[1] = 1; + strides[3] = sizes[1]; + strides[2] = strides[3] * sizes[3]; + strides[0] = strides[2] * sizes[2]; + return strides; +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/QEngine.h b/thirdparty/libtorch/include/c10/core/QEngine.h new file mode 100644 index 0000000000..082b85dffa --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/QEngine.h @@ -0,0 +1,40 @@ +#pragma once + +#include +#include +#include + +namespace c10 { + +/** + * QEngine is an enum that is used to select the engine to run quantized ops. + * Keep this enum in sync with get_qengine_id() in + * torch/backends/quantized/__init__.py + */ +enum class QEngine : uint8_t { + NoQEngine = 0, + FBGEMM = 1, + QNNPACK = 2, +}; + +constexpr auto kNoQEngine = QEngine::NoQEngine; +constexpr auto kFBGEMM = QEngine::FBGEMM; +constexpr auto kQNNPACK = QEngine::QNNPACK; + +inline std::string toString(QEngine qengine) { + switch (qengine) { + case kNoQEngine: + return "NoQEngine"; + case kFBGEMM: + return "FBGEMM"; + case kQNNPACK: + return "QNNPACK"; + default: + TORCH_CHECK( + false, + "Unrecognized Quantized Engine: ", + static_cast(qengine)); + } +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/QScheme.h b/thirdparty/libtorch/include/c10/core/QScheme.h new file mode 100644 index 0000000000..96c00b7bd6 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/QScheme.h @@ -0,0 +1,45 @@ +#pragma once + +#include +#include +#include + +namespace c10 { + +/** + * QScheme is an enum that specifies the type of quantization. This has a one + * to one correspondence with Quantizer + * Please refer to ATen/quantized/Quantizer.h to see the Quantizers classes. 
+ * Keep this file in sync with torch/nn/_qscheme.py + */ +enum class QScheme : uint8_t { + PER_TENSOR_AFFINE = 0, + PER_CHANNEL_AFFINE = 1, + PER_TENSOR_SYMMETRIC = 2, + PER_CHANNEL_SYMMETRIC = 3, + COMPILE_TIME_NUM_QSCHEMES = 4, +}; + +constexpr auto kPerTensorAffine = QScheme::PER_TENSOR_AFFINE; +constexpr auto kPerChannelAffine = QScheme::PER_CHANNEL_AFFINE; +constexpr auto kPerTensorSymmetric = QScheme::PER_TENSOR_SYMMETRIC; +constexpr auto kPerChannelSymmetric = QScheme::PER_CHANNEL_SYMMETRIC; +constexpr int COMPILE_TIME_NUM_QSCHEMES = + static_cast(QScheme::COMPILE_TIME_NUM_QSCHEMES); + +inline std::string toString(QScheme qscheme) { + switch(qscheme) { + case kPerTensorAffine: + return "per_tensor_affine"; + case kPerChannelAffine: + return "per_channel_affine"; + case kPerTensorSymmetric: + return "per_tensor_symmetric"; + case kPerChannelSymmetric: + return "per_channel_symmetric"; + default: + TORCH_CHECK(false, "Unrecognized qscheme: ", static_cast(qscheme)); + } +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/Scalar.h b/thirdparty/libtorch/include/c10/core/Scalar.h new file mode 100644 index 0000000000..6d79f3004c --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Scalar.h @@ -0,0 +1,143 @@ +#pragma once + +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace c10 { + +/** + * Scalar represents a 0-dimensional tensor which contains a single element. + * Unlike a tensor, numeric literals (in C++) are implicitly convertible to + * Scalar (which is why, for example, we provide both add(Tensor) and + * add(Scalar) overloads for many operations). It may also be used in + * circumstances where you statically know a tensor is 0-dim and single size, + * but don't know its type. + */ +class C10_API Scalar { + public: + Scalar() : Scalar(int64_t(0)) {} + +#define DEFINE_IMPLICIT_CTOR(type, name) \ + Scalar(type vv) : Scalar(vv, true) { } + + AT_FORALL_SCALAR_TYPES_AND2(Half, BFloat16, DEFINE_IMPLICIT_CTOR) + +#undef DEFINE_IMPLICIT_CTOR + + // Value* is both implicitly convertible to SymbolicVariable and bool which + // causes ambiguosity error. Specialized constructor for bool resolves this + // problem. + template < + typename T, + typename std::enable_if::value, bool>::type* = + nullptr> + Scalar(T vv) : tag(Tag::HAS_b) { + v.i = convert(vv); + } + +#define DEFINE_IMPLICIT_COMPLEX_CTOR(type, name, member) \ + Scalar(type vv) : tag(Tag::HAS_##member) { \ + v.member[0] = c10::convert(vv.real()); \ + v.member[1] = c10::convert(vv.imag()); \ + } + + DEFINE_IMPLICIT_COMPLEX_CTOR(at::ComplexHalf, ComplexHalf, z) + DEFINE_IMPLICIT_COMPLEX_CTOR(std::complex, ComplexFloat, z) + DEFINE_IMPLICIT_COMPLEX_CTOR(std::complex, ComplexDouble, z) + +#undef DEFINE_IMPLICIT_COMPLEX_CTOR + +#define DEFINE_ACCESSOR(type, name) \ + type to##name() const { \ + if (Tag::HAS_d == tag) { \ + return checked_convert(v.d, #type); \ + } else if (Tag::HAS_z == tag) { \ + return checked_convert>( \ + {v.z[0], v.z[1]}, #type); \ + } if (Tag::HAS_b == tag) { \ + return checked_convert(v.i, #type); \ + } else { \ + return checked_convert(v.i, #type); \ + } \ + } + + // TODO: Support ComplexHalf accessor + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_EXCEPT_COMPLEX_HALF(DEFINE_ACCESSOR) + + // also support scalar.to(); + template + T to() const; + +#undef DEFINE_ACCESSOR + bool isFloatingPoint() const { + return Tag::HAS_d == tag; + } + + C10_DEPRECATED_MESSAGE("isIntegral is deprecated. 
Please use the overload with 'includeBool' parameter instead.") + bool isIntegral() const { + return Tag::HAS_i == tag; + } + bool isIntegral(bool includeBool) const { + return Tag::HAS_i == tag || (includeBool && isBoolean()); + } + + bool isComplex() const { + return Tag::HAS_z == tag; + } + bool isBoolean() const { + return Tag::HAS_b == tag; + } + + Scalar operator-() const; + + private: + template::is_integer && ! std::is_same::value, bool>::type* = + nullptr> + Scalar(T vv, bool) : tag(Tag::HAS_i) { + v.i = convert(vv); + } + + template::is_integer, bool>::type* = + nullptr> + Scalar(T vv, bool) : tag(Tag::HAS_d) { + v.d = convert(vv); + } + + // We can't set v in the initializer list using the + // syntax v{ .member = ... } because it doesn't work on MSVC + + enum class Tag { HAS_d, HAS_i, HAS_z, HAS_b }; + Tag tag; + union { + double d; + int64_t i; + // Can't do put std::complex in the union, because it triggers + // an nvcc bug: + // error: designator may not specify a non-POD subobject + double z[2]; + } v; +}; + +// define the scalar.to() specializations +template +inline T Scalar::to() const { + throw std::runtime_error("to() cast to unexpected type."); +} + +#define DEFINE_TO(T, name) \ + template <> \ + inline T Scalar::to() const { \ + return to##name(); \ + } +AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_EXCEPT_COMPLEX_HALF(DEFINE_TO) +#undef DEFINE_TO +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/ScalarType.h b/thirdparty/libtorch/include/c10/core/ScalarType.h new file mode 100644 index 0000000000..4dfffc8ec7 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/ScalarType.h @@ -0,0 +1,430 @@ +#pragma once + +#include +#include +#include +#include + +#include +#include +#include + +namespace c10 { + +// For the macros below: +// NB: If you want to macro some code for all non-QInt scalar types (i.e. types +// with complete information, you probably want one of the +// AT_FORALL_SCALAR_TYPES / AT_FORALL_SCALAR_TYPES_AND +// macros below, which are designed to behave similarly to the Dispatch macros +// with the same name. + +// NB: Order matters for this macro; it is relied upon in +// _promoteTypesLookup and the serialization format. +#define AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(_) \ + _(uint8_t, Byte) /* 0 */ \ + _(int8_t, Char) /* 1 */ \ + _(int16_t, Short) /* 2 */ \ + _(int, Int) /* 3 */ \ + _(int64_t, Long) /* 4 */ \ + _(at::Half, Half) /* 5 */ \ + _(float, Float) /* 6 */ \ + _(double, Double) /* 7 */ \ + _(at::ComplexHalf, ComplexHalf) /* 8 */ \ + _(std::complex, ComplexFloat) /* 9 */ \ + _(std::complex, ComplexDouble) /* 10 */ \ + _(bool, Bool) /* 11 */ \ + _(c10::qint8, QInt8) /* 12 */ \ + _(c10::quint8, QUInt8) /* 13 */ \ + _(c10::qint32, QInt32) /* 14 */ \ + _(at::BFloat16, BFloat16) /* 15 */ + + +// If you want to support ComplexHalf for real, add ComplexHalf +// into this macro (and change the name). But beware: convert() +// doesn't work for all the conversions you need... 
+#define AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_EXCEPT_COMPLEX_HALF(_) \ + _(uint8_t, Byte) \ + _(int8_t, Char) \ + _(int16_t, Short) \ + _(int, Int) \ + _(int64_t, Long) \ + _(at::Half, Half) \ + _(float, Float) \ + _(double, Double) \ + _(std::complex, ComplexFloat) \ + _(std::complex, ComplexDouble) \ + _(bool, Bool) \ + _(at::BFloat16, BFloat16) + + +enum class ScalarType : int8_t { +#define DEFINE_ENUM(_1, n) n, + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(DEFINE_ENUM) +#undef DEFINE_ENUM + Undefined, + NumOptions +}; + +namespace impl { + +// These are used to map ScalarTypes to C++ types. Feel free to add more or even +// macro generate this; the examples here are just those we have found to be +// necessary. + +template +struct ScalarTypeToCPPType; + +template<> +struct ScalarTypeToCPPType { + using type = c10::Half; + + // This is a workaround for the CUDA bug which prevents ::detail::ScalarTypeToCType::type being used directly + // due to ambiguous reference which can't to be resolved. For some reason it cant pick between at::detail and at::cuda::detail. + // For repro example, please see: https://gist.github.com/izdeby/952ae7cf256ddb740a73776d39a7e7ba + // TODO: remove once the bug is fixed. + static type t; +}; + +template<> +struct ScalarTypeToCPPType { + using type = c10::BFloat16; + + // This is a workaround for the CUDA bug which prevents ::detail::ScalarTypeToCType::type being used directly + // due to ambiguous reference which can't to be resolved. For some reason it cant pick between at::detail and at::cuda::detail. + // For repro example, please see: https://gist.github.com/izdeby/952ae7cf256ddb740a73776d39a7e7ba + // TODO: remove once the bug is fixed. + static type t; +}; + +template<> +struct ScalarTypeToCPPType { + using type = bool; + + // This is a workaround for the CUDA bug which prevents ::detail::ScalarTypeToCType::type being used directly + // due to ambiguous reference which can't to be resolved. For some reason it cant pick between at::detail and at::cuda::detail. + // For repro example, please see: https://gist.github.com/izdeby/952ae7cf256ddb740a73776d39a7e7ba + // TODO: remove once the bug is fixed. + static type t; +}; + +template<> +struct ScalarTypeToCPPType { + using type = int64_t; + + // This is a workaround for the CUDA bug which prevents ::detail::ScalarTypeToCType::type being used directly + // due to ambiguous reference which can't to be resolved. For some reason it cant pick between at::detail and at::cuda::detail. + // For repro example, please see: https://gist.github.com/izdeby/952ae7cf256ddb740a73776d39a7e7ba + // TODO: remove once the bug is fixed. 
+ static type t; +}; +} + +#define AT_FORALL_SCALAR_TYPES(_) \ + _(uint8_t, Byte) \ + _(int8_t, Char) \ + _(int16_t, Short) \ + _(int, Int) \ + _(int64_t, Long) \ + _(float, Float) \ + _(double, Double) + +#define AT_FORALL_SCALAR_TYPES_AND(SCALARTYPE, _) \ + _(uint8_t, Byte) \ + _(int8_t, Char) \ + _(int16_t, Short) \ + _(int, Int) \ + _(int64_t, Long) \ + _(float, Float) \ + _(double, Double) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE>::t), SCALARTYPE) + +#define AT_FORALL_SCALAR_TYPES_AND2(SCALARTYPE1, SCALARTYPE2, _) \ + _(uint8_t, Byte) \ + _(int8_t, Char) \ + _(int16_t, Short) \ + _(int, Int) \ + _(int64_t, Long) \ + _(float, Float) \ + _(double, Double) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE1>::t), SCALARTYPE1) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE2>::t), SCALARTYPE2) + +#define AT_FORALL_SCALAR_TYPES_AND3(SCALARTYPE1, SCALARTYPE2, SCALARTYPE3, _) \ + _(uint8_t, Byte) \ + _(int8_t, Char) \ + _(int16_t, Short) \ + _(int, Int) \ + _(int64_t, Long) \ + _(float, Float) \ + _(double, Double) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE1>::t), SCALARTYPE1) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE2>::t), SCALARTYPE2) \ + _(decltype(::c10::impl::ScalarTypeToCPPType<::c10::ScalarType::SCALARTYPE3>::t), SCALARTYPE3) + +#define AT_FORALL_QINT_TYPES(_) \ + _(c10::qint8, QInt8) \ + _(c10::quint8, QUInt8) \ + _(c10::qint32, QInt32) + +#define AT_FORALL_COMPLEX_TYPES(_) \ + _(std::complex, ComplexFloat) \ + _(std::complex, ComplexDouble) + +static inline caffe2::TypeMeta scalarTypeToTypeMeta(ScalarType scalar_type) { +#define DEFINE_CASE(ctype, name) \ + case ScalarType::name: \ + return caffe2::TypeMeta::Make(); + + switch (scalar_type) { + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(DEFINE_CASE) + case ScalarType::Undefined: + return caffe2::TypeMeta(); + default: + AT_ERROR( + "Unrecognized Scalartype ", + scalar_type, + " (please report this error)"); + } +#undef DEFINE_CASE +} + +static inline c10::optional tryTypeMetaToScalarType( + caffe2::TypeMeta dtype) { +#define DEFINE_IF(ctype, name) \ + if (dtype == caffe2::TypeMeta::Make()) { \ + return {ScalarType::name}; \ + } + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(DEFINE_IF) +#undef DEFINE_IF + if (dtype == caffe2::TypeMeta()) { + return {ScalarType::Undefined}; + } + return c10::nullopt; +} + +static inline ScalarType typeMetaToScalarType(caffe2::TypeMeta dtype) { + if (auto scalar_type = tryTypeMetaToScalarType(dtype)) { + return *scalar_type; + } + AT_ERROR( + "Unsupported TypeMeta in ATen: ", dtype, " (please report this error)"); +} + +static inline bool operator==(ScalarType t, caffe2::TypeMeta m) { + if (auto mt = tryTypeMetaToScalarType(m)) { + return (*mt) == t; + } + return false; +} + +static inline bool operator==(caffe2::TypeMeta m, ScalarType t) { + return t == m; +} + +#define DEFINE_CONSTANT(_, name) \ + constexpr ScalarType k##name = ScalarType::name; + +AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(DEFINE_CONSTANT) +#undef DEFINE_CONSTANT + +static inline const char* toString(ScalarType t) { +#define DEFINE_CASE(_, name) \ + case ScalarType::name: \ + return #name; + + switch (t) { + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(DEFINE_CASE) + default: + return "UNKNOWN_SCALAR"; + } +#undef DEFINE_CASE +} + +static inline size_t elementSize(ScalarType t) { +#define CASE_ELEMENTSIZE_CASE(ctype, name) \ + case ScalarType::name: \ + return 
sizeof(ctype); + + switch (t) { + AT_FORALL_SCALAR_TYPES_WITH_COMPLEX_AND_QINTS(CASE_ELEMENTSIZE_CASE) + default: + AT_ERROR("Unknown ScalarType"); + } +#undef CASE_ELEMENTSIZE_CASE +} + +C10_DEPRECATED_MESSAGE("isIntegralType is deprecated. Please use the overload with 'includeBool' parameter instead.") +static inline bool isIntegralType(ScalarType t) { + return ( + t == ScalarType::Byte || t == ScalarType::Char || t == ScalarType::Int || + t == ScalarType::Long || t == ScalarType::Short); +} + +static inline bool isIntegralType(ScalarType t, bool includeBool) { + bool isIntegral = ( + t == ScalarType::Byte || t == ScalarType::Char || t == ScalarType::Int || + t == ScalarType::Long || t == ScalarType::Short); + + return includeBool ? isIntegral || (t == ScalarType::Bool) : isIntegral; +} + +static inline bool isFloatingType(ScalarType t) { + return ( + t == ScalarType::Double || t == ScalarType::Float || + t == ScalarType::Half || t == ScalarType::BFloat16); +} + +static inline bool isComplexType(ScalarType t) { + return ( + t == ScalarType::ComplexHalf || t == ScalarType::ComplexFloat || + t == ScalarType::ComplexDouble); +} + +static inline bool isQIntType(ScalarType t) { + // Don't forget to extend this when adding new QInt types + return t == ScalarType:: QInt8 || t == ScalarType::QUInt8 || t == ScalarType::QInt32; +} + +static inline ScalarType toQIntType(ScalarType t) { + switch (t) { + case ScalarType::Byte: + return ScalarType::QUInt8; + case ScalarType::Char: + return ScalarType::QInt8; + case ScalarType::Int: + return ScalarType::QInt32; + default: + return t; + } +} + +static inline ScalarType toUnderlying(ScalarType t) { + switch (t) { + case ScalarType::QUInt8: + return ScalarType::Byte; + case ScalarType::QInt8: + return ScalarType::Char; + case ScalarType::QInt32: + return ScalarType::Int; + default: + return t; + } +} + +static inline bool isSignedType(ScalarType t) { + #define CASE_SIGNED(ctype, name) \ + case ScalarType::name: \ + return std::numeric_limits::is_signed; + + switch (toUnderlying(t)) { + AT_FORALL_SCALAR_TYPES_AND3(Half, Bool, BFloat16, CASE_SIGNED) + default: + AT_ERROR("Unknown ScalarType"); + } + #undef CASE_SIGNED +} + +static inline bool isUnderlying(ScalarType type, ScalarType qtype) { + return type == toUnderlying(qtype); +} + +static inline ScalarType toValueType(ScalarType t) { + switch (t) { + case ScalarType::ComplexFloat: + return ScalarType::Float; + case ScalarType::ComplexDouble: + return ScalarType::Double; + default: + return t; + } +} + +// see tensor_attributes.rst for detailed explanation and examples +// of casting rules. +static inline bool canCast(const ScalarType from, const ScalarType to) { + // We disallow float -> integral, e.g., int_tensor *= float is disallowed. + if (isFloatingType(from) && isIntegralType(to, false)) { + return false; + } + + // Treat bool as a distinct "category," to be consistent with type promotion + // rules (e.g. `bool_tensor + 5 -> int64_tensor`). If `5` was in the same category + // as `bool_tensor`, we would not promote. + // Differing categories implies `bool_tensor += 5` is disallowed. + // + // NB: numpy distinguishes "unsigned" as a category to get the desired + // `bool_tensor + 5 -> int64_tensor` behavior. We don't, because: + // * We don't want the performance hit of checking the runtime sign of Scalars. + // * `uint8_tensor + 5 -> int64_tensor` would be undesirable. 
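+  // Concrete examples (editor's note): canCast(kFloat, kInt) == false
+  // (floating -> integral is rejected above), canCast(kInt, kBool) == false
+  // (rejected by the check below), while canCast(kBool, kInt) and
+  // canCast(kFloat, kDouble) both return true.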
+ if (from != ScalarType::Bool && to == ScalarType::Bool) { + return false; + } + return true; +} + +static inline ScalarType promoteTypes(ScalarType a, ScalarType b) { + // This is generated according to NumPy's promote_types + constexpr auto u1 = ScalarType::Byte; + constexpr auto i1 = ScalarType::Char; + constexpr auto i2 = ScalarType::Short; + constexpr auto i4 = ScalarType::Int; + constexpr auto i8 = ScalarType::Long; + constexpr auto f2 = ScalarType::Half; + constexpr auto f4 = ScalarType::Float; + constexpr auto f8 = ScalarType::Double; + constexpr auto c2 = ScalarType::ComplexHalf; + constexpr auto c4 = ScalarType::ComplexFloat; + constexpr auto c8 = ScalarType::ComplexDouble; + constexpr auto b1 = ScalarType::Bool; + constexpr auto bf = ScalarType::BFloat16; + constexpr auto ud = ScalarType::Undefined; + if (a == ud || b == ud) { + return ScalarType::Undefined; + } + + // For QInt types, we only allow exact match + if (isQIntType(a) && a == b) { + return a; + } + + if (isQIntType(a) || isQIntType(b)) { + AT_ERROR( + "promoteTypes with quantized numbers is not handled yet; figure out what the correct rules should be, offending types: ", + toString(a), + " ", + toString(b)); + } + + // this matrix has to be consistent with AT_FORALL_SCALAR_TYPES_WITH_COMPLEX + // so that's why we have to add undefined as we are not sure what is the + // corrent values for the type promotions in complex type cases. + static constexpr ScalarType _promoteTypesLookup[static_cast( + ScalarType::NumOptions)][static_cast(ScalarType::NumOptions)] = { + /* u1 i1 i2 i4 i8 f2 f4 f8 c2 c4 c8 b1 q1 q2 q3 bf*/ + /* u1 */ {u1, i2, i2, i4, i8, f2, f4, f8, ud, c4, c8, u1, ud, ud, ud, ud}, + /* i1 */ {i2, i1, i2, i4, i8, f2, f4, f8, ud, c4, c8, i1, ud, ud, ud, ud}, + /* i2 */ {i2, i2, i2, i4, i8, f2, f4, f8, ud, c4, c8, i2, ud, ud, ud, ud}, + /* i4 */ {i4, i4, i4, i4, i8, f2, f4, f8, ud, c4, c8, i4, ud, ud, ud, ud}, + /* i8 */ {i8, i8, i8, i8, i8, f2, f4, f8, ud, c4, c8, i8, ud, ud, ud, ud}, + /* f2 */ {f2, f2, f2, f2, f2, f2, f4, f8, ud, c4, c8, f2, ud, ud, ud, ud}, + /* f4 */ {f4, f4, f4, f4, f4, f4, f4, f8, ud, c4, c8, f4, ud, ud, ud, ud}, + /* f8 */ {f8, f8, f8, f8, f8, f8, f8, f8, ud, c8, c8, f8, ud, ud, ud, ud}, + /* c2 */ {ud, ud, ud, ud, ud, ud, ud, ud, c2, c4, c8, ud, ud, ud, ud, ud}, + /* c4 */ {c4, c4, c4, c4, c4, c4, c4, c8, c4, c4, c8, ud, ud, ud, ud, ud}, + /* c8 */ {c8, c8, c8, c8, c8, c8, c8, c8, c8, c8, c8, ud, ud, ud, ud, ud}, + /* b1 */ {u1, i1, i2, i4, i8, f2, f4, f8, ud, ud, ud, b1, ud, ud, ud, ud}, + /* q1 */ {ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud}, + /* q2 */ {ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud}, + /* q3 */ {ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud}, + /* bf */ {ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, ud, bf}, + }; + return _promoteTypesLookup[static_cast(a)][static_cast(b)]; +} + +inline std::ostream& operator<<( + std::ostream& stream, + at::ScalarType scalar_type) { + return stream << toString(scalar_type); +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/Storage.h b/thirdparty/libtorch/include/c10/core/Storage.h new file mode 100644 index 0000000000..6d86119eff --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Storage.h @@ -0,0 +1,184 @@ +#pragma once + +#include + +namespace c10 { + +struct C10_API Storage { + public: + Storage() {} + Storage(c10::intrusive_ptr ptr) : storage_impl_(std::move(ptr)) {} + + // Allocates memory buffer using given allocator and creates a 
storage with it + Storage( + caffe2::TypeMeta data_type, + size_t size, + Allocator* allocator, + bool resizable) + : storage_impl_(c10::make_intrusive( + data_type, + size, + allocator, + resizable)) {} + + // Creates storage with pre-allocated memory buffer. Allocator is given for + // potential future reallocations, however it can be nullptr if the storage + // is non-resizable + Storage( + caffe2::TypeMeta data_type, + int64_t numel, + at::DataPtr data_ptr, + at::Allocator* allocator, + bool resizable) + : storage_impl_(c10::make_intrusive( + data_type, + numel, + std::move(data_ptr), + allocator, + resizable)) {} + + // Legacy constructor for partially initialized (dtype or memory) storages + // that can be temporarily created with Caffe2 APIs. See the note on top of + // TensorImpl.h for details. + static Storage create_legacy(at::Device device, caffe2::TypeMeta data_type) { + auto allocator = GetAllocator(device.type()); + return Storage(c10::make_intrusive( + data_type, + 0, + allocator->allocate(0), // materialize a non-default Device. + allocator, + true)); + } + + template + inline bool IsType() const { + return storage_impl_->IsType(); + } + + template + T* data() const { return storage_impl_->data(); } + + template + T* unsafe_data() const { return storage_impl_->unsafe_data(); } + + size_t elementSize() const { + return storage_impl_->itemsize(); + } + + inline size_t itemsize() const { + return storage_impl_->itemsize(); + } + + ptrdiff_t size() const { + return storage_impl_->numel(); + } + + int64_t numel() const { + return storage_impl_->numel(); + } + + // TODO: remove later + void set_numel(int64_t numel) const { + storage_impl_.get()->set_numel(numel); + } + + bool resizable() const { + return storage_impl_->resizable(); + } + + size_t capacity() const { + return storage_impl_->capacity(); + } + // get() use here is to get const-correctness + + void* data() const { + return storage_impl_.get()->data(); + } + + const caffe2::TypeMeta& dtype() const { + return storage_impl_->dtype(); + } + + at::DataPtr& data_ptr() { + return storage_impl_->data_ptr(); + } + + const at::DataPtr& data_ptr() const { + return storage_impl_->data_ptr(); + } + + // Returns the previous data_ptr + at::DataPtr set_data_ptr(at::DataPtr&& data_ptr) const { + return storage_impl_.get()->set_data_ptr(std::move(data_ptr)); + }; + + void set_dtype(const caffe2::TypeMeta& data_type) const { + storage_impl_.get()->set_dtype(data_type); + } + + DeviceType device_type() const { + return storage_impl_->device_type(); + } + + at::Allocator* allocator() const { + return storage_impl_.get()->allocator(); + } + + at::Device device() const { + return storage_impl_->device(); + } + + StorageImpl* unsafeReleaseStorageImpl() { + return storage_impl_.release(); + } + + StorageImpl* unsafeGetStorageImpl() const noexcept { + return storage_impl_.get(); + } + + operator bool() const { + return storage_impl_; + } + + size_t use_count() const { + return storage_impl_.use_count(); + } + + inline bool unique() const { + return storage_impl_.unique(); + } + + bool is_alias_of(const Storage& other) const { + return storage_impl_ == other.storage_impl_; + } + + void UniqueStorageShareExternalPointer( + void* src, + const caffe2::TypeMeta& data_type, + size_t capacity, + DeleterFnPtr d = nullptr) { + if (!storage_impl_.unique()) { + AT_ERROR( + "UniqueStorageShareExternalPointer can only be called when use_count == 1"); + } + storage_impl_->UniqueStorageShareExternalPointer( + src, data_type, capacity, d); + } + + void 
UniqueStorageShareExternalPointer( + at::DataPtr&& data_ptr, + const caffe2::TypeMeta& data_type, + size_t capacity) { + if (!storage_impl_.unique()) { + AT_ERROR( + "UniqueStorageShareExternalPointer can only be called when use_count == 1"); + } + storage_impl_->UniqueStorageShareExternalPointer( + std::move(data_ptr), data_type, capacity); + } + + protected: + c10::intrusive_ptr storage_impl_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/StorageImpl.h b/thirdparty/libtorch/include/c10/core/StorageImpl.h new file mode 100644 index 0000000000..579ef00820 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/StorageImpl.h @@ -0,0 +1,234 @@ +#pragma once + +#include +#include + +#include + +namespace c10 { + +struct C10_API StorageImpl final : public c10::intrusive_ptr_target { + public: + StorageImpl( + caffe2::TypeMeta data_type, + int64_t numel, + at::DataPtr data_ptr, + at::Allocator* allocator, + bool resizable) + : data_type_(data_type), + data_ptr_(std::move(data_ptr)), + numel_(numel), + resizable_(resizable), + received_cuda_(false), + allocator_(allocator) { + if (resizable) { + AT_ASSERTM( + allocator_, "For resizable storage, allocator must be provided"); + } + if (numel > 0) { + if (data_type_.id() == caffe2::TypeIdentifier::uninitialized()) { + AT_ERROR( + "Constructing a storage with meta of unknown type and non-zero numel"); + } + } + } + + StorageImpl( + caffe2::TypeMeta data_type, + int64_t numel, + at::Allocator* allocator, + bool resizable) + : StorageImpl( + data_type, + numel, + allocator->allocate(data_type.itemsize() * numel), + allocator, + resizable) {} + + StorageImpl& operator=(StorageImpl&& other) = default; + StorageImpl& operator=(const StorageImpl&) = delete; + StorageImpl() = delete; + StorageImpl(StorageImpl&& other) = default; + StorageImpl(const StorageImpl&) = delete; + ~StorageImpl() = default; + + void reset() { + data_ptr_.clear(); + numel_ = 0; + } + + template + inline bool IsType() const { + return data_type_.Match(); + } + + template + inline T* data() const { + auto data_type = caffe2::TypeMeta::Make(); + if (dtype() != data_type) { + AT_ERROR( + "Attempt to access StorageImpl having data type ", + dtype(), + " as data type ", + data_type); + } + return unsafe_data(); + } + + template + inline T* unsafe_data() const { + return static_cast(this->data_ptr_.get()); + } + + void release_resources() override { + data_ptr_.clear(); + } + + size_t itemsize() const { + return data_type_.itemsize(); + } + + size_t capacity() const { + return numel_ * itemsize(); + } + + int64_t numel() const { + return numel_; + }; + + // TODO: remove later + void set_numel(int64_t numel) { + numel_ = numel; + }; + + bool resizable() const { + return resizable_; + }; + + at::DataPtr& data_ptr() { + return data_ptr_; + }; + + const at::DataPtr& data_ptr() const { + return data_ptr_; + }; + + // Returns the previous data_ptr + at::DataPtr set_data_ptr(at::DataPtr&& data_ptr) { + std::swap(data_ptr_, data_ptr); + return std::move(data_ptr); + }; + + // XXX: TERRIBLE! DONT USE UNLESS YOU HAVE TO! AND EVEN THEN DONT, JUST DONT! + // Setting the data_type will require you to audit many other parts of the + // struct again to make sure it's still valid. 
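+  // Editor's illustration of the recomputation below: a storage currently
+  // holding 8 floats (numel_ == 8, itemsize 4, i.e. 32 bytes of capacity)
+  // that is switched to double (itemsize 8) ends up with numel_ == 32 / 8 == 4.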
+ void set_dtype(const caffe2::TypeMeta& data_type) { + int64_t capacity = numel_ * data_type_.itemsize(); + data_type_ = data_type; + numel_ = capacity / data_type_.itemsize(); + } + + // TODO: Return const ptr eventually if possible + void* data() { + return data_ptr_.get(); + } + + void* data() const { + return data_ptr_.get(); + } + + at::DeviceType device_type() const { + return data_ptr_.device().type(); + } + + at::Allocator* allocator() { + return allocator_; + } + + const caffe2::TypeMeta& dtype() const { + return data_type_; + } + + const at::Allocator* allocator() const { + return allocator_; + }; + + // You generally shouldn't use this method, but it is occasionally + // useful if you want to override how a tensor will be reallocated, + // after it was already allocated (and its initial allocator was + // set) + void set_allocator(at::Allocator* allocator) { + allocator_ = allocator; + } + + Device device() const { + return data_ptr_.device(); + } + + void set_resizable(bool resizable) { + if (resizable) { + // We need an allocator to be resizable + AT_ASSERT(allocator_); + } + resizable_ = resizable; + } + + /** + * Can only be called when use_count is 1 + */ + void UniqueStorageShareExternalPointer( + void* src, + const caffe2::TypeMeta& data_type, + size_t capacity, + DeleterFnPtr d = nullptr) { + UniqueStorageShareExternalPointer( + at::DataPtr(src, src, d, data_ptr_.device()), data_type, capacity); + } + + /** + * Can only be called when use_count is 1 + */ + void UniqueStorageShareExternalPointer( + at::DataPtr&& data_ptr, + const caffe2::TypeMeta& data_type, + size_t capacity) { + data_type_ = data_type; + // TODO: Use CAFFE_ENFORCE_WITH_CALLER equivalent + // For now causes lots of redefine issues if caffe2/core/logging.h is used + if (data_type_.id() == caffe2::TypeIdentifier::uninitialized()) { + AT_ERROR( + "To share with a raw external pointer you need to have meta " + "already set."); + } + data_ptr_ = std::move(data_ptr); + // NOTE: data_type might change and so it's also possible that capacity + // might not be divisible by itemsize. There is no way for us to keep track + // of the exact capacity if we're not explicity storing is. More conrectely + // capacity() might not return the value that was set here, if itemsize does + // not evenly divide it. + numel_ = capacity / data_type_.itemsize(); + allocator_ = nullptr; + resizable_ = false; + } + + // This method can be used only after storage construction and cannot be used + // to modify storage status + void set_received_cuda(bool received_cuda) { + received_cuda_ = received_cuda; + } + + bool received_cuda() { + return received_cuda_; + } + + private: + caffe2::TypeMeta data_type_; + DataPtr data_ptr_; + int64_t numel_; + bool resizable_; + // Identifies that Storage was received from another process and doesn't have + // local to process cuda memory allocation + bool received_cuda_; + Allocator* allocator_; +}; +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/Stream.h b/thirdparty/libtorch/include/c10/core/Stream.h new file mode 100644 index 0000000000..6962be72bf --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/Stream.h @@ -0,0 +1,154 @@ +#pragma once + +#include + +namespace c10 { + +/// An index representing a specific stream. A StreamId is not independently +/// meaningful without knowing the Device it is associated with; try to +/// use Stream rather than StreamId directly. 
+/// +/// StreamIds are opaque; they are assigned by some DeviceType-specific +/// numbering system which is not visible to the user. HOWEVER, we +/// guarantee that StreamId 0 is always a valid stream, and corresponds +/// to some sort of "default" stream. +using StreamId = int32_t; + +// NB: I decided not to call the above StreamIndex to avoid confusion with +// DeviceIndex. This way, you access device index with index(), and stream id +// with id() + +/** + * A stream is a software mechanism used to synchronize launched kernels + * without requiring explicit synchronizations between kernels. The basic + * model is that every kernel launch is associated with a stream: every + * kernel on the same stream is implicitly synchronized so that if I launch + * kernels A and B on the same stream, A is guaranteed to finish before B + * launches. If I want B to run concurrently with A, I must schedule + * it on a different stream. + * + * The Stream class is a backend agnostic value class representing a stream + * which I may schedule a kernel on. Every stream is associated with a device, + * which is recorded in stream, which is used to avoid confusion about which + * device a stream refers to. + * + * Streams are explicitly thread-safe, in the sense that it is OK to pass + * a Stream from one thread to another, and kernels queued from two different + * threads will still get serialized appropriately. (Of course, the + * time when the kernels get queued is undetermined unless you synchronize + * host side ;) + * + * Stream does NOT have a default constructor. Streams are for expert + * users; if you want to use Streams, we're going to assume you know + * how to deal with C++ template error messages if you try to + * resize() a vector of Streams. + * + * Known instances of streams in backends: + * + * - cudaStream_t (CUDA) + * - hipStream_t (HIP) + * - cl_command_queue (OpenCL) (NB: Caffe2's existing OpenCL integration + * does NOT support command queues.) + * + * Because this class is device agnostic, it cannot provide backend-specific + * functionality (e.g., get the cudaStream_t of a CUDA stream.) There are + * wrapper classes which provide this functionality, e.g., CUDAStream. + */ +class Stream final { +private: + Device device_; + StreamId id_; +public: + enum Unsafe { UNSAFE }; + enum Default { DEFAULT }; + + /// Unsafely construct a stream from a Device and a StreamId. In + /// general, only specific implementations of streams for a + /// backend should manufacture Stream directly in this way; other users + /// should use the provided APIs to get a stream. In particular, + /// we don't require backends to give any guarantees about non-zero + /// StreamIds; they are welcome to allocate in whatever way they like. + explicit Stream(Unsafe, Device device, StreamId id) + : device_(device) + , id_(id) {} + + /// Construct the default stream of a Device. The default stream is + /// NOT the same as the current stream; default stream is a fixed stream + /// that never changes, whereas the current stream may be changed by + /// StreamGuard. 
+ explicit Stream(Default, Device device) + : device_(device) + , id_(0) {} + + bool operator==(const Stream& other) const noexcept { + return this->device_ == other.device_ && this->id_ == other.id_; + } + bool operator!=(const Stream& other) const noexcept { + return !(*this == other); + } + + Device device() const noexcept { return device_; } + DeviceType device_type() const noexcept { return device_.type(); } + DeviceIndex device_index() const noexcept { return device_.index(); } + StreamId id() const noexcept { return id_; } + + // Enqueues a wait instruction in the stream's work queue. + // This instruction is a no-op unless the event is marked + // for recording. In that case the stream stops processing + // until the event is recorded. + template + void wait(const T& event) const { + event.block(*this); + } + + // The purpose of this function is to more conveniently permit binding + // of Stream to and from Python. Without packing, I have to setup a whole + // class with two fields (device and stream id); with packing I can just + // store a single uint64_t. + // + // The particular way we pack streams into a uint64_t is considered an + // implementation detail and should not be relied upon. + uint64_t pack() const noexcept { + // Are you here because this static assert failed? Make sure you ensure + // that the bitmasking code below is updated accordingly! + static_assert(sizeof(DeviceType) == 2, "DeviceType is not 16-bit"); + static_assert(sizeof(DeviceIndex) == 2, "DeviceIndex is not 16-bit"); + static_assert(sizeof(StreamId) == 4, "DeviceIndex is not 32-bit"); + // Concat these together into a 64-bit integer + // See Note [Hazard when concatenating signed integers] + uint64_t bits = + static_cast(static_cast(device_type())) << 48 + | static_cast(static_cast(device_index())) << 32 + | static_cast(static_cast(id())); + return bits; + } + + static Stream unpack(uint64_t bits) { + auto stream_id = static_cast(bits) & 0xFFFFFFFFull; + bits >>= 32; + auto device_index = static_cast(bits) & 0xFFFFull; + bits >>= 16; + auto device_type = static_cast(bits); + TORCH_CHECK(isValidDeviceType(device_type)); + // Unfortunately, we can't check if the StreamId is valid here; it + // will be checked upon first use. + return Stream(UNSAFE, Device(device_type, device_index), stream_id); + } + + // I decided NOT to provide setters on this class, because really, + // why would you change the device of a stream? Just construct + // it correctly from the beginning dude. +}; + +C10_API std::ostream& operator<<(std::ostream& stream, const Stream& s); + +} // namespace c10 + +namespace std { + template <> + struct hash { + size_t operator()(c10::Stream s) const noexcept { + return std::hash{}(s.pack()); + } + }; +} // namespace std diff --git a/thirdparty/libtorch/include/c10/core/StreamGuard.h b/thirdparty/libtorch/include/c10/core/StreamGuard.h new file mode 100644 index 0000000000..e2bdac928b --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/StreamGuard.h @@ -0,0 +1,126 @@ +#pragma once + +#include + +namespace c10 { + +/** + * A StreamGuard is an RAII class that changes the current device + * to the device corresponding to some stream, and changes the + * default stream on that device to be this stream. + * + * Use of StreamGuard is HIGHLY discouraged in operator definitions. In + * a single operator, you probably don't know enough about the global + * state of the world to profitably decide how to set streams. 
Let + * the caller handle this appropriately, and just use the current stream + * in your operator code. + * + * This StreamGuard does NOT have an uninitialized state; it is guaranteed + * to reset the stream and device on exit. If you are in a situation + * where you *might* want to setup a stream guard, see OptionalStreamGuard. + */ +struct StreamGuard { + /// No default constructor, see Note [Omitted default constructor from RAII] + explicit StreamGuard() = delete; + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + explicit StreamGuard(Stream stream) : guard_(stream) {} + + /// Copy is disallowed + StreamGuard(const StreamGuard&) = delete; + StreamGuard& operator=(const StreamGuard&) = delete; + + /// Move is disallowed, as StreamGuard does not have an uninitialized state, + /// which is required for moves on types with nontrivial destructors. + StreamGuard(StreamGuard&& other) = delete; + StreamGuard& operator=(StreamGuard&& other) = delete; + + /// Resets the currently set stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// + /// NOTE: this implementation may skip some stream/device setting if + /// it can prove that it is unnecessary. + /// + /// WARNING: reset_stream does NOT preserve previously set streams on + /// different devices. If you need to set streams on multiple devices + /// on , use MultiStreamGuard instead. + void reset_stream(Stream stream) { guard_.reset_stream(stream); } + + /// Returns the stream that was set at the time the guard was constructed. + Stream original_stream() const { + return guard_.original_stream(); + } + + /// Returns the most recent stream that was set using this device guard, + /// either from construction, or via set_stream. + Stream current_stream() const { + return guard_.current_stream(); + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device/reset_device/set_index. + Device current_device() const { return guard_.current_device(); } + + /// Returns the device that was set at the most recent reset_stream(), + /// or otherwise the device at construction time. + Device original_device() const { return guard_.original_device(); } + +private: + c10::impl::InlineStreamGuard guard_; +}; + +/** + * An OptionalStreamGuard is an RAII class that sets a device to some value on + * initialization, and resets the device to its original value on destruction. + * See OptionalDeviceGuard for more guidance on how to use this class. + */ +struct OptionalStreamGuard { + /// Create an uninitialized guard. + explicit OptionalStreamGuard() : guard_() {} + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + explicit OptionalStreamGuard(Stream stream) : guard_(stream) {} + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream, + /// if the passed stream is not nullopt. 
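+  ///
+  /// Hedged sketch (not from the upstream header): with nullopt the guard
+  /// stays uninitialized and changes nothing until reset_stream() is called.
+  ///
+  ///   c10::optional<c10::Stream> maybe_stream = c10::nullopt;
+  ///   c10::OptionalStreamGuard guard(maybe_stream);  // no device/stream change yet
+  ///   // later: guard.reset_stream(some_stream);     // now the guard takes effect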
+ explicit OptionalStreamGuard(optional stream_opt) : guard_(stream_opt) {} + + /// Copy is disallowed + OptionalStreamGuard(const OptionalStreamGuard&) = delete; + OptionalStreamGuard& operator=(const OptionalStreamGuard&) = delete; + + // See Note [Move construction for RAII guards is tricky] + OptionalStreamGuard(OptionalStreamGuard&& other) = delete; + + // See Note [Move assignment for RAII guards is tricky] + OptionalStreamGuard& operator=(OptionalStreamGuard&& other) = delete; + + /// Resets the currently set stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// Initializes the guard if it was not previously initialized. + void reset_stream(Stream stream) { guard_.reset_stream(stream); } + + /// Returns the stream that was set at the time the guard was most recently + /// initialized, or nullopt if the guard is uninitialized. + optional original_stream() const { return guard_.original_stream(); } + + /// Returns the most recent stream that was set using this stream guard, + /// either from construction, or via reset_stream, if the guard is initialized, + /// or nullopt if the guard is uninitialized. + optional current_stream() const { return guard_.current_stream(); } + + /// Restore the original device and stream, resetting this guard to uninitialized state. + void reset() { guard_.reset(); } + +private: + c10::impl::InlineOptionalStreamGuard guard_; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/TensorImpl.h b/thirdparty/libtorch/include/c10/core/TensorImpl.h new file mode 100644 index 0000000000..2e4f338819 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/TensorImpl.h @@ -0,0 +1,1738 @@ +#pragma once + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +// A global boolean variable to control whether we free memory when a Tensor +// is shrinked to a smaller size. As a result, a Tensor is always going to +// keep the memory allocated for its maximum capacity reshaped to so far. +// +// This parameter is respected "upper-case" methods which call Resize() +// (e.g., CopyFrom, ResizeLike); it is NOT respected by Tensor::resize_ +// or ShrinkTo, both of which guarantee to never to free memory. +C10_DECLARE_bool(caffe2_keep_on_shrink); + +// Since we can have high variance in blob memory allocated across different +// inputs in the same run, we will shrink the blob only if the memory gain +// is larger than this flag in bytes. This only applies to functions which +// respect caffe2_keep_on_shrink. +C10_DECLARE_int64(caffe2_max_keep_on_shrink_memory); + + +namespace at { +class Tensor; +} + +namespace c10 { +class Scalar; +struct Storage; + +/** + * A utility function to convert vector to vector. 
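+ *
+ * Illustrative call (a sketch): given std::vector<int> dims{2, 3},
+ * ToVectorint64_t(dims) yields std::vector<int64_t>{2, 3}.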
+ */ +inline std::vector ToVectorint64_t(ArrayRef src) { + return std::vector(src.begin(), src.end()); +} + +/** + * Return product of all dimensions starting from k + */ +inline int64_t size_from_dim_(int k, IntArrayRef dims) { + int64_t r = 1; + for (size_t i = k; i < dims.size(); ++i) { + r *= dims[i]; + } + return r; +} + +// Product of all dims up to k (not including dims[k]) +inline int64_t size_to_dim_(int k, IntArrayRef dims) { + TORCH_CHECK((unsigned)k <= dims.size()); + int64_t r = 1; + for (int i = 0; i < k; ++i) { + r *= dims[i]; + } + return r; +} + +// Product of all dims between k and l (not including dims[k] and dims[l]) +inline int64_t size_between_dim_(int k, int l, IntArrayRef dims) { + TORCH_CHECK((unsigned)l < dims.size()); + int64_t r = 1; + if (k < l) { + for (int i = k + 1; i < l; ++i) { + r *= dims[i]; + } + } else { + for (int i = l + 1; i < k; ++i) { + r *= dims[i]; + } + } + return r; +} + +// Wrap around axis_index if it is negative, s.t., -1 is the last dim +inline int canonical_axis_index_(int axis_index, int ndims) { + TORCH_CHECK(axis_index >= -ndims); + TORCH_CHECK(axis_index < ndims); + if (axis_index < 0) { + return axis_index + ndims; + } + return axis_index; +} + +using PlacementDtor = void (*)(void*, size_t); + +/* + * A Context that will call extra placement deleter during + * deconstruction. + * + * Accept a already constructed DataPtr and store it as member + * during destruction, we'll call extra deleter on the underlying + * data pointer before the DataPtr is destructed. + * `data_ptr_` owns the memory. + */ +struct C10_API PlacementDeleteContext { + DataPtr data_ptr_; + PlacementDtor placement_dtor_; + size_t size_; + PlacementDeleteContext( + DataPtr&& data_ptr, + PlacementDtor placement_dtor, + size_t size) + : data_ptr_(std::move(data_ptr)), + placement_dtor_(placement_dtor), + size_(size) {} + static DataPtr makeDataPtr( + DataPtr&& data_ptr, + PlacementDtor placement_dtor, + size_t size, + Device device); + ~PlacementDeleteContext() { + placement_dtor_(data_ptr_.get(), size_); + // original memory will be freed when data_ptr_ is destructed + } +}; + +struct TensorImpl; + +struct C10_API AutogradMetaInterface { + virtual void set_requires_grad(bool requires_grad, at::TensorImpl* self_impl) = 0; + virtual bool requires_grad() const = 0; + virtual at::Tensor& grad() = 0; + virtual const at::Tensor& grad() const = 0; + virtual ~AutogradMetaInterface(); +}; + +namespace impl { + +// Unfortunately, the definition of AutogradMeta lives in a separate +// compilation unit than TensorImpl (libtorch.so versus libc10.so) +// which means that we cannot construct an AutogradMeta from TensorImpl, +// not even from the cpp file. So we have to indirect it through a factory +// function which will be initialized when we load libtorch.so. + +struct C10_API AutogradMetaFactory { + virtual ~AutogradMetaFactory() = default; + virtual std::unique_ptr make() const = 0; + // This method is the dumbest method. But I don't have access + // to Tensor (not TensorImpl) which is undefined in this header. 
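+  //
+  // A concrete factory is installed at library-load time through the
+  // AutogradMetaFactoryRegisterer helper declared below. A hedged sketch of
+  // such a registration (the names here are illustrative, not upstream API):
+  //
+  //   static MyConcreteFactory factory_instance;   // hypothetical factory type
+  //   static c10::impl::AutogradMetaFactoryRegisterer reg(&factory_instance);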
+ virtual const at::Tensor& undefined_tensor() const = 0; +}; + +C10_API void SetAutogradMetaFactory(AutogradMetaFactory* factory); +C10_API AutogradMetaFactory* GetAutogradMetaFactory(); + +struct C10_API AutogradMetaFactoryRegisterer { + explicit AutogradMetaFactoryRegisterer(AutogradMetaFactory* factory) { + SetAutogradMetaFactory(factory); + } +}; + +} // namespace impl + +struct C10_API NamedTensorMetaInterface { + virtual ~NamedTensorMetaInterface() {}; + virtual std::unique_ptr clone() const { + TORCH_INTERNAL_ASSERT( + false, + "Not implemented: NamedTensorMetaInterface::clone"); + }; + virtual int64_t slow_dim() const { + TORCH_INTERNAL_ASSERT( + false, + "Not implemented: NamedTensorMetaInterface::slow_dim"); + }; +}; + +// NOTE [ Version Counter Sharing ] +// +// Every Tensor has a version counter. Version counters are incremented whenever the +// data or size of a tensor changes through in-place Variable operations. Version +// counters are used to detect modifications to saved variables which would result in +// incorrect gradient calculations. Version counters may be shared between Variables: +// +// 1. A view shares the version counter of the base Variable, +// 2. `x.detach()` shares the version counter of `x`, +// 3. Unpacked saved variables share the version counter of the source. +// +// Version counters are not shared in these scenarios: +// +// 1. When we replace a `Variable`'s underlying `Tensor` by calling `set_data(...)`, +// 2. `x.data` does not share the version counter of `x`. (See discussion at +// https://github.com/pytorch/pytorch/issues/5396) +// +// Question: Why do we put the version counter in TensorImpl instead of AutogradMeta? +// +// Answer: After the Variable/Tensor merge, a tensor will not have AutogradMeta when +// its `requires_grad_` is false, but when we use this tensor in the forward pass of +// a function that requires saving this tensor for backward, we need to keep track of +// this tensor's version to make sure it's always valid in the autograd graph. +// +// To achieve this goal, we put the version counter in TensorImpl instead of AutogradMeta, +// and have it always be available. This allows us to have the optimization of not +// carrying AutogradMeta when a tensor doesn't require gradient. +// +// A hypothetical alternative way to achieve this goal is to initialize AutogradMeta and +// create the version counter for the non-requires-grad tensor only when it's saved for +// backward. However, since saving a tensor for backward happens in the forward pass, and +// our invariant is that forward pass needs to be thread-safe, lazy-initializing AutogradMeta +// when saving a tensor can introduce race conditions when we are running the forward +// pass in multi-thread scenarios, thus making the forward pass not thread-safe anymore, +// which breaks the invariant. +struct C10_API VariableVersion { + private: + struct VersionCounter : intrusive_ptr_target { + VersionCounter(uint32_t version) : version_(version) {} + std::atomic version_; + }; + c10::intrusive_ptr version_counter_; + + public: + bool unique() const { + return 1 == version_counter_.use_count(); + } + // NOTE: As of C++11 and 14, default-constructing a std::atomic variable + // leaves it in a persistently undefined state. See + // https://cplusplus.github.io/LWG/issue2334. 
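+  //
+  // Hence the constructor below always passes an explicit initial value
+  // (default 0) to the VersionCounter it allocates, rather than relying on
+  // std::atomic's default construction.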
+ VariableVersion(uint32_t version = 0) + : version_counter_(c10::make_intrusive(version)) {} + + void bump() noexcept { + ++version_counter_->version_; + } + + uint32_t current_version() const noexcept { + return version_counter_->version_; + } +}; + +/** + * The low-level representation of a tensor, which contains a pointer + * to a storage (which contains the actual data) and metadata (e.g., sizes and + * strides) describing this particular view of the data as a tensor. + * + * Some basic characteristics about our in-memory representation of + * tensors: + * + * - It contains a pointer to a storage struct (Storage/StorageImpl) + * which contains the pointer to the actual data and records the + * data type and device of the view. This allows multiple tensors + * to alias the same underlying data, which allows to efficiently + * implement differing *views* on a tensor. + * + * - The tensor struct itself records view-specific metadata about + * the tensor, e.g., sizes, strides and offset into storage. + * Each view of a storage can have a different size or offset. + * + * - This class is intrusively refcounted. It is refcounted so that + * we can support prompt deallocation of large tensors; it is + * intrusively refcounted so that we can still perform reference + * counted operations on raw pointers, which is often more convenient + * when passing tensors across language boundaries. + * + * - For backwards-compatibility reasons, a tensor may be in an + * uninitialized state. A tensor may be uninitialized in the following + * two ways: + * + * - A tensor may be DTYPE UNINITIALIZED. A tensor of this + * form has an uninitialized dtype. This situation most + * frequently arises when a user writes Tensor x(CPU). The dtype and + * is subsequently initialized when mutable_data() is + * invoked for the first time. + * + * - A tensor may be STORAGE UNINITIALIZED. A tensor of this form + * has non-zero size, but has a storage with a null data pointer. + * This situation most frequently arises when a user calls + * Resize() or FreeMemory(). This is because Caffe2 historically + * does lazy allocation: allocation of data doesn't occur until + * mutable_data() is invoked. A tensor with zero size is + * always storage initialized, because no allocation is necessary + * in this case. + * + * All combinations of these two uninitialized states are possible. + * Consider the following transcript in idiomatic Caffe2 API: + * + * Tensor x(CPU); // x is storage-initialized, dtype-UNINITIALIZED + * x.Resize(4); // x is storage-UNINITIALIZED, dtype-UNINITIALIZED + * x.mutable_data(); // x is storage-initialized, dtype-initialized + * x.FreeMemory(); // x is storage-UNINITIALIZED, dtype-initialized. + * + * All other fields on tensor are always initialized. In particular, + * size is always valid. (Historically, a tensor declared as Tensor x(CPU) + * also had uninitialized size, encoded as numel == -1, but we have now + * decided to default to zero size, resulting in numel == 0). + * + * Uninitialized storages MUST be uniquely owned, to keep our model + * simple. Thus, we will reject operations which could cause an + * uninitialized storage to become shared (or a shared storage to + * become uninitialized, e.g., from FreeMemory). + * + * In practice, tensors which are storage-UNINITIALIZED and + * dtype-UNINITIALIZED are *extremely* ephemeral: essentially, + * after you do a Resize(), you basically always call mutable_data() + * immediately afterwards. 
Most functions are not designed to + * work if given a storage-UNINITIALIZED, dtype-UNINITIALIZED tensor. + * + * We intend to eliminate all uninitialized states, so that every + * tensor is fully initialized in all fields. Please do not write new code + * that depends on these uninitialized states. + */ +struct C10_API TensorImpl : public c10::intrusive_ptr_target { + TensorImpl() = delete; + + /** + * Construct a 1-dim 0-size tensor backed by the given storage. + */ + TensorImpl(Storage&& storage, TensorTypeSet); + + /** + * Construct a 1-dim 0 size tensor that doesn't have a storage. + */ + TensorImpl(TensorTypeSet, const caffe2::TypeMeta& data_type, c10::optional device_opt); + + // Legacy constructors so I don't have to go update call sites. + // TODO: When Variable is added, delete these constructors + TensorImpl(Storage&& storage, TensorTypeId type_id) + : TensorImpl(std::move(storage), TensorTypeSet(type_id)) {} + TensorImpl(TensorTypeId type_id, const caffe2::TypeMeta& data_type, c10::optional device_opt) + : TensorImpl(TensorTypeSet(type_id), data_type, device_opt) {} + + private: + // This constructor is private, because the data_type is redundant with + // storage. Still, we pass it in separately because it's easier to write + // the initializer list if we're not worried about storage being moved out + // from under us. + TensorImpl(Storage&& storage, TensorTypeSet, const caffe2::TypeMeta& data_type, c10::optional); + + public: + TensorImpl(const TensorImpl&) = delete; + TensorImpl& operator=(const TensorImpl&) = delete; + TensorImpl(TensorImpl&&) = default; + TensorImpl& operator=(TensorImpl&&) = default; + + /** + * Release (decref) storage, and any other external allocations. This + * override is for `intrusive_ptr_target` and is used to implement weak + * tensors. + */ + virtual void release_resources() override; + + /** + * Return the TensorTypeSet corresponding to this Tensor, specifying + * all of the TensorTypeIds that this Tensor identifies as. This is the + * information used to dispatch operations on this tensor. + */ + TensorTypeSet type_set() const { return type_set_; } + + /** + * Return a reference to the sizes of this tensor. This reference remains + * valid as long as the tensor is live and not resized. + */ + virtual IntArrayRef sizes() const; + + /** + * Return a reference to the strides of this tensor. This reference remains + * valid as long as the tensor is live and not restrided. + */ + virtual IntArrayRef strides() const; + + /** + * Return the number of dimensions of this tensor. Note that 0-dimension + * represents a Tensor that is a Scalar, e.g., one that has a single element. + */ + virtual int64_t dim() const; + + /** + * True if this tensor has storage. See storage() for details. + */ + virtual bool has_storage() const; + + /** + * Return the underlying storage of a Tensor. Multiple tensors may share + * a single storage. A Storage is an impoverished, Tensor-like class + * which supports far less operations than Tensor. + * + * Avoid using this method if possible; try to use only Tensor APIs to perform + * operations. + */ + virtual const Storage& storage() const; + + /** + * The number of elements in a tensor. + * + * WARNING: Previously, if you were using the Caffe2 API, you could + * test numel() == -1 to see if a tensor was uninitialized. This + * is no longer true; numel always accurately reports the product + * of sizes of a tensor. 
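+   * For example, a freshly constructed, dtype-uninitialized tensor now
+   * reports numel() == 0 (see the class-level comment above about the old
+   * numel == -1 convention being retired in favor of zero size).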
+ */ + virtual int64_t numel() const { +#ifdef DEBUG + TORCH_INTERNAL_ASSERT(compute_numel() == numel_); +#endif + return numel_; + } + + bool unique_version() const { + return version_counter_.unique(); + } + + /** + * Whether or not a tensor is laid out in contiguous memory. + * + * Tensors with non-trivial strides are not contiguous. See + * compute_contiguous() for the exact definition of whether or not + * a tensor is contiguous or not. + */ + virtual bool is_contiguous(at::MemoryFormat memory_format=at::MemoryFormat::Contiguous) const; + + bool is_sparse() const { + // NB: This method is not virtual and avoid dispatches for performance reasons. + return type_set_.has(TensorTypeId::SparseCPUTensorId) || + type_set_.has(TensorTypeId::SparseCUDATensorId) || + type_set_.has(TensorTypeId::SparseHIPTensorId); + } + + bool is_quantized() const { + // NB: This method is not virtual and avoid dispatches for performance reasons. + return type_set_.has(TensorTypeId::QuantizedCPUTensorId); + } + + bool is_cuda() const { + // NB: This method is not virtual and avoid dispatches for performance reasons. + return type_set_.has(TensorTypeId::CUDATensorId) || + type_set_.has(TensorTypeId::SparseCUDATensorId); + } + + bool is_hip() const { + // NB: This method is not virtual and avoid dispatches for performance reasons. + return type_set_.has(TensorTypeId::HIPTensorId) || + type_set_.has(TensorTypeId::SparseHIPTensorId); + } + + bool is_mkldnn() const { + return type_set_.has(TensorTypeId::MkldnnCPUTensorId); + } + + int64_t get_device() const { + TORCH_CHECK( + device_opt_.has_value(), + "tensor does not have a device"); + // See NOTE [c10::optional operator usage in CUDA] + return (*device_opt_).index(); + } + + Device device() const { + TORCH_CHECK( + device_opt_.has_value(), + "tensor does not have a device"); + // See NOTE [c10::optional operator usage in CUDA] + return *device_opt_; + } + + Layout layout() const { + // NB: This method is not virtual and avoid dispatches for perf. + if (is_sparse()) { + return kSparse; + } else if (is_mkldnn()) { + return kMkldnn; + } else { + return kStrided; + } + } + + /** + * If `condition_when_zero_dim` is true, and the tensor is a 1-dim, 1-size + * tensor, reshape the tensor into a 0-dim tensor (scalar). + * + * This helper function is called from generated wrapper code, to help + * "fix up" tensors that legacy code didn't generate in the correct shape. + * For example, suppose that we have a legacy function 'add' which produces + * a tensor which is the same shape as its inputs; however, if the inputs + * were zero-dimensional, it produced a 1-dim 1-size tensor (don't ask). + * result->maybe_zero_dim(lhs->dim() == 0 && rhs->dim() == 0) will be called, + * correctly resetting the dimension to 0 when when the inputs had 0-dim. + * + * As we teach more and more of TH to handle 0-dim correctly, this function + * will become less necessary. At the moment, it is often called from functions + * that correctly handle the 0-dim case, and is just dead code in this case. + * In the glorious future, this function will be eliminated entirely. + */ + virtual TensorImpl* maybe_zero_dim(bool condition_when_zero_dim); + + /** + * True if a tensor was auto-wrapped from a C++ or Python number. + * For example, when you write 't + 2', 2 is auto-wrapped into a Tensor + * with `is_wrapped_number_` set to true. + * + * Wrapped numbers do not participate in the result type computation for + * mixed-type operations if there are any Tensors that are not wrapped + * numbers. 
This is useful, because we want 't + 2' to work with + * any type of tensor, not just LongTensor (which is what integers + * in Python represent). + * + * Otherwise, they behave like their non-wrapped equivalents. + * See [Result type computation] in TensorIterator.h. + * + * Why did we opt for wrapped numbers, as opposed to just having + * an extra function add(Tensor, Scalar)? This helps greatly reduce + * the amount of code we have to write for add, when actually + * a Tensor-Scalar addition is really just a Tensor-Tensor + * addition when the RHS is 0-dim (except for promotion behavior.) + */ + bool is_wrapped_number() const { + return is_wrapped_number_; + } + + /** + * Set whether or not a tensor was auto-wrapped from a C++ or Python + * number. You probably don't want to call this, unless you are + * writing binding code. + */ + void set_wrapped_number(bool value) { + TORCH_INTERNAL_ASSERT(dim() == 0); + is_wrapped_number_ = value; + } + + // ~~~~~ Autograd API ~~~~~ + // Some methods below are defined in TensorImpl.cpp because Tensor is an + // incomplete type. + + /** + * Set whether or not a tensor requires gradient. + * + * It is only valid to call this method on a Variable. + * See Note [Tensor versus Variable in C++]. + */ + void set_requires_grad(bool requires_grad); + + /** + * True if a tensor requires gradient. Tensors which require gradient + * have history tracked for any operations performed on them, so that + * we can automatically differentiate back to them. A tensor that + * requires gradient and has no history is a "leaf" tensor, which we + * accumulate gradients into. + * + * It is only valid to call this method on a Variable. + * See Note [Tensor versus Variable in C++]. + */ + bool requires_grad() const; + + /** + * Return a mutable reference to the gradient. This is conventionally + * used as `t.grad() = x` to set a gradient to a completely new tensor. + * + * It is only valid to call this method on a Variable. + * See Note [Tensor versus Variable in C++]. + */ + at::Tensor& grad(); + + /** + * Return the accumulated gradient of a tensor. This gradient is written + * into when performing backwards, when this tensor is a leaf tensor. + * + * It is only valid to call this method on a Variable. + * See Note [Tensor versus Variable in C++]. + */ + const at::Tensor& grad() const; + + /** + * Return a typed data pointer to the actual data which this tensor refers to. + * This checks that the requested type (from the template parameter) matches + * the internal type of the tensor. + * + * It is invalid to call data() on a dtype-uninitialized tensor, even if + * the size is 0. + * + * WARNING: If a tensor is not contiguous, you MUST use strides when + * performing index calculations to determine the location of elements in + * the tensor. We recommend using 'TensorAccessor' to handle this computation + * for you; this class is available from 'Tensor'. + */ + template + inline T * data() const { + TORCH_CHECK(has_storage(), + "Cannot access data pointer of Tensor that doesn't have storage"); + TORCH_CHECK( + storage_initialized(), + "The tensor has a non-zero number of elements, but its data is not allocated yet. " + "Caffe2 uses a lazy allocation, so you will need to call " + "mutable_data() or raw_mutable_data() to actually allocate memory."); + TORCH_CHECK( + storage_.IsType(), + "Tensor type mismatch, caller expects elements to be ", + caffe2::TypeMeta::TypeName(), + ", while tensor contains ", + data_type_.name(), + ". 
"); + // We managed the type check ourselves + return storage_.unsafe_data() + storage_offset_; + } + + /** + * Return a void* data pointer to the actual data which this tensor refers to. + * + * It is invalid to call data() on a dtype-uninitialized tensor, even if the + * size is 0. + * + * WARNING: The data pointed to by this tensor may not contiguous; do NOT + * assume that itemsize() * numel() is sufficient to compute the bytes that + * can be validly read from this tensor. + */ + inline void* data() const { + TORCH_CHECK(has_storage(), + "Cannot access data pointer of Tensor that doesn't have storage"); + TORCH_CHECK(dtype_initialized(), + "Cannot access data pointer of Tensor that doesn't have initialized dtype " + "(e.g., caffe2::Tensor x(CPU), prior to calling mutable_data() on x)"); + return static_cast( + static_cast(storage_.data()) + + data_type_.itemsize() * storage_offset_); + } + + /** + * Like data(), but performs no checks. You are responsible for ensuring + * that all invariants required by data() are upheld here. + */ + template + inline T * unsafe_data() const { + return storage_.unsafe_data() + storage_offset_; + } + + /** + * Returns the TypeMeta of a tensor, which describes what data type + * it is (e.g., int, float, ...) + */ + const caffe2::TypeMeta& dtype() const { + return data_type_; + } + + /** + * Return the size of a single element of this tensor in bytes. + */ + size_t itemsize() const { + TORCH_CHECK(dtype_initialized(), + "Cannot report itemsize of Tensor that doesn't have initialized dtype " + "(e.g., caffe2::Tensor x(CPU), prior to calling mutable_data() on x)"); + return data_type_.itemsize(); + } + + /** + * Return the offset in number of elements into the storage that this + * tensor points to. Most tensors have storage_offset() == 0, but, + * for example, an index into a tensor will have a non-zero storage_offset(). + * + * WARNING: This is NOT computed in bytes. + * + * XXX: The only thing stopping this function from being virtual is Variable. + */ + virtual int64_t storage_offset() const { + return storage_offset_; + } + + /** + * True if a tensor has no elements (e.g., numel() == 0). + */ + inline bool is_empty() const { + return numel() == 0; + } + + /** + * Change the dimensionality of a tensor. This is truly a resize: + * old sizes, if they are still valid, are preserved (this invariant + * is utilized by some call-sites, e.g., the implementation of squeeze, which + * mostly wants the sizes to stay the same). New dimensions are given zero + * size and zero stride; this is probably not what you want--you should + * set_size/set_stride afterwards. + * + * TODO: This should be jettisoned in favor of `set_sizes_and_strides`, + * which is harder to misuse. + */ + virtual void resize_dim(int64_t ndim) { + TORCH_CHECK(allow_tensor_metadata_change(), "resize_dim ", err_msg_tensor_metadata_change_not_allowed); + sizes_.resize(ndim, 0); + strides_.resize(ndim, 0); + refresh_numel(); + refresh_contiguous(); + } + + /** + * Change the size at some dimension. This DOES NOT update strides; + * thus, most changes to size will not preserve contiguity. You probably + * also want to call set_stride() when you call this. + * + * TODO: This should be jettisoned in favor of `set_sizes_and_strides`, + * which is harder to misuse. 
+ */ + virtual void set_size(int64_t dim, int64_t new_size) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_size ", err_msg_tensor_metadata_change_not_allowed); + sizes_.at(dim) = new_size; + refresh_numel(); + refresh_contiguous(); + } + + /** + * Change the stride at some dimension. + * + * TODO: This should be jettisoned in favor of `set_sizes_and_strides`, + * which is harder to misuse. + */ + virtual void set_stride(int64_t dim, int64_t new_stride) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_stride ", err_msg_tensor_metadata_change_not_allowed); + strides_[dim] = new_stride; + refresh_numel(); + refresh_contiguous(); + } + + /** + * Set the offset into the storage of this tensor. + * + * WARNING: This does NOT check if the tensor is in bounds for the new + * location at the storage; the caller is responsible for checking this + * (and resizing if necessary.) + */ + virtual void set_storage_offset(int64_t storage_offset) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_storage_offset ", err_msg_tensor_metadata_change_not_allowed); + storage_offset_ = storage_offset; + } + + /** + * Like set_sizes_and_strides but assumes contiguous strides. + * + * WARNING: This function does not check if the requested + * sizes/strides are in bounds for the storage that is allocated; + * this is the responsibility of the caller + */ + void set_sizes_contiguous(IntArrayRef new_size) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_sizes_contiguous ", err_msg_tensor_metadata_change_not_allowed); + auto new_dim = new_size.size(); + + sizes_.resize(new_dim); + for (size_t dim = 0; dim < new_dim; ++dim) { + sizes_[dim] = new_size[dim]; + } + + refresh_numel(); + empty_tensor_restride(MemoryFormat::Contiguous); + } + + /** + * Set the sizes and strides of a tensor. + * + * WARNING: This function does not check if the requested + * sizes/strides are in bounds for the storage that is allocated; + * this is the responsibility of the caller + */ + void set_sizes_and_strides(IntArrayRef new_size, IntArrayRef new_stride) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_sizes_and_strides ", err_msg_tensor_metadata_change_not_allowed); + TORCH_CHECK( + new_size.size() == new_stride.size(), + "dimensionality of sizes (", + new_size.size(), + ") must match dimensionality of strides (", + new_stride.size(), + ")"); + auto new_dim = new_size.size(); + + sizes_.resize(new_dim); + for (size_t dim = 0; dim < new_dim; ++dim) { + sizes_[dim] = new_size[dim]; + } + + strides_.resize(new_dim); + if (new_dim > 0) { + for (size_t dim = new_dim - 1; ; dim--) { + if (new_stride[dim] >= 0) { + strides_[dim] = new_stride[dim]; + } else { + // XXX: This behavior is surprising and may need to be removed to + // support negative strides. Some pytorch functions rely on it: + // for example, torch.cat (run TestTorch.test_cat_empty). + if (dim == new_dim - 1) { + strides_[dim] = 1; + } else { + // Keep stride monotonically increasing to match NumPy. + strides_[dim] = std::max(sizes_[dim + 1], 1) * strides_[dim + 1]; + } + } + if (dim == 0) break; + } + } + + refresh_numel(); + refresh_contiguous(); + } + + /** + * Return the size of a tensor at some dimension. + */ + virtual int64_t size(int64_t d) const; + + /** + * Return the stride of a tensor at some dimension. + */ + virtual int64_t stride(int64_t d) const; + + /** + * Set whether a tensor allows changes to its metadata (e.g. sizes / strides / storage / storage_offset). + * See NOTE [ Metadata Change for a Detached Tensor ] for details. 
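+   * In practice (see the NOTE near the end of this file), tensors obtained
+   * via .data or .detach() typically carry this flag as false, so metadata
+   * mutators such as set_size() or resize_dim() on them will fail their
+   * TORCH_CHECK.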
+ */ + void set_allow_tensor_metadata_change(bool value) { + allow_tensor_metadata_change_ = value; + } + + /** + * True if a tensor allows changes to its metadata (e.g. sizes / strides / storage / storage_offset). + * See NOTE [ Metadata Change for a Detached Tensor ] for details. + */ + bool allow_tensor_metadata_change() const { + return allow_tensor_metadata_change_; + } + + /** + * Set the pointer to autograd metadata. + */ + void set_autograd_meta(std::unique_ptr autograd_meta); + + /** + * Return the pointer to autograd metadata. May return nullptr if the + * tensor does not track gradients. + */ + c10::AutogradMetaInterface* autograd_meta() const; + + /** + * Set the pointer to named tensor metadata. + */ + void set_named_tensor_meta(std::unique_ptr named_tensor_meta) { + TORCH_WARN_ONCE( + "Named tensors and all their associated APIs are an experimental feature ", + "and subject to change. Please do not use them for anything important ", + "until they are released as stable."); +#ifdef DEBUG + if (named_tensor_meta) { + TORCH_INTERNAL_ASSERT(named_tensor_meta->slow_dim() == dim()); + } +#endif + named_tensor_meta_ = std::move(named_tensor_meta); + } + + /** + * Return the pointer to named tensor metadata. + */ + const c10::NamedTensorMetaInterface* named_tensor_meta() const { + return named_tensor_meta_.get(); + } + + c10::NamedTensorMetaInterface* named_tensor_meta() { + return named_tensor_meta_.get(); + } + + + // NOTE [ TensorImpl Shallow-Copying ] + // + // TensorImpl shallow-copying is used when we want to have two Variables share the same tensor metadata + // (e.g. sizes / strides / storage pointer / storage_offset), but each with a different autograd history. + // Example call sites: + // + // 1. `var_detached = var.detach()` uses `shallow_copy_and_detach()` to create `var_detached` that shares + // the same tensor metadata with `var`, but with a completely new autograd history. + // 2. `var.set_data(tensor)` uses `shallow_copy_from()` to copy tensor metadata from + // `tensor` into `var`, while keeping `var`'s original AutogradMeta. + // + // Functions that shallow-copy a TensorImpl (such as `shallow_copy_and_detach()` / `shallow_copy_from()` / + // `copy_tensor_metadata()`) copy the tensor metadata fields (e.g. sizes / strides / storage pointer / + // storage_offset) by value. However, the following fields are not copied: + // + // 1. the AutogradMeta pointer, because it is unique for each Variable. + // 2. the version counter, because the destination TensorImpl's version counter is either set to the + // passed-in `version_counter` (in `shallow_copy_and_detach()` and `copy_tensor_metadata()`), or it is kept + // intact (in `shallow_copy_from()`). See NOTE [ Version Counter Sharing ] for details. + // + // In `shallow_copy_and_detach()` and `copy_tensor_metadata()`, the passed-in `allow_tensor_metadata_change` + // determines whether the TensorImpl shallow-copy allows changes to its metadata (e.g. sizes / strides / + // storage / storage_offset). See NOTE [ Metadata Change for a Detached Tensor ] for details. + // + // In `shallow_copy_from()`, we don't check the destination TensorImpl's `allow_tensor_metadata_change_`, + // because `shallow_copy_from()` is used for implementing functions such as `var.set_data(tensor)`, which + // changes `var`'s tensor metadata and expects its `allow_tensor_metadata_change_` to be ignored. + + /** + * One TensorImpl can be copied to another TensorImpl if they have the same + * TensorTypeSet. 
The only two special cases (for legacy reason) are: + * CPUTensorId is compatible with CUDATensorId and SparseCPUTensorId is + * compatible with SparseCUDATensorId. + */ + inline bool has_compatible_shallow_copy_type(TensorTypeSet from) { + auto is_dense = [](TensorTypeSet ts) { + return ts.has(TensorTypeId::CPUTensorId) || + ts.has(TensorTypeId::CUDATensorId) || + ts.has(TensorTypeId::HIPTensorId); + }; + auto is_sparse = [](TensorTypeSet ts) { + return ts.has(TensorTypeId::SparseCPUTensorId) || + ts.has(TensorTypeId::SparseCUDATensorId) || + ts.has(TensorTypeId::SparseHIPTensorId); + }; + return (type_set_ == from) || (is_dense(type_set_) && is_dense(from)) || (is_sparse(type_set_) && is_sparse(from)); + } + + /** + * Return a TensorImpl that is a shallow-copy of this TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, + * see NOTE [ TensorImpl Shallow-Copying ]. + */ + virtual c10::intrusive_ptr shallow_copy_and_detach( + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change) const { + auto impl = c10::make_intrusive(Storage(storage()), type_set_); + copy_tensor_metadata( + /*src_impl=*/this, + /*dest_impl=*/impl.get(), + /*version_counter=*/version_counter, + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change); + impl->refresh_numel(); + impl->refresh_contiguous(); + return impl; + } + + /** + * Shallow-copies data from another TensorImpl into this TensorImpl. + * + * For why this function doesn't check this TensorImpl's `allow_tensor_metadata_change_`, + * see NOTE [ TensorImpl Shallow-Copying ]. + */ + virtual void shallow_copy_from(const c10::intrusive_ptr& impl) { + copy_tensor_metadata( + /*src_impl=*/impl.get(), + /*dest_impl=*/this, + /*version_counter=*/version_counter(), + /*allow_tensor_metadata_change=*/allow_tensor_metadata_change()); + refresh_numel(); + refresh_contiguous(); + } + + void set_version_counter( + const c10::VariableVersion& version_counter) noexcept { + version_counter_ = version_counter; + } + + const c10::VariableVersion& version_counter() const noexcept { + return version_counter_; + } + + void bump_version() noexcept { + version_counter_.bump(); + } + + inline void set_pyobj(PyObject* pyobj) noexcept { + pyobj_ = pyobj; + } + + inline PyObject* pyobj() const noexcept { + return pyobj_; + } + + private: + // See NOTE [c10::optional operator usage in CUDA] + // We probably don't want to expose this publically until + // the note is addressed. + c10::optional device_opt() const { + return device_opt_; + } + + public: + + /** + * The device type of a Tensor, e.g., DeviceType::CPU or DeviceType::CUDA. + */ + DeviceType device_type() const { + // TODO: A useful internal assert would be to show that device_opt_ is null + // only if you are an undefined tensor + TORCH_CHECK(device_opt_.has_value(), "device_type cannot be run on undefined Tensor"); + // See NOTE [c10::optional operator usage in CUDA] + return (*device_opt_).type(); + } + + /** + * @brief Extends the outer-most dimension of this tensor by num elements, + * preserving the existing data. + * + * The underlying data may be reallocated in order to accommodate the new + * elements, in which case this tensors' capacity is grown at a factor of + * growthPct. This ensures that Extend runs on an amortized O(1) time + * complexity. + * + * This op is auto-asynchronous if the underlying device (CUDA) supports it. 
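+   *
+   * Illustrative use (a sketch, not from the upstream header): appending one
+   * row at a time to a 2-D tensor,
+   *
+   *   impl->Extend(1, 50.0f);   // grow capacity by ~50% when reallocating
+   *
+   * only reallocates when the new numel exceeds the reserved capacity, which
+   * is what gives the amortized O(1) behaviour described above.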
+ */ + void Extend(int64_t num, float growthPct) { + TORCH_CHECK(sizes_.size() >= 1u); + TORCH_CHECK(num >= 0, "`num` must be non-negative for Extend"); + TORCH_CHECK( + is_contiguous_, + "Right now Extend is only supported for contiguous Tensor."); + auto newDims = sizes_; + newDims[0] += num; + if (!storage_.data()) { + Resize(newDims); + return; + } + auto newNumel = std::accumulate( + newDims.begin(), + newDims.end(), + static_cast(1), + std::multiplies()); + if (newNumel * storage_.itemsize() <= storage_.capacity()) { + sizes_ = newDims; + numel_ = newNumel; + return; + } + auto newCapacity = sizes_; + newCapacity[0] = std::max( + newDims[0], std::ceil(sizes_[0] * (growthPct + 100) / 100)); + auto oldData = std::move(storage_.data_ptr()); + auto oldSize = numel_; + auto oldDims = sizes_; + Resize(newCapacity); + auto* newData = raw_mutable_data(data_type_); + if (data_type_.copy()) { + TORCH_CHECK( + device_type() == DeviceType::CPU, + "non-POD types work only on CPU"); + data_type_.copy()(oldData.get(), newData, oldSize); + } else { + // The following copy uses the current (thread local) stream for copying + // and also takes the GPU id from the device() field passed in. + // + // TODO: Potentially more enforcements are necessary to avoid accidental + // switch to sync copy if the currently set device is wrong. + // + // Specifically, we might need to switch to a different context device + // here explicitly to avoid relying on user synchronizing things + // properly. + CopyBytes( + oldSize * itemsize(), + oldData.get(), + device(), + newData, + device(), + true); // non-blocking + } + reserved_ = true; + sizes_ = newDims; + numel_ = newNumel; + } + + /** + * @brief Reserve space for the underlying tensor. + * + * This must be called after Resize(), since we only specify the first + * dimension This does not copy over the old data to the newly allocated space + */ + template + void ReserveSpace(const T& outer_dim) { + TORCH_CHECK( + is_contiguous_, + "Right now ReserveSpace is only supported for contiguous Tensor."); + TORCH_CHECK( + storage_.unique(), "Can't call ReserveSpace on shared storage."); + auto newCapacity = sizes_; + newCapacity[0] = outer_dim; + auto newNumel = std::accumulate( + newCapacity.begin(), + newCapacity.end(), + static_cast(1), + std::multiplies()); + if (newNumel * storage_.itemsize() <= storage_.capacity()) { + return; + } + // Old data is discarded + storage_.data_ptr().clear(); + auto oldSize = numel_; + auto oldDims = sizes_; + Resize(newCapacity); + // Allocate new memory but don't copy over the data + raw_mutable_data(data_type_); + sizes_ = oldDims; + numel_ = oldSize; + reserved_ = true; + } + + /** + * @brief Resizes a tensor. + * + * Resize takes in a vector of ints specifying the dimensions of the tensor. + * You can pass in an empty vector to specify that it is a scalar (i.e. + * containing one single item). + * + * The underlying storage may be deleted after calling Resize: if the new + * shape leads to a different number of items in the tensor, the old memory + * is deleted and new memory will be allocated next time you call + * mutable_data(). However, if the shape is different but the total number of + * items is the same, the underlying storage is kept. + * + * This method respects caffe2_keep_on_shrink. Consult the internal logic + * of this method to see exactly under what circumstances this flag matters. + */ + template + void Resize(Ts... 
dim_source) { + bool size_changed = SetDims(dim_source...); + if (size_changed) { + // If needed, we will free the data. the next mutable_data() call + // will create the data storage. + bool reset_tensor = false; + if (reserved_) { + // If tensor is reserved then don't claim its memeory unless capacity() + // is smaller than new size + reset_tensor = storage_.capacity() < (storage_offset_ + numel_) * storage_.itemsize(); + } else { + reset_tensor = storage_.capacity() < + (storage_offset_ + numel_) * storage_.itemsize() || + !FLAGS_caffe2_keep_on_shrink || + storage_.capacity() - + (storage_offset_ + numel_) * storage_.itemsize() > + static_cast(FLAGS_caffe2_max_keep_on_shrink_memory); + } + + if (reset_tensor && storage_initialized()) { + FreeMemory(); + } + } + } + + /** + * Resizes the tensor without touching underlying storage. + * This requires the total size of the tensor to remains constant. + */ + inline void Reshape(const std::vector& dims) { + TORCH_CHECK( + is_contiguous_, + "Right now Reshape is only supported for contiguous Tensor."); + int64_t new_size = 1; + for (auto d : dims) { + TORCH_CHECK(d >= 0); + new_size *= d; + } + TORCH_CHECK( + new_size == numel_, + "New size and old size are not equal. You cannot use Reshape, " + "but should use Resize." + // TODO(jiayq): remove the following warning after pending diffs + // stabilize. + " The old caffe2 mixes Reshape and Resize but this behavior has " + "been changed. If you find this error, most likely you will need " + "to change corresponding code from Reshape to Resize."); + sizes_ = dims; + empty_tensor_restride(MemoryFormat::Contiguous); + } + + /** + * Release whatever memory the tensor was holding but keep size and type + * information. Subsequent call to mutable_data will trigger new memory + * allocation. + */ + inline void FreeMemory() { + // We'll detach from the old Storage and create a new one + storage_ = Storage::create_legacy(storage_.device(), data_type_); + storage_offset_ = 0; + } + + /** + * @brief Shares the data with another tensor. + * + * To share data between two tensors, the sizes of the two tensors must be + * equal already. The reason we do not implicitly do a Resize to make the two + * tensors have the same shape is that we want to allow tensors of different + * shapes but the same number of items to still be able to share data. This + * allows one to e.g. have a n-dimensional Tensor and a flattened version + * sharing the same underlying storage. + * + * The source tensor should already have its data allocated. + */ + // To be deprecated + void ShareData(const TensorImpl& src) { + // Right now, we are assuming the device_type are the same, since it is + // inherently the same in the non-templatized code. We should probably add + // an assert here which might affect perf a little bit. + TORCH_CHECK( + src.numel_ == numel_, + "Size mismatch - did you call reshape before sharing the data?"); + // It is possible that the source tensor hasn't called mutable_data() yet, + // in which case ShareData() doesn't make much sense since we don't really + // know what to share yet. 
+ // TODO: Add the assert after all uninitialized states are eliminated + // TORCH_CHECK(src.dtype_initialized(), + // "Source tensor don't have a data type (did you call mutable_data on the tensor?)"); + if (!src.dtype_initialized()) { + C10_LOG_EVERY_MS(WARNING, 1000) << + "Source tensor don't have a data type (did you call mutable_data on the tensor?)"; + } + TORCH_CHECK( + src.storage_initialized(), + "Source tensor has no content and has size > 0"); + // Finally, do sharing. + /* Since we create new Storage whenever we need to change data_type/capacity + * this still keeps the original semantics + */ + storage_ = src.storage(); + data_type_ = src.dtype(); + device_opt_ = src.device_opt(); + storage_offset_ = src.storage_offset(); + } + + void ShareExternalPointer( + DataPtr&& data_ptr, + const caffe2::TypeMeta& data_type, + size_t capacity) { + TORCH_CHECK( + data_type.id() != caffe2::TypeIdentifier::uninitialized(), + "To share with a raw external pointer you need to pass in an " + "initialized data_type(TypeMeta)."); + if (!capacity) { + capacity = numel_ * data_type.itemsize(); + } + if (storage_.unique()) { + storage_.UniqueStorageShareExternalPointer( + std::move(data_ptr), data_type, capacity); + data_type_ = data_type; + device_opt_ = storage_.device(); + storage_offset_ = 0; + } else { + int64_t numel = capacity / data_type.itemsize(); + // Create a new Storage + storage_ = Storage( + data_type, + numel, + std::move(data_ptr), + /*allocator=*/nullptr, + /*resizable=*/false); + data_type_ = data_type; + device_opt_ = storage_.device(); + storage_offset_ = 0; + } + } + + /** + * Returns a mutable raw pointer of the underlying storage. Since we will need + * to know the type of the data for allocation, a TypeMeta object is passed in + * to specify the necessary information. This is conceptually equivalent of + * calling mutable_data() where the TypeMeta parameter meta is derived from + * the type T. This function differs from mutable_data() in the sense that + * the type T can be specified during runtime via the TypeMeta object. + * + * If the existing data does not match the desired type, it will be deleted + * and a new storage will be created. + */ + inline void* raw_mutable_data(const caffe2::TypeMeta& meta) { + // For 0-size tensors it's fine to return any pointer (including nullptr) + if (data_type_ == meta && storage_initialized()) { + return static_cast(static_cast(storage_.data()) + storage_offset_ * meta.itemsize()); + } else { + bool had_special_dtor = data_type_.placementDelete() != nullptr; + storage_offset_ = 0; + if (storage_.unique()) { + storage_.set_dtype(meta); + } else { + if (data_type_ != meta) { + storage_ = Storage::create_legacy(storage_.device(), meta); + } + } + data_type_ = meta; + // NB: device is not changed + + // We can reuse the existing buffer if the current data does not have + // a special destructor and the new data doesn't have a special + // constructor. + if (numel_ == 0 || + (meta.placementNew() == nullptr && !had_special_dtor && + storage_.numel() >= numel_)) { + TORCH_INTERNAL_ASSERT(storage_offset_ == 0); // because we just reallocated + return storage_.data(); + } + const Allocator* allocator = storage_.allocator(); + // Storage might have nullptr allocator in rare cases, for example, if + // an external memory segment has been wrapped with Tensor and we don't + // know how to reallocate it. However, in order to preserve legacy C2 + // behavior, we allow reallocating the memory using default allocator. 
+ if (allocator == nullptr) { + allocator = GetAllocator(storage_.device_type()); + } + if (meta.placementNew()) { + // For types that need placement new, we will call it, as well as + // making sure that when the data is freed, it calls the right + // destruction procedure. + auto size = numel_; + auto dtor = data_type_.placementDelete(); + auto data_ptr = allocator->allocate(numel_ * storage_.itemsize()); + storage_.set_data_ptr(PlacementDeleteContext::makeDataPtr( + std::move(data_ptr), dtor, size, storage_.device())); + data_type_.placementNew()(storage_.data(), numel_); + } else { + // For fundamental type, new and delete is easier. + storage_.set_data_ptr( + allocator->allocate(numel_ * storage_.itemsize())); + } + storage_.set_numel(numel_); + TORCH_INTERNAL_ASSERT(storage_offset_ == 0); // because we just reallocated + device_opt_ = storage_.device(); + return storage_.data(); + } + } + + /** + * Returns a typed pointer of the underlying storage. + * + * For fundamental types, we reuse possible existing storage if there + * is sufficient capacity. + */ + template + inline T* mutable_data() { + if (storage_initialized() && storage_.IsType()) { + return static_cast(storage_.data()) + storage_offset_; + } + // Check it here statically - otherwise TypeMeta would throw the runtime + // error in attempt to invoke TypeMeta::ctor() + static_assert( + std::is_default_constructible::value, + "Tensor can't hold non-default-constructible types"); + return static_cast(raw_mutable_data(caffe2::TypeMeta::Make())); + } + + /** + * True if a tensor is storage initialized. A tensor may become + * storage UNINITIALIZED after a Resize() or FreeMemory() + */ + bool storage_initialized() const { + TORCH_CHECK(has_storage(), "cannot call storage_initialized on tensor that does not have storage"); + return storage_.data() || numel_ == 0; + } + + /** + * True if a tensor is dtype initialized. A tensor allocated with + * Caffe2-style constructors is dtype uninitialized until the + * first time mutable_data() is called. + */ + bool dtype_initialized() const noexcept { + return data_type_ != caffe2::TypeMeta(); + } + + void set_storage(at::Storage storage) { + TORCH_CHECK(allow_tensor_metadata_change(), "set_storage ", err_msg_tensor_metadata_change_not_allowed); + storage_ = std::move(storage); + data_type_ = storage_.dtype(); + device_opt_ = storage_.device(); + } + + /** + * Set the strides of the tensor to match memory_format + * + * WARNING: This function doesn't rearrange data and assumes tensor is a memory + * contiguous + */ + virtual void empty_tensor_restride(MemoryFormat memory_format) { + #ifdef DEBUG + TORCH_INTERNAL_ASSERT(compute_numel() == numel_, + "If you are seeing this error, that means empty_tensor_restride was " + "called before setting correct numel"); + #endif + switch (memory_format) { + case MemoryFormat::Contiguous: { + strides_.resize(sizes_.size(), 0); + if (dim() > 0) { + int last_idx = dim() - 1; + strides_[last_idx] = 1; + for (auto i = last_idx - 1; i >= 0; --i) { + strides_[i] = strides_[i + 1] * std::max(sizes_[i + 1], 1); + } + } + break; + } + case MemoryFormat::ChannelsLast: { + TORCH_CHECK( + dim() == 4, + "required rank 4 tensor to use channels_last format"); + set_sizes_and_strides(sizes(), get_channels_last_strides(sizes())); + break; + } + case MemoryFormat::Preserve: + TORCH_CHECK(false, "unsupported memory format ", memory_format); + // Cleaning warning messages, no need to break as TORCH_CHECK(false) + // terminates flow. 
+ // break; + } + // recompute contiguous flag, as currently NHWC/NCHW flags are not mutually + // exclusive see #24090 + refresh_contiguous(); + } + + bool is_strides_like_channels_last() const { + return is_channels_last_; + } + + bool is_non_overlapping_and_dense() const { + return is_non_overlapping_and_dense_; + } + +private: + + // The Caffe2 Resize() method supports being called both as Resize({2,2}) as + // well as variadic with Resize(2, 2). These overloads provide all of the + // supported calling configurations, while being overloads (and not templates) + // so that implicit conversions still work. + // + // SetDims on ArrayRef is internally implemented as a template, so we can + // handle both ArrayRefs of different types (there are some uses of + // Resize in Caffe2 which pass in int, not int64_t.) + + template < + typename T, + typename = typename std::enable_if::value>::type> + bool SetDimsTemplate(ArrayRef src) { + auto old_numel = numel_; + sizes_.resize(src.size()); + int64_t new_numel = 1; + for (size_t i = 0; i < src.size(); ++i) { + new_numel *= src[i]; + sizes_[i] = src[i]; + } + numel_ = new_numel; + empty_tensor_restride(MemoryFormat::Contiguous); + return numel_ != old_numel; + } + + bool SetDims(ArrayRef s) { + return SetDimsTemplate(s); + } + + bool SetDims(ArrayRef s) { + return SetDimsTemplate(s); + } + + bool SetDims(ArrayRef s) { + return SetDimsTemplate(s); + } + + bool SetDims() { + return SetDims(IntArrayRef{}); + } + + bool SetDims(const int64_t d0) { + return SetDims(IntArrayRef{d0}); + } + + bool SetDims(const int64_t d0, const int64_t d1) { + return SetDims(IntArrayRef{d0, d1}); + } + + bool SetDims(const int64_t d0, const int64_t d1, const int64_t d2) { + return SetDims(IntArrayRef{d0, d1, d2}); + } + + bool SetDims(const int64_t d0, const int64_t d1, const int64_t d2, const int64_t d3) { + return SetDims(IntArrayRef{d0, d1, d2, d3}); + } + + /** + * Compute the number of elements based on the sizes of a tensor. + */ + int64_t compute_numel() const { + int64_t n = 1; + for (auto s : sizes()) { + n *= s; + } + return n; + } + + /** + * Compute whether or not a tensor is contiguous based on the sizes and + * strides of a tensor. + */ + bool compute_contiguous() const; + + bool compute_channels_last_contiguous() const; + + bool compute_strides_like_channels_last() const; + + bool compute_non_overlapping_and_dense() const; + +protected: + /** + * Recompute the cached numel of a tensor. Call this if you modify sizes. + */ + void refresh_numel() { + numel_ = compute_numel(); + } + + /** + * Recompute the cached contiguity of a tensor. Call this if you modify sizes + * or strides. + */ + void refresh_contiguous() { + is_contiguous_ = compute_contiguous(); + is_channels_last_contiguous_ = compute_channels_last_contiguous(); + is_channels_last_ = is_channels_last_contiguous_ || compute_strides_like_channels_last(); + is_non_overlapping_and_dense_ = is_contiguous_ || is_channels_last_contiguous_ || compute_non_overlapping_and_dense(); + } + + /** + * Copy the tensor metadata fields (e.g. sizes / strides / storage pointer / storage_offset) + * from one TensorImpl to another TensorImpl. + * + * For usage of `version_counter` and `allow_tensor_metadata_change`, see NOTE [ TensorImpl Shallow-Copying ]. 
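+   *
+   * (Per that NOTE: value-type metadata fields such as sizes, strides, the
+   * storage pointer and storage_offset are copied; the AutogradMeta pointer
+   * is never copied, and the destination's version counter is taken from the
+   * `version_counter` argument.)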
+ */ + static void copy_tensor_metadata( + const TensorImpl* src_impl, + TensorImpl* dest_impl, + const c10::VariableVersion& version_counter, + bool allow_tensor_metadata_change); + +protected: + // Error message to show when the user tries to change tensor metadata on + // Tensor created from .data or .detach(). + // + // See NOTE [ Metadata Change for a Detached Tensor ] for details. + static const char * const err_msg_tensor_metadata_change_not_allowed; + + Storage storage_; + +private: + // This pointer points to an AutogradMeta struct that stores autograd-specific fields + // (such as grad_ / grad_fn_ / grad_accumulator_). + // This pointer always has unique ownership (meaning only one TensorImpl can own it + // at a time). + // + // autograd_meta_ can be nullptr, as an optimization. When this occurs, it is + // equivalent to having an autograd_meta_ pointing to a default constructed + // AutogradMeta; intuitively, tensors which don't require grad will have this + // field set to null. If !type_set_.has(VariableTensorId), then + // autograd_meta == nullptr (but not vice versa, due to the nullptr + // optimization) + // + // This means accessors on autograd_meta_ have to be careful to test if they + // got a nullptr, and handle default behavior appropriately in that case. + // + // Note that we don't enforce the invariant that if the AutogradMeta is + // default constructed, it is nullptr (to do this, we'd have to continuously + // check if an AutogradMeta became, by mutation, equal to the default + // constructed form. (This might be useful, but it seems rare enough that + // a requires_grad=True variable will turn back into the requires_grad=False + // version.) So there are three representable states: + // + // 1. autograd_meta_ == nullptr + // 2. autograd_meta_ is default constructed (semantically, same as (1)) + // 3. autograd_meta_ has nontrivial information content + // + std::unique_ptr autograd_meta_ = nullptr; + +protected: + std::unique_ptr named_tensor_meta_ = nullptr; + + c10::VariableVersion version_counter_; + + // This field contains a weak reference to a PyObject representing + // this Tensor. It MUST NOT be a strong reference, as that would + // create a reference cycle between Tensor and the PyObject. If + // pyobj is nullptr, when we transfer Tensor to Python, we allocate + // a new PyObject for it and set this field. This is thread safe + // because all Python code is protected under the GIL. This design does + // NOT WORK for Tensors which are shared across multiple Python + // subinterpreters (introduced in Python 3.8) since you don't have + // enough space to store the separate PyObject per subinterpreter. + // When a PyObject dies, you are obligated to clear this field + // (otherwise, you will try to use-after-free the pyobj); this currently + // occurs in THPVariable_clear in torch/csrc/autograd/python_variable.cpp + PyObject* pyobj_ = nullptr; + + // We could save a word or two by combining the SmallVector structs, + // since their size is redundant, and if we need to overflow the buffer space + // we could keep the two pointers together. However, that would require + // implementing another struct from scratch, so only do this if we're desperate. + SmallVector sizes_; + SmallVector strides_; + + int64_t storage_offset_ = 0; + // If sizes and strides are empty, the numel is 1!! However, most of the + // time, we will immediately set sizes to {0} and reset numel to 0. 
+ // (Can't do that in the default initializers, because there's no way to + // spell "allocate a one-element array" for strides_). + int64_t numel_ = 1; + + // INVARIANT: When storage is non-null, this type meta must + // agree with the type meta in storage + caffe2::TypeMeta data_type_; + + // NOTE [c10::optional operator usage in CUDA] + // Our optional definition doesn't compile in .cu file if `value()` or + // `operator->` are used. Instead, we always use `operator*`. + // See https://github.com/pytorch/pytorch/issues/18496 for more info. + // If this is too burdensome to maintain, we can just + // manually implement this with an additional bool. + + // INVARIANT: When storage is non-null, this Device must + // agree with the type meta in storage. + // + // INVARIANT: device_opt_ is only nullopt for undefined tensors + // (which do not have a device.) + c10::optional device_opt_; + + // The set of TensorTypeIds which describe this tensor + // + // INVARIANT: type_set_.has(TensorTypeId::VariableTensorId) (every tensor + // is a variable). Historically this was not the case (there was a + // distinction between plain tensors and variables), but because + // we merged Variable and Tensor, this invariant now always holds. + // This invariant is currently enforced in the constructor of TensorImpl. + // + // You might be wondering why we don't just not include VariableTensorId + // from the type set, if it is always set. The answer is, we still need + // to dispatch differently from variables, and then mask out the variable + // id once we are done handling autograd. If the boolean here was + // inverted, we wouldn't be able to get autograd codepath (since there's + // be no TensorTypeId to dispatch to!) We cannot set VariableTensorId + // as the default value contained in the *included* tensor type id set + // as TLS requires our state to be zero-initialized (i.e., it is not + // included). + TensorTypeSet type_set_; + + // You get to have eight byte-size fields here, before you + // should pack this into a bitfield. + bool is_contiguous_ = true; + + // Tensor is stored in the channels last memory format, when dimensions + // order is NCHW and C-strides < W-strides < H-strides < N-strides + // (If size of any dimension is equal to 1, this dimension strides value + // is not taken into account). + bool is_channels_last_ = false; + + // Channels last contiguous tensor is channel last tensor which occupies + // contiguous memory block. + bool is_channels_last_contiguous_ = false; + + // Dense tensor is the tensor that store values in a contiguous block of memory. + // Non-overlapping tensor is the tensor in which elements occupy individual + // non-repetitive memory. + bool is_non_overlapping_and_dense_ = false; + + bool is_wrapped_number_ = false; + + // NOTE [ Metadata Change for a Detached Tensor ] + // + // Normally, a user is allowed to change the tensor metadata + // (e.g. sizes / strides / storage / storage_offset) of a tensor. + // However, if the tensor is created by `t1_detached = t1.data` in Python + // or `t1_detached = t1.detach()` in Python/C++, those changes to the + // tensor metadata of `t1_detached` will not be propagated back to the + // original tensor `t1`. In order to make such changes explicitly illegal, + // we created the `allow_tensor_metadata_change_` flag, to prevent users + // from changing metadata of the detached tensor and expecting the original + // tensor to also be updated. 
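+  // For illustration (editorial sketch, assuming the usual libtorch C++ API;
+  // the exact error text may differ):
+  //
+  //   torch::Tensor t1 = torch::ones({2, 2});
+  //   torch::Tensor t2 = t1.detach();
+  //   t2.resize_({4});  // throws: metadata changes are not allowed on a tensor
+  //                     // created from .data or .detach()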
+ // + // NOTE: For a full list of tensor metadata fields, please see + // `copy_tensor_metadata()` in TensorImpl and its subclasses to find + // which fields are copied by value. + bool allow_tensor_metadata_change_ = true; + + // we decide to keep reserved_ and it will + // live in Tensor after the split + // The logic is that if Extend() or ReserveSpace() were ever called, + // then subsequent Resize()s will not free up Storage. + bool reserved_ = false; + +}; + +// Note [TensorImpl size constraints] +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Changed the size of TensorImpl? If the size went down, good for +// you! Adjust the documentation below and the expected size. +// Did it go up? Read on... +// +// Struct size matters. In some production systems at Facebook, we have +// 400M live tensors during a training run. Do the math: every 64-bit +// word you add to Tensor is an extra 3.2 gigabytes in RAM. +// +// If you are a Facebook employee, you can check if the run in question +// has tipped you over the point using the command here: +// https://fburl.com/q5enpv98 +// +// For reference, we OOMed at 160 bytes (20 words) per TensorImpl. +// This is not counting overhead from strides out-of-line allocation and +// StorageImpl space and this is from before we inlined sizes and strides +// directly into TensorImpl as SmallVectors. +// +// Our memory usage on 32-bit systems is suboptimal, but we're not checking +// for it at the moment (to help avoid rage inducing cycles when the +// 32-bit number is wrong). +// +// Current breakdown: +// +// vtable pointer +// strong refcount TODO: pack these into one word +// weak refcount +// storage pointer +// autograd metadata pointer +// version counter pointer +// PyObject pointer +// sizes SmallVector (begin) +// sizes SmallVector (end) +// sizes SmallVector (capacity) +// sizes SmallVector (pre-allocated 0) +// sizes SmallVector (pre-allocated 1) +// sizes SmallVector (pre-allocated 2) +// sizes SmallVector (pre-allocated 3) +// sizes SmallVector (pre-allocated 4) +// strides SmallVector (begin) +// strides SmallVector (end) +// strides SmallVector (capacity) +// strides SmallVector (pre-allocated 0) +// strides SmallVector (pre-allocated 1) +// strides SmallVector (pre-allocated 2) +// strides SmallVector (pre-allocated 3) +// strides SmallVector (pre-allocated 4) +// storage offset +// numel +// data type pointer +// (optional) device +// tensor type id +// miscellaneous bitfield +// +static_assert(sizeof(void*) != sizeof(int64_t) || // if 64-bit... + sizeof(TensorImpl) == sizeof(int64_t) * 30, + "You changed the size of TensorImpl on 64-bit arch." + "See Note [TensorImpl size constraints] on how to proceed."); + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/TensorOptions.h b/thirdparty/libtorch/include/c10/core/TensorOptions.h new file mode 100644 index 0000000000..bdae437877 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/TensorOptions.h @@ -0,0 +1,590 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include + +namespace c10 { +/// A class to encapsulate construction axes of an Tensor. TensorOptions was +/// designed to support the Python style API for specifying construction options +/// on factory functions, e.g., +/// +/// torch.zeros(2, 3, dtype=torch.int32) +/// +/// Because C++ doesn't natively support keyword arguments, there must be +/// another way of specifying keyword-like arguments. 
TensorOptions is a +/// builder class which can be used to construct this "dictionary" of keyword +/// arguments: functions which support TensorOptions conventionally take this +/// argument optionally as their last argument. +/// +/// WARNING: In PyTorch, there are `torch::` variants of factory functions, +/// e.g., torch::zeros for at::zeros. These return Variables (while the +/// stock ATen functions return plain Tensors). If you mix these functions +/// up, you WILL BE SAD. +/// +/// Rather than use the constructor of this class directly, you should prefer to +/// use the constructor functions, and then chain setter methods on top of them. +/// +/// at::device(at::kCUDA).dtype(kInt) +/// at::dtype(at::kInt) +/// +/// Additionally, anywhere a TensorOptions is expected, you can directly +/// pass at::kCUDA / at::kInt, and it will implicitly convert to a TensorOptions. +/// +/// Here are some recommended ways to create a 2x2 tensor of zeros +/// with certain properties. These all *implicitly* make use of +/// TensorOptions, even if they don't mention the class explicitly: +/// +/// at::zeros({2,2}, at::kCUDA); +/// at::zeros({2,2}, at::kLong); +/// at::zeros({2,2}, at::device(at::kCUDA).dtype(at::kLong())); +/// at::zeros({2,2}, at::device({at::kCUDA, 1})); // place on device 1 +/// at::zeros({2,2}, at::requires_grad()); +/// + +/// NOTE [ TensorOptions Constructors ] +/// +/// TensorOptions is like a dictionary with entries from the set: +/// {requires_grad, device, dtype, layout}, where each entry may be +/// unspecified (i.e., is optional). It is used to specify the properties of +/// tensors in many places both in C++ internal and API, e.g., tensor factory +/// methods like `at::empty({10}, options)`, tensor conversions like +/// `tensor.to(...)`, etc. +/// +/// To provide a simple API that is consistent with Python, where one can do +/// `torch.empty(sizes, X)` with `X` being a `torch.device`, `torch.dtype`, or a +/// `torch.layout`, we want TensorOptions to be implicitly convertible from +/// `ScalarType dtype`, `Layout layout` and `Device device`. Therefore, we have +/// three implicit constructors from each of these three types. +/// +/// This is sufficient for `ScalarType` and `Layout` as they are simple Enum +/// classes. However, `Device` is an ordinary class with implicit constructors +/// `Device(DeviceType, DeviceIndex = -1)` and `Device(std::string)` to be +/// consistent with Python API, where strings are treated as equivalent with a +/// `torch.device` object (e.g., "cuda:1" can be passed to everywhere a +/// `torch.device("cuda:1")` is accepted). To support the syntax +/// `at::empty({10}, {kCUDA, 1})` and `tensor.to(kCUDA)`, we need to make sure +/// that `TensorOptions` is implicitly constructible with any argments that a +/// `Device` can constructed from. So we have, +/// +/// /* implicit */ TensorOptions(T&& device) : TensorOptions() { +/// this->set_device(device); +/// } +/// +/// template ::value>> +/// /* implicit */ TensorOptions(Args&&... args) +/// : TensorOptions(Device(std::forward(args)...)) {} +/// +/// +/// But this will be problematic. Consider this: `TensorOptions({kCUDA, 1})`. +/// Compiler will compain about ambiguity between the copy constructor and the +/// `Device` constructor because `{kCUDA, 1}` can be converted to both a +/// `TensorOption` and a `Device`. +/// +/// To get around this, we templatize the `Device` constructor. Since overload +/// resolution is done before template resolution, our problem is solved. 
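+/// Editorial usage sketch (not part of the upstream header; assumes a
+/// translation unit that includes the usual ATen headers). All of these
+/// spellings build the same kind of options "dictionary":
+///
+///   TensorOptions opts = TensorOptions().dtype(at::kFloat)
+///                                       .device(at::kCUDA)
+///                                       .requires_grad(true);
+///   TensorOptions same = at::dtype(at::kFloat).device(at::kCUDA).requires_grad(true);
+///
+///   opts.has_dtype();         // true
+///   opts.has_layout();        // false; layout() still answers kStrided by default
+///   at::zeros({2, 2}, opts);  // factory functions take the options as the last argument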
+ + +struct C10_API TensorOptions { + TensorOptions() + : requires_grad_(false) + , pinned_memory_(false) + , has_device_(false) + , has_dtype_(false) + , has_layout_(false) + , has_requires_grad_(false) + , has_pinned_memory_(false) + {} + + /// Constructs a `TensorOptions` object with the given layout. + /* implicit */ TensorOptions(Layout layout) : TensorOptions() { + this->set_layout(layout); + } + + /// Constructs a `TensorOptions` object with the given device. + /// See NOTE [ TensorOptions Constructors ] on why this is templatized. + template, Device>::value>> + /* implicit */ TensorOptions(T&& device) : TensorOptions() { + this->set_device(std::forward(device)); + } + + /// Constructs a `TensorOptions` object from arguments allowed in `Device` + /// constructors. + /// + /// See NOTE [ TensorOptions Constructors ]. + /// + /// NB: Ideally we only allow implicit constructors here. But there is no easy + /// way to detect them. So we have this one that allows explicit + /// constructors too. + template ::value>> + /* implicit */ TensorOptions(Args&&... args) + : TensorOptions(Device(std::forward(args)...)) {} + + /// Constructs a `TensorOptions` object with the given dtype. + /* implicit */ TensorOptions(caffe2::TypeMeta dtype) : TensorOptions() { + this->set_dtype(dtype); + } + + /// legacy constructor to support ScalarType + /* implicit */ TensorOptions(ScalarType dtype) : TensorOptions() { + this->set_dtype(dtype); + } + + /// Return a copy of `TensorOptions` with `device` set to the given one, or + /// cleared if `device` is `nullopt`. + C10_NODISCARD TensorOptions device(c10::optional device) const noexcept { + TensorOptions r = *this; + r.set_device(device); + return r; + } + + /// Return a copy of `TensorOptions` with `device` set to the given one. + /// (This overload ensures that variadic template c10::optional constructor + /// for Device work correctly.) + template + C10_NODISCARD TensorOptions device(Args&&... args) const noexcept { + return device(c10::optional(c10::in_place, std::forward(args)...)); + } + + /// Return a copy of `TensorOptions`, but with device set to CUDA, and the + /// device index set to the given one. + /// + /// TODO: This function encourages bad behavior (assuming CUDA is + /// the only device that matters). Get rid of it / rename it. + C10_NODISCARD TensorOptions device_index(int16_t device_index) const noexcept { + return device(Device::Type::CUDA, device_index); + } + + /// Return a copy of `TensorOptions` with `dtype` set to the given one. + C10_NODISCARD TensorOptions dtype(c10::optional dtype) const noexcept { + TensorOptions r = *this; + r.set_dtype(dtype); + return r; + } + + // legacy function to support ScalarType + C10_NODISCARD TensorOptions dtype(c10::optional dtype) const noexcept { + TensorOptions r = *this; + r.set_dtype(dtype); + return r; + } + + // Since dtype is taken... + template + TensorOptions& dtype() { + dtype_ = caffe2::TypeMeta::Make(); + has_dtype_ = true; + return *this; + } + + /// Sets the layout of the `TensorOptions`. + C10_NODISCARD TensorOptions layout(c10::optional layout) const noexcept { + TensorOptions r = *this; + r.set_layout(layout); + return r; + } + + /// Sets the `requires_grad` property of the `TensorOptions`. + C10_NODISCARD TensorOptions requires_grad(c10::optional requires_grad) const noexcept { + TensorOptions r = *this; + r.set_requires_grad(requires_grad); + return r; + } + + /// Sets the `pinned_memory` property on the `TensorOptions`. 
+ C10_NODISCARD TensorOptions pinned_memory(c10::optional pinned_memory) const noexcept { + TensorOptions r = *this; + r.set_pinned_memory(pinned_memory); + return r; + } + + /// Returns the device of the `TensorOptions`. + Device device() const noexcept { + return has_device_ ? device_ : Device(kCPU); + } + + /// Returns whether the device is specified. + bool has_device() const noexcept { + return has_device_; + } + + /// Returns the device of the `TensorOptions`, or `c10::nullopt` if + /// device is not specified. + c10::optional device_opt() const noexcept { + return has_device_ ? c10::make_optional(device_) : c10::nullopt; + } + + /// Returns the device index of the `TensorOptions`. + int32_t device_index() const noexcept { + return device().index(); + } + + /// Returns the dtype of the `TensorOptions`. + caffe2::TypeMeta dtype() const noexcept { + return has_dtype_ ? dtype_ : get_default_dtype(); + } + + /// Returns whether the dtype is specified. + bool has_dtype() const noexcept { + return has_dtype_; + } + + /// Returns the dtype of the `TensorOptions`, or `c10::nullopt` if + /// device is not specified. + c10::optional dtype_opt() const noexcept { + return has_dtype_ ? c10::make_optional(dtype_) : c10::nullopt; + } + + /// Returns the layout of the `TensorOptions`. + Layout layout() const noexcept { + return has_layout_ ? layout_ : kStrided; + } + + /// Returns whether the layout is specified. + bool has_layout() const noexcept { + return has_layout_; + } + + /// Returns the layout of the `TensorOptions`, or `c10::nullopt` if + /// layout is not specified. + c10::optional layout_opt() const noexcept { + return has_layout_ ? c10::make_optional(layout_) : c10::nullopt; + } + + /// Returns the `requires_grad` property of the `TensorOptions`. + bool requires_grad() const noexcept { + return has_requires_grad_ ? requires_grad_ : false; + } + + /// Returns whether the `requires_grad` is specified. + bool has_requires_grad() const noexcept { + return has_requires_grad_; + } + + /// Returns the `requires_grad` property of the `TensorOptions`, or + /// `c10::nullopt` if `requires_grad` is not specified. + c10::optional requires_grad_opt() const noexcept { + return has_requires_grad_ ? c10::make_optional(requires_grad_) + : c10::nullopt; + } + + /// Returns the `pinned_memory` property of the `TensorOptions`. + bool pinned_memory() const noexcept { + return has_pinned_memory_ ? pinned_memory_ : false; + } + + /// Returns whether the `pinned_memory` is specified. + bool has_pinned_memory() const noexcept { + return has_pinned_memory_; + } + + + /// Returns the `pinned_memory` property of the `TensorOptions`, or + /// `c10::nullopt` if `pinned_memory` is not specified. + c10::optional pinned_memory_opt() const noexcept { + return has_pinned_memory_ ? c10::make_optional(pinned_memory_) : c10::nullopt; + } + + // Resolves the ATen backend specified by the current construction axes. + // TODO: Deprecate this + Backend backend() const noexcept { + return at::tensorTypeIdToBackend(computeTensorTypeId()); + } + + /// Return the right-biased merge of two TensorOptions. This has the + /// effect of overwriting settings from self with specified options + /// of options. + /// + /// NB: This merging operation does NOT respect device merges. + /// For example, if you device({kCUDA, 1}).merge_in(kCUDA) + /// you will get kCUDA in the end! Functions like Tensor.new_empty + /// ensure the right device is selected anyway by way of a + /// device guard. 
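+  ///
+  /// Editorial example (not upstream documentation) of the right-biased merge:
+  ///
+  ///   TensorOptions defaults = at::dtype(at::kFloat).device(at::kCPU);
+  ///   TensorOptions user     = at::dtype(at::kInt);
+  ///   TensorOptions merged   = defaults.merge_in(user);
+  ///   // merged.dtype()  == Int  (taken from `user`, the right-hand side)
+  ///   // merged.device() == CPU  (filled in from `defaults`)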
+ /// + TensorOptions merge_in(TensorOptions options) const noexcept { + TensorOptions r = options; + if (!r.has_device()) r.set_device(device()); + if (!r.has_dtype()) r.set_dtype(dtype()); + if (!r.has_layout()) r.set_layout(layout()); + // NB: requires grad is right biased; not a logical AND/OR! + if (!r.has_requires_grad()) r.set_requires_grad(requires_grad()); + if (!r.has_pinned_memory()) r.set_pinned_memory(pinned_memory()); + return r; + } + + // Resolves the tensor type set specified by the current construction axes. + TensorTypeSet type_set() const noexcept { + return TensorTypeSet(computeTensorTypeId()).add(TensorTypeId::VariableTensorId); + } + + inline TensorTypeId computeTensorTypeId() const { + switch (layout()) { + case Layout::Strided: + switch (device().type()) { + case DeviceType::CPU: { + auto dtype_tmp = typeMetaToScalarType(dtype()); + if (isComplexType(dtype_tmp)) { + return TensorTypeId::ComplexCPUTensorId; + } + if (isQIntType(dtype_tmp)) { + return TensorTypeId::QuantizedCPUTensorId; + } + return TensorTypeId::CPUTensorId; + } + case DeviceType::CUDA: + if (isComplexType(typeMetaToScalarType(dtype()))) { + return TensorTypeId::ComplexCUDATensorId; + } + return TensorTypeId::CUDATensorId; + case DeviceType::MKLDNN: + return TensorTypeId::MKLDNNTensorId; + case DeviceType::OPENGL: + return TensorTypeId::OpenGLTensorId; + case DeviceType::OPENCL: + return TensorTypeId::OpenCLTensorId; + case DeviceType::IDEEP: + return TensorTypeId::IDEEPTensorId; + case DeviceType::HIP: + return TensorTypeId::HIPTensorId; + case DeviceType::MSNPU: + return TensorTypeId::MSNPUTensorId; + case DeviceType::XLA: + return TensorTypeId::XLATensorId; + default: + AT_ERROR("Unsupported device type for dense layout: ", device().type()); + } + case Layout::Sparse: + switch (device().type()) { + case DeviceType::CPU: + return TensorTypeId::SparseCPUTensorId; + case DeviceType::CUDA: + return TensorTypeId::SparseCUDATensorId; + case DeviceType::HIP: + return TensorTypeId::SparseHIPTensorId; + default: + AT_ERROR("Unsupported device type for sparse layout: ", device().type()); + } + case Layout::Mkldnn: + switch (device().type()) { + case DeviceType::CPU: + return TensorTypeId::MkldnnCPUTensorId; + default: + AT_ERROR("Unsupported device type for mkldnn layout: ", device().type()); + } + default: + AT_ERROR("Unsupported layout: ", layout()); + } + } + + private: + + // These methods are currently private because I'm not sure if it's wise + // to actually publish them. They are methods because I need them in + // the constructor and the functional API implementation. + // + // If you really, really need it, you can make these public, but check if you + // couldn't just do what you need with the functional API. Similarly, these + // methods are not chainable, because if you wanted chaining, you probably + // want to use the functional API instead. (It's probably OK to make + // these chainable, because these functions are all explicitly annotated + // with a ref-qualifier, the trailing &, that makes them illegal to call + // on temporaries.) + + /// Mutably set the device of `TensorOptions`. + void set_device(c10::optional device) & noexcept { + if (device) { + device_ = *device; + has_device_ = true; + } else { + has_device_ = false; + } + } + + /// Mutably set the dtype of `TensorOptions`. 
+ void set_dtype(c10::optional dtype) & noexcept { + if (dtype) { + dtype_ = *dtype; + has_dtype_ = true; + } else { + has_dtype_ = false; + } + } + + // legacy function to support ScalarType + void set_dtype(c10::optional dtype) & noexcept { + if (dtype) { + dtype_ = scalarTypeToTypeMeta(*dtype); + has_dtype_ = true; + } else { + has_dtype_ = false; + } + } + + /// Mutably set the layout of `TensorOptions`. + void set_layout(c10::optional layout) & noexcept { + if (layout) { + layout_ = *layout; + has_layout_ = true; + } else { + has_layout_ = false; + } + } + + /// Mutably set the `requires_grad` property of `TensorOptions`. + void set_requires_grad(c10::optional requires_grad) & noexcept { + if (requires_grad) { + requires_grad_ = *requires_grad; + has_requires_grad_ = true; + } else { + has_requires_grad_ = false; + } + } + + /// Mutably set the `pinned_memory` property of `TensorOptions`. + void set_pinned_memory(c10::optional pinned_memory) & noexcept { + if (pinned_memory) { + pinned_memory_ = *pinned_memory; + has_pinned_memory_ = true; + } else { + has_pinned_memory_ = false; + } + } + + // WARNING: If you edit TensorOptions to add more options, you + // must adjust the implementation of Tensor::options + + // NB: We didn't use c10::optional here, because then we can't pack + // the has_***_ boolean fields. + + caffe2::TypeMeta dtype_ = caffe2::TypeMeta::Make(); // 64-bit + Device device_ = at::kCPU; // 32-bit + Layout layout_ = at::kStrided; // 8-bit + + // Bitmask required here to get this to fit inside 32 bits (or even 64 bits, + // for that matter) + + bool requires_grad_ : 1; + bool pinned_memory_ : 1; + + + bool has_device_ : 1; + bool has_dtype_ : 1; + bool has_layout_ : 1; + bool has_requires_grad_ : 1; + bool has_pinned_memory_ : 1; +}; + +// We should aspire to fit in one machine-size word; but a size greater than two +// words is too much. (We are doing terribly on 32-bit archs, where we require +// three machine size words to store tensor options. Eek!) +static_assert( sizeof(TensorOptions) <= sizeof(int64_t) * 2, + "TensorOptions must fit in 128-bits" ); + +/// Convenience function that returns a `TensorOptions` object with the `dtype` +/// set to the given one. +inline TensorOptions dtype(caffe2::TypeMeta dtype) { + return TensorOptions().dtype(dtype); +} + +// legacy function to support ScalarType +inline TensorOptions dtype(ScalarType dtype) { + return TensorOptions().dtype(scalarTypeToTypeMeta(dtype)); +} + +/// Convenience function that returns a `TensorOptions` object with the `layout` +/// set to the given one. +inline TensorOptions layout(Layout layout) { + return TensorOptions().layout(layout); +} + +/// Convenience function that returns a `TensorOptions` object with the `device` +/// set to the given one. +inline TensorOptions device(Device device) { + return TensorOptions().device(std::move(device)); +} + +/// Convenience function that returns a `TensorOptions` object with the +/// `device` set to CUDA and the `device_index` set to the given one. +inline TensorOptions device_index(int16_t device_index) { + return TensorOptions().device_index(device_index); +} + +/// Convenience function that returns a `TensorOptions` object with the +/// `requires_grad` set to the given one. 
+inline TensorOptions requires_grad(bool requires_grad = true) { + return TensorOptions().requires_grad(requires_grad); +} + +C10_API std::ostream& operator<<( + std::ostream& stream, + const TensorOptions& options); + +template +inline TensorOptions dtype() { + return dtype(caffe2::TypeMeta::Make()); +} + +// This is intended to be a centralized location by which we can determine +// what an appropriate TensorTypeId for a tensor is. +// +// This takes a TensorOptions, rather than just a DeviceType and Layout, because +// we reserve the right to change dispatch based on *any* aspect of +// TensorOptions. WARNING: If you do this, you need to fix the calls +// to computeTensorTypeId in caffe2/tensor.h +inline TensorTypeId computeTensorTypeId(TensorOptions options) { + return options.computeTensorTypeId(); +} + +inline DeviceType computeDeviceType(TensorTypeId tid) { + if (tid == TensorTypeId::CPUTensorId) { + return DeviceType::CPU; + } else if (tid == TensorTypeId::CUDATensorId) { + return DeviceType::CUDA; + } else if (tid == TensorTypeId::HIPTensorId) { + return DeviceType::HIP; + } else if (tid == TensorTypeId::MKLDNNTensorId) { + return DeviceType::MKLDNN; + } else if (tid == TensorTypeId::OpenGLTensorId) { + return DeviceType::IDEEP; + } else if (tid == TensorTypeId::OpenCLTensorId) { + return DeviceType::OPENCL; + } else if (tid == TensorTypeId::IDEEPTensorId) { + return DeviceType::IDEEP; + } else if (tid == TensorTypeId::HIPTensorId) { + return DeviceType::HIP; + } else if (tid == TensorTypeId::MSNPUTensorId) { + return DeviceType::MSNPU; + } else if (tid == TensorTypeId::XLATensorId) { + return DeviceType::XLA; + } else if (tid == TensorTypeId::SparseCPUTensorId) { + return DeviceType::CPU; + } else if (tid == TensorTypeId::SparseCUDATensorId) { + return DeviceType::CUDA; + } else if (tid == TensorTypeId::SparseHIPTensorId) { + return DeviceType::HIP; + } else if (tid == TensorTypeId::MkldnnCPUTensorId) { + return DeviceType::CPU; + } else if (tid == TensorTypeId::ComplexCPUTensorId) { + return DeviceType::CPU; + } else if (tid == TensorTypeId::ComplexCUDATensorId) { + return DeviceType::CUDA; + } else { + AT_ASSERTM(false, "Unknown TensorTypeId: ", tid); + } +} + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/TensorTypeId.h b/thirdparty/libtorch/include/c10/core/TensorTypeId.h new file mode 100644 index 0000000000..e1bcbc2d26 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/TensorTypeId.h @@ -0,0 +1,93 @@ +#pragma once + +#include +#include +#include "c10/macros/Macros.h" + +namespace c10 { + +// A "bit" in a TensorTypeSet, which may have a unique dispatch handler +// for it. Higher bit indexes get handled by dispatching first (because +// we "count leading zeros") +enum class TensorTypeId : uint8_t { + // This is not a "real" tensor id, but it exists to give us a "nullopt" + // element we can return for cases when a TensorTypeSet contains no elements. + // You can think a more semantically accurate definition of TensorTypeId is: + // + // using TensorTypeId = optional + // + // and UndefinedTensorId == nullopt. We didn't actually represent + // it this way because optional would take two + // words, when TensorTypeId fits in eight bits. 
+ UndefinedTensorId = 0, + + // This pool of IDs is not really ordered, but it is merged into + // the hierarchy for convenience and performance + CPUTensorId, // PyTorch/Caffe2 supported + CUDATensorId, // PyTorch/Caffe2 supported + MKLDNNTensorId, // Caffe2 only + OpenGLTensorId, // Caffe2 only + OpenCLTensorId, // Caffe2 only + IDEEPTensorId, // Caffe2 only + HIPTensorId, // PyTorch/Caffe2 supported + SparseHIPTensorId, // PyTorch only + MSNPUTensorId, // PyTorch only + XLATensorId, // PyTorch only + MkldnnCPUTensorId, + QuantizedCPUTensorId, // PyTorch only + ComplexCPUTensorId, // PyTorch only + ComplexCUDATensorId, // PyTorch only + + // Sparse has multi-dispatch with dense; handle it first + SparseCPUTensorId, // PyTorch only + SparseCUDATensorId, // PyTorch only + + // WARNING! If you add more "wrapper" style tensor ids (tensor + // ids which don't get kernels directly defined in native_functions.yaml; + // examples are tracing or profiling) here, you need to also adjust + // legacyExtractTypeId in c10/core/TensorTypeId.h to mask them out. + + VariableTensorId, + + // TESTING: This is intended to be a generic testing tensor type id. + // Don't use it for anything real; its only acceptible use is within a single + // process test. Use it by creating a TensorImpl with this TensorTypeId, and + // then registering operators to operate on this type id. + TESTING_ONLY_GenericWrapperTensorId, + + // TESTING: This is intended to be a generic testing tensor type id. + // Don't use it for anything real; its only acceptible use is within a ingle + // process test. Use it by toggling the mode on and off via + // TESTING_ONLY_tls_generic_mode_set_enabled and then registering operators + // to operate on this type id. + TESTING_ONLY_GenericModeTensorId, + + NumTensorIds, // Sentinel +}; + +static_assert( + static_cast(TensorTypeId::NumTensorIds) < 64, + "TensorTypeId is used as index into 64-bit bitmask; you must have less than 64 entries"); + +C10_API const char* toString(TensorTypeId); +C10_API std::ostream& operator<<(std::ostream&, TensorTypeId); + +// For backwards compatibility with XLA repository +// (I don't want to fix this in XLA right now because there might be +// more renaming coming in the future.) +static inline TensorTypeId XLATensorId() { + return TensorTypeId::XLATensorId; +} + +} // namespace c10 + +// NB: You really shouldn't use this instance; this enum is guaranteed +// to be pretty small so a regular array should be acceptable. +namespace std { +template <> +struct hash { + size_t operator()(c10::TensorTypeId x) const { + return static_cast(x); + } +}; +} diff --git a/thirdparty/libtorch/include/c10/core/TensorTypeSet.h b/thirdparty/libtorch/include/c10/core/TensorTypeSet.h new file mode 100644 index 0000000000..f5ce1af70b --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/TensorTypeSet.h @@ -0,0 +1,128 @@ +#pragma once + +#include +#include +#include +#include + +namespace c10 { + +// A representation of a set of TensorTypeIds. A tensor may have multiple +// tensor type ids, e.g., a Variable tensor can also be a CPU tensor; the +// TensorTypeSet specifies what type ids apply. The internal representation is +// as a 64-bit bit set (this means only 64 tensor type ids are supported). +// +// Note that TensorTypeIds are ordered; thus, we can ask questions like "what is +// the highest priority TensorTypeId in the set"? (The set itself is not +// ordered; two sets with the same ids will always have the ids ordered in the +// same way.) 
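+//
+// Editorial illustration (not upstream documentation), using the class and the
+// helper defined further down in this header:
+//
+//   TensorTypeSet s(TensorTypeId::CPUTensorId);
+//   s = s.add(TensorTypeId::VariableTensorId);
+//   s.has(TensorTypeId::CPUTensorId);  // true
+//   s.highestPriorityTypeId();         // VariableTensorId (largest enum value in the set)
+//   legacyExtractTypeId(s);            // CPUTensorId (VariableTensorId is masked out)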
+// +// At the moment, there are no nontrivial uses of this set; tensors are always +// singletons. In the near future, this set will represent variable? + tensor +// type id. In the far future, it will be requires grad? + profiling? + +// tracing? + lazy? + tensor type id. +// +// (The difference between variable and requires grad, is that +// there are currently three states a tensor can be: +// 1. Not a variable +// 2. Variable with requires_grad=False +// 3. Variable with requires_grad=True +// Eventually, we want to kill state (1), and only dispatch to autograd +// handling code if one of the inputs requires grad.) +// +// An undefined tensor is one with an empty tensor type set. +class TensorTypeSet final { +public: + enum Full { FULL }; + enum Raw { RAW }; + + // NB: default constructor representation as zero is MANDATORY as + // use of TensorTypeSet in TLS requires this. + TensorTypeSet() + : repr_(0) {} + TensorTypeSet(Full) + : repr_(-1) {} + // Public version of TensorTypeSet(uint64_t) API; external users + // must be explicit when they do this! + TensorTypeSet(Raw, uint64_t x) + : repr_(x) {} + explicit TensorTypeSet(TensorTypeId t) + : repr_(t == TensorTypeId::UndefinedTensorId + ? 0 + : 1ULL << (static_cast(t) - 1)) {} + // Test if a TensorTypeId is in the set + bool has(TensorTypeId t) const { + TORCH_INTERNAL_ASSERT(t != TensorTypeId::UndefinedTensorId); + return static_cast(repr_ & TensorTypeSet(t).repr_); + } + // Perform set union + TensorTypeSet operator|(TensorTypeSet other) const { + return TensorTypeSet(repr_ | other.repr_); + } + // Perform set intersection + TensorTypeSet operator&(TensorTypeSet other) const { + return TensorTypeSet(repr_ & other.repr_); + } + // Compute the set difference self - other + TensorTypeSet operator-(TensorTypeSet other) const { + return TensorTypeSet(repr_ & ~other.repr_); + } + // Perform set equality + bool operator==(TensorTypeSet other) const { + return repr_ == other.repr_; + } + // Add a TensorTypeId to the TensorTypeId set. Does NOT mutate, + // returns the extended TensorTypeSet! + C10_NODISCARD TensorTypeSet add(TensorTypeId t) const { + return *this | TensorTypeSet(t); + } + // Remove a TensorTypeId from the TensorTypeId set. This is + // generally not an operation you should be doing (it's + // used to implement operator<<) + C10_NODISCARD TensorTypeSet remove(TensorTypeId t) const { + return TensorTypeSet(repr_ & ~TensorTypeSet(t).repr_); + } + // Is the set empty? (AKA undefined tensor) + bool empty() const { + return repr_ == 0; + } + uint64_t raw_repr() { return repr_; } + // Return the type id in this set with the highest priority (i.e., + // is the largest in the TensorTypeId enum). Intuitively, this + // type id is the one that should handle dispatch (assuming there + // aren't any further exclusions or inclusions). + TensorTypeId highestPriorityTypeId() const { + // TODO: If I put UndefinedTensorId as entry 64 and then adjust the + // singleton constructor to shift from the right, we can get rid of the + // subtraction here. It's modestly more complicated to get right so I + // didn't do it for now. + return static_cast(64 - llvm::countLeadingZeros(repr_)); + } +private: + TensorTypeSet(uint64_t repr) : repr_(repr) {} + uint64_t repr_ = 0; +}; + +C10_API std::string toString(TensorTypeSet); +C10_API std::ostream& operator<<(std::ostream&, TensorTypeSet); + +// Historically, every tensor only had a single TensorTypeId, and it was +// always something like CPUTensorId and not something weird like VariableId. 
+// For the forseeable future, it will still be possible to extract /that/ +// TensorTypeId, and that's what this function does. It should be used +// for legacy code that is still using TensorTypeId for things like instanceof +// checks; if at all possible, refactor the code to stop using TensorTypeId +// in those cases. +// +// What's the difference between 'legacyExtractTypeId(s) == id' +// and 's.has(id)'? legacyExtractTypeId will NEVER return VariableTensorId; +// but s.has(VariableTensorId) will evaluate to true if s has VariableTensorId. +// For non-VariableTensorId equality tests, they are indistinguishable. +// +// NB: If you add other non-VariableTensorId other keys to this set, you'll +// have to adjust this some more (sorry.) +static inline TensorTypeId legacyExtractTypeId(TensorTypeSet s) { + return s.remove(TensorTypeId::VariableTensorId).highestPriorityTypeId(); +} + +} diff --git a/thirdparty/libtorch/include/c10/core/UndefinedTensorImpl.h b/thirdparty/libtorch/include/c10/core/UndefinedTensorImpl.h new file mode 100644 index 0000000000..9f1cb93c10 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/UndefinedTensorImpl.h @@ -0,0 +1,35 @@ +#pragma once + +#include + +namespace c10 { + +struct C10_API UndefinedTensorImpl final : public TensorImpl { + public: + // Without this, we get: + // error: identifier "at::UndefinedTensorImpl::_singleton" is undefined in device code + // (ostensibly because the constexpr tricks MSVC into trying to compile this + // function for device as well). +#ifdef _WIN32 + static inline TensorImpl * singleton() { +#else + static constexpr inline TensorImpl * singleton() { +#endif + return &_singleton; + } + IntArrayRef sizes() const override; + IntArrayRef strides() const override; + int64_t size(int64_t d) const override; + int64_t stride(int64_t d) const override; + int64_t dim() const override; + bool has_storage() const override; + const Storage& storage() const override; + int64_t storage_offset() const override; +private: + UndefinedTensorImpl(); + static UndefinedTensorImpl _singleton; +public: + friend struct UndefinedType; +}; + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/core/WrapDimMinimal.h b/thirdparty/libtorch/include/c10/core/WrapDimMinimal.h new file mode 100644 index 0000000000..40090239e8 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/WrapDimMinimal.h @@ -0,0 +1,26 @@ +#pragma once + +#include + +namespace c10 { + +static inline int64_t maybe_wrap_dim(int64_t dim, int64_t dim_post_expr, bool wrap_scalar=true) { + if (dim_post_expr <= 0) { + if (!wrap_scalar) { + AT_INDEX_ERROR("dimension specified as ", dim, " but tensor has no dimensions"); + } + dim_post_expr = 1; // this will make range [-1, 0] + } + + int64_t min = -dim_post_expr; + int64_t max = dim_post_expr - 1; + if (dim < min || dim > max) { + AT_INDEX_ERROR( + "Dimension out of range (expected to be in range of [", + min, ", ", max, "], but got ", dim, ")"); + } + if (dim < 0) dim += dim_post_expr; + return dim; +} + +} diff --git a/thirdparty/libtorch/include/c10/core/impl/DeviceGuardImplInterface.h b/thirdparty/libtorch/include/c10/core/impl/DeviceGuardImplInterface.h new file mode 100644 index 0000000000..516aebba07 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/DeviceGuardImplInterface.h @@ -0,0 +1,224 @@ +#pragma once + +#include +#include +#include +#include + +// Just for C10_ANONYMOUS_VARIABLE +#include + +#include + +namespace c10 { + +/** + * Flags defining the behavior of events. 
+ * + * PYTORCH_DEFAULT and BACKEND_DEFAULT are valid for all backends. The + * BACKEND_DEFAULT is what a particular backend would select if no + * flags were given. PYTORCH_DEFAULT is the PyTorch's framework default + * choice for events on that backend, which may not be the same. For example, + * when PyTorch creates a CUDA event it sets the flag + * CUDA_EVENT_DISABLING_TIMING by default to improve performance. + * + * The mapping of PYTORCH_DEFAULT and BACKEND_DEFAULT is done by each + * backend implementation. Backend-specific flags, like CUDA_EVENT_DEFAULT, + * should map one-to-one with actual event flags for those backends. + */ +enum class EventFlag { + PYTORCH_DEFAULT, + BACKEND_DEFAULT, + // CUDA flags + CUDA_EVENT_DEFAULT, + CUDA_EVENT_DISABLE_TIMING, // PyTorch-default for CUDA + // HIP flags + HIP_EVENT_DEFAULT, + HIP_EVENT_DISABLE_TIMING, // PyTorch-default for HIP + // FOR TESTING ONLY + INVALID +}; + +namespace impl { + +/** + * DeviceGuardImplInterface represents the virtual interface which provides + * functionality to provide an RAII class for device and stream switching, + * via DeviceGuard. Every distinct device type, e.g., CUDA and HIP, is + * expected to implement and register an implementation of this interface. + * All classes which inherit from DeviceGuardImplInterface should be declared + * 'final'. + * + * This class exists because we provide a unified interface for performing + * device guards via DeviceGuard, but we cannot assume that we have actually + * compiled against the, e.g., CUDA library, which actually implements + * this guard functionality. In this case, a dynamic dispatch is required + * to cross the library boundary. + * + * If possible, you should directly use implementations of this interface; + * those uses will be devirtualized. + */ +struct C10_API DeviceGuardImplInterface { + /** + * Return the type of device managed by this guard implementation. + */ + virtual DeviceType type() const = 0; + + /** + * Set the current device to Device, and return the previous Device. + */ + virtual Device exchangeDevice(Device) const = 0; + // NB: Implementations of exchangeDevice can be a bit boilerplatey. You might + // consider replacing exchangeDevice with a non-virtual function with a baked + // in implementation; however, note that this will triple the number of + // virtual calls (when you implement exchangeDevice in a final subclass, + // the compiler gets to devirtualize everything; it won't do that if you don't + // define it in the subclass!) A common way to solve this problem is to use + // some sort of CRTP; however, we can template DeviceGuardImplInterface since + // we really *do* need it to be virtual. A little boilerplate seems easiest + // to explain. (Another way around this problem is to provide inline + // functions that provide the default implementations, but this seems a little + // hard to explain. In any case, we're only going to have on order of ten + // implementations of this anyway.) + + /** + * Get the current device. + */ + virtual Device getDevice() const = 0; + + /** + * Set the current device to Device. + */ + virtual void setDevice(Device) const = 0; + + /** + * Set the current device to Device, without checking for errors + * (so, e.g., this can be called from a destructor). + */ + virtual void uncheckedSetDevice(Device) const noexcept = 0; + + /** + * Get the current stream for a given device. + */ + virtual Stream getStream(Device) const noexcept = 0; + + /** + * Get the default stream for a given device. 
+ */ + virtual Stream getDefaultStream(Device) const { + TORCH_CHECK(false, "Backend doesn't support acquiring a default stream.") + } + + /** + * Set a stream to be the thread local current stream for its device. + * Return the previous stream for that device. You are NOT required + * to set the current device to match the device of this stream. + */ + virtual Stream exchangeStream(Stream) const noexcept = 0; + +/** + * Destroys the given event. + */ + virtual void destroyEvent ( + void* event, + const DeviceIndex device_index) const noexcept { } + +/** + * Increments the event's version and enqueues a job with this version + * in the stream's work queue. When the stream process that job + * it nofifies all streams waiting on / blocked by that version of the + * event to continue and marks that version as recorded. + * */ + virtual void record( + void** event, + const Stream& stream, + const DeviceIndex device_index, + const c10::EventFlag flag) const { + TORCH_CHECK(false, "Backend doesn't support events."); + } + +/** + * Does nothing if the event has not been scheduled to be recorded. + * If the event was previously enqueued to be recorded, a command + * to wait for the version of the event that exists at the time of this call + * is inserted in the stream's work queue. + * When the stream reaches this command it will stop processing + * additional commands until that version of the event is marked as recorded. + */ + virtual void block( + void* event, + const Stream& stream) const { + TORCH_CHECK(false, "Backend doesn't support events."); + } + +/** + * Returns true if (and only if) + * (1) the event has never been scheduled to be recorded + * (2) the current version is marked as recorded. + * Returns false otherwise. + */ + virtual bool queryEvent(void* event) const { + TORCH_CHECK(false, "Backend doesn't support events."); + } + + /** + * Get the number of devices. WARNING: This is REQUIRED to not raise + * an exception. If there is some sort of problem, e.g., driver error, + * you should report that there are zero available devices. + */ + virtual DeviceIndex deviceCount() const noexcept = 0; + + /** + * Intended use of this class is to leak the DeviceGuardImpl at program end. + * So you better not call the destructor, buster! + */ + virtual ~DeviceGuardImplInterface() = default; +}; + +// The registry is NON-owning. Each stored pointer is std::atomic so +// that under all interleavings of registry calls the structure is +// race-free. This doesn't cost us anything on reads in X86. (An +// unsynchronized implementation probably is OK too, but I didn't want +// to prove that we never read from device_guard_impl_registry at the +// same time some registration is occurring. Shiver.) +// +// I'd like this registry to be valid even at program destruction time +// (in case someone uses a DeviceGuard in a destructor to do some cleanup +// in the CUDA API.) Since there are no direct accesses of the underlying +// owning objects which I can use to enforce initialization order (unlike +// in a Meyer singleton), it implies that you must *leak* objects when +// putting them in the registry. This is done by deleting the destructor +// on DeviceGuardImplInterface. 
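+//
+// Editorial sketch (not upstream code) of how a backend typically hooks itself
+// into this registry via the C10_REGISTER_GUARD_IMPL macro defined below
+// (MyCUDAGuardImpl is a hypothetical name):
+//
+//   struct MyCUDAGuardImpl final : public c10::impl::DeviceGuardImplInterface {
+//     DeviceType type() const override { return DeviceType::CUDA; }
+//     // ... remaining pure-virtual methods ...
+//   };
+//   C10_REGISTER_GUARD_IMPL(CUDA, MyCUDAGuardImpl);  // at namespace scope in a .cpp file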
+extern C10_API std::atomic +device_guard_impl_registry[static_cast(DeviceType::COMPILE_TIME_MAX_DEVICE_TYPES)]; + +// I can't conveniently use c10/util/Registry.h for the following reason: +// c10/util/Registry.h gives me a slow way of Create'ing a object of some +// interface from the registry, but no way of quickly accessing an already +// created object. I'll be banging on getDeviceGuardImpl every time we do a +// DeviceGuard, so I really don't want to be doing an unordered_map lookup. +// Better if the registration mechanism directly drops its implementation +// into device_guard_impl_registry. + +class C10_API DeviceGuardImplRegistrar { +public: + DeviceGuardImplRegistrar(DeviceType, const DeviceGuardImplInterface*); +}; + +#define C10_REGISTER_GUARD_IMPL(DevType, DeviceGuardImpl) \ + static ::c10::impl::DeviceGuardImplRegistrar C10_ANONYMOUS_VARIABLE(g_##DeviceType)(::c10::DeviceType::DevType, new DeviceGuardImpl()); + +inline const DeviceGuardImplInterface* getDeviceGuardImpl(DeviceType type) { + auto p = device_guard_impl_registry[static_cast(type)].load(); + // This seems to be the first place where you make use of a device + // when you pass devices to factory functions. Give a nicer error + // message in this case. + TORCH_CHECK(p, "PyTorch is not linked with support for ", type, " devices"); + return p; +} + +inline bool hasDeviceGuardImpl(DeviceType type) { + return device_guard_impl_registry[static_cast(type)].load(); +} + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/impl/FakeGuardImpl.h b/thirdparty/libtorch/include/c10/core/impl/FakeGuardImpl.h new file mode 100644 index 0000000000..24f48fb8fe --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/FakeGuardImpl.h @@ -0,0 +1,105 @@ +#pragma once + +#include + +#include + +namespace c10 { +namespace impl { + +// FakeGuardImpl is hardcoded to have eight devices. Not for +// any good reason, just to simplify code. +constexpr size_t kFakeGuardImplMaxDevices = 8; + +/** + * A fake implementation of DeviceGuardImplInterface suitable for testing. + * The current device is modeled as a mutable field in the guard implementation + * class. See DeviceGuard_test.cpp for an example use. 
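+ *
+ * Editorial sketch (not upstream documentation; FakeGuardImpl is templated on
+ * the DeviceType it pretends to manage):
+ *
+ *   FakeGuardImpl<DeviceType::CUDA> impl;
+ *   impl.setDevice(Device(DeviceType::CUDA, 3));
+ *   // FakeGuardImpl<DeviceType::CUDA>::getDeviceIndex() now reports 3;
+ *   // no real CUDA device is touched.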
+ */ +template +struct FakeGuardImpl final : public DeviceGuardImplInterface { + static constexpr DeviceType static_type = T; + // Runtime device type is not used + FakeGuardImpl(DeviceType) {} + FakeGuardImpl() {} + DeviceType type() const override { + return T; + } + Device exchangeDevice(Device d) const override { + AT_ASSERT(d.type() == type()); + AT_ASSERT(d.index() < kFakeGuardImplMaxDevices); + Device old_device = getDevice(); + if (old_device.index() != d.index()) { + current_device_ = d.index(); + } + return old_device; + } + Device getDevice() const override { + return Device(type(), current_device_); + } + void setDevice(Device d) const override { + AT_ASSERT(d.type() == type()); + AT_ASSERT(d.index() >= 0); + AT_ASSERT(d.index() < kFakeGuardImplMaxDevices); + current_device_ = d.index(); + } + void uncheckedSetDevice(Device d) const noexcept override { + current_device_ = d.index(); + } + Stream getStream(Device d) const noexcept override { + return Stream(Stream::UNSAFE, d, current_streams_[d.index()]); + } + Stream exchangeStream(Stream s) const noexcept override { + auto old_id = current_streams_[s.device_index()]; + current_streams_[s.device_index()] = s.id(); + return Stream(Stream::UNSAFE, s.device(), old_id); + } + DeviceIndex deviceCount() const noexcept override { + return kFakeGuardImplMaxDevices; + } + + // Event-related functions + void record( + void** event, + const Stream& stream, + const DeviceIndex device_index, + const EventFlag flag) const override { } + void block( + void* event, + const Stream& stream) const override { } + bool queryEvent(void* event) const override { return true; } + void destroyEvent( + void* event, + const DeviceIndex device_index) const noexcept override { } + + // Convenience methods for testing + static DeviceIndex getDeviceIndex() { + return current_device_; + } + static void setDeviceIndex(DeviceIndex i) { + AT_ASSERT(i >= 0); + AT_ASSERT(i < kFakeGuardImplMaxDevices); + current_device_ = i; + } + static StreamId getCurrentStreamIdFor(DeviceIndex i) { + return current_streams_.at(i); + } + static void resetStreams() { + current_streams_.fill(0); + } +private: + thread_local static DeviceIndex current_device_; + thread_local static std::array current_streams_; +}; + +template +thread_local DeviceIndex FakeGuardImpl::current_device_ = 0; + +template +constexpr DeviceType FakeGuardImpl::static_type; + +template +thread_local std::array FakeGuardImpl::current_streams_ = {0,0,0,0,0,0,0,0}; + + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/impl/InlineDeviceGuard.h b/thirdparty/libtorch/include/c10/core/impl/InlineDeviceGuard.h new file mode 100644 index 0000000000..4ffe9830d5 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/InlineDeviceGuard.h @@ -0,0 +1,391 @@ +#pragma once + +// This file provides implementations of InlineDeviceGuard and InlineOptionalDeviceGuard. + +#include +#include +#include +#include +#include + +namespace c10 { +namespace impl { + + + +/** + * A DeviceGuard is an RAII class that sets a device to some value + * on construction, and resets the device to its original value on + * destruction. + * + * InlineDeviceGuard is a helper class for implementing DeviceGuards. + * It is templated over a DeviceGuardImpl (anything that implements + * DeviceGuardImplInterface). There are two primary ways to instantiate + * InlineDeviceGuard: + * + * - With a concrete implementation of DeviceGuardImpl, e.g., CUDAGuardImpl. 
+ * This is the best way to use InlineDeviceGuard, as all calls are + * devirtualized, giving you code as efficient as straight line + * calls to cudaGetDevice/cudaSetDevice. + * + * - With VirtualGuardImpl, which does a virtual dispatch to a DeviceGuardImpl + * retrieved from a DeviceType registry. We have explicitly instantiated + * InlineDeviceGuard this way as c10::DeviceGuard. + * + * If you are in a hurry, you can use InlineDeviceGuard directly: + * + * using CUDAGuard = impl::InlineDeviceGuard; + * + * However, you can provide a better user experience if you explicitly write a + * wrapper class that itself contains the template instantiation: + * + * class CUDAGuard { + * public: + * // ... the API ... + * private: + * impl::InlineDeviceGuard guard_; + * } + * + * The wrapper class provides a good place to write documentation, and helps + * avoid weird template instantiation errors when a user incorrectly uses the + * class. + * + * If you need to test this class, consider instantiating it with FakeGuardImpl. + */ +template +class InlineDeviceGuard { +public: + // Note [Omitted default constructor from RAII] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // In principle, we could add a default constructor to + // DeviceGuard which reads the current device and promises to + // restore to that device on exit. However, most cases where you + // would have written this, you probably meant to actually just + // use OptionalDeviceGuard (since you don't actually need the + // restore to happen if you don't ever actually set the device). + // We remove the constructor here to encourage you to think about + // what you actually want to happen. + explicit InlineDeviceGuard() = delete; + + /// Set the current device to the passed Device. + explicit InlineDeviceGuard(Device device) + : impl_(device.type()) + , original_device_(device.index() == -1 ? impl_.getDevice() : impl_.exchangeDevice(device)) + , current_device_(device.index() == -1 ? original_device_ : device) + {} + + /// Set the current device index to the passed DeviceIndex. (The + /// device type is inferred from the template parameter T). + template ::value>::type> + explicit InlineDeviceGuard(DeviceIndex device_index) + : InlineDeviceGuard(Device(U::static_type, device_index)) {} + + /// Construct an InlineDeviceGuard using VirtualGuardImpl with an explicit + /// DeviceGuardImplInterface pointer. + template ::value>::type> + explicit InlineDeviceGuard(Device device, const DeviceGuardImplInterface* impl) + : impl_(VirtualGuardImpl(impl ? impl : getDeviceGuardImpl(device.type()))) + , original_device_(device.index() == -1 ? impl_.getDevice() : impl_.exchangeDevice(device)) + , current_device_(device.index() == -1 ? original_device_ : device) + {} + + /// Copy is disallowed + InlineDeviceGuard(const InlineDeviceGuard&) = delete; + InlineDeviceGuard& operator=(const InlineDeviceGuard&) = delete; + + /// Move is disallowed, as DeviceGuard does not have an uninitialized state, + /// which is required for moves on types with nontrivial destructors. + InlineDeviceGuard(InlineDeviceGuard&& other) = delete; + InlineDeviceGuard& operator=(InlineDeviceGuard&& other) = delete; + + ~InlineDeviceGuard() { + impl_.uncheckedSetDevice(original_device_); + } + + /// Sets the device to the given one. 
+ template ::value, int>::type = 0> + void set_device(at::Device device) { + AT_ASSERT((U::static_type == DeviceType::HIP && device.type() == DeviceType::CUDA) || + device.type() == U::static_type); + auto index = device.index(); + if (index == -1) return; + impl_.setDevice(device); + current_device_ = device; + } + + /// Resets the currently set device to its original device, and then sets the + /// current device to the passed device. This is effectively equivalent to + /// set_device when a guard supports only a single device type. + template + typename std::enable_if::value >::type + reset_device(at::Device device) { + set_device(device); + } + + /// Resets the currently set device to its original device, and then sets the + /// current device to the passed device (for a possibly different device + /// type). + /// + /// This method is named reset_device to highlight the fact that previous + /// device settings from this guard are NOT preserved, even if the device + /// has a different device type. For example: + /// + /// // CUDA device is 0 + /// DeviceGuard g(Device(kCUDA, 1)); + /// g.reset_device(Device(kHIP, 2)); + /// // CUDA device is 0 (!!) + /// + /// NOTE: this implementation may skip some device setting if it can prove + /// that it is unnecessary. + /// + /// Optional argument is for testing only. + template + typename std::enable_if::value >::type + reset_device(at::Device device, const impl::DeviceGuardImplInterface* impl = nullptr) { + auto index = device.index(); + if (index == -1) return; + if (device.type() == original_device_.type()) { + AT_ASSERT(impl == nullptr || impl->type() == device.type()); + impl_.setDevice(device); + current_device_ = device; + } else { + // Destruct and reconstruct the DeviceGuard in place + impl_.setDevice(original_device_); + impl_ = !impl ? VirtualGuardImpl(device.type()) : VirtualGuardImpl(impl); + original_device_ = impl_.exchangeDevice(device); + current_device_ = device; + } + } + + /// Sets the device index to the given one. The device type is inferred + /// from the original device type. + void set_index(DeviceIndex index) { + reset_device(Device(original_device_.type(), index)); + } + + /// Returns the device that was set at the time the most recent + /// reset_device(), or otherwise the device at construction time. + Device original_device() const { + return original_device_; + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device/reset_device/set_index. + Device current_device() const { + return current_device_; + } + +protected: + T impl_; + +private: + Device original_device_; + Device current_device_; +}; + +/** + * A OptionalDeviceGuard is an RAII class that sets a device to some value on + * initialization, and resets the device to its original value on destruction. + * + * InlineOptionalDeviceGuard is a helper class for implementing + * OptionalDeviceGuards. See guidance in InlineDeviceGuard on how to + * use this. See OptionalDeviceGuard for user-oriented usage notes. + */ +template +class InlineOptionalDeviceGuard { +public: + // Note [Explicit initialization of optional fields] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // Explicit initialization of optional fields + // required to workaround an nvcc bug; see https://github.com/pytorch/pytorch/issues/12117 + + /// Creates an uninitialized OptionalDeviceGuard. 
+ explicit InlineOptionalDeviceGuard() + : guard_() // See Note [Explicit initialization of optional fields] + {} + + /// Set the current device to the passed Device, if it is not nullopt. + explicit InlineOptionalDeviceGuard(optional device_opt) + : guard_() { // See Note [Explicit initialization of optional fields] + if (device_opt.has_value()) { + guard_.emplace(device_opt.value()); + } + } + + /// Set the current device to the passed DeviceIndex, if it is not nullopt. + template ::value>::type> + explicit InlineOptionalDeviceGuard(optional device_index_opt) + : guard_() { // See Note [Explicit initialization of optional fields] + if (device_index_opt.has_value()) { + guard_.emplace(device_index_opt.value()); + } + } + + /// All constructors of DeviceGuard are valid for OptionalDeviceGuard + /// and result in initialized OptionalDeviceGuard. + template + explicit InlineOptionalDeviceGuard(Args&&... args) + : guard_(in_place, std::forward(args)...) {} + + // TODO: Consider readding Tensor and TensorList constructors here, when + // Tensor moves to c10. (These are only valid on OptionalDeviceGuard, + // because a Tensor may be undefined, in which case we need an uninitialized + // tensor guard.) + + // Note [Move construction for RAII guards is tricky] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // In principle, move construction is useful for terminating + // the lifetime of a `OptionalDeviceGuard` early; for example: + // + // // current device is d0 + // OptionalDeviceGuard g1(d1); + // // current device is d1 + // { + // OptionalDeviceGuard g2(std::move(g1)); + // } + // // current device is d0!! + // + // However, it's difficult to implement the move constructor + // in a way that works in all situations. For example, consider + // the following example: + // + // OptionalDeviceGuard g1(d1); + // { + // OptionalDeviceGuard g2(d2); + // { + // OptionalDeviceGuard g3(std::move(g1)); // !!! + // } + // } + // + // What should the current device be while g3 in scope... and what + // should it be after it goes out of scope? What about g2? + // There don't seem to be satisfactory answers for these questions. + // + // It's in principle possible to raise an error when this occurs + // by doing some extra thread-local bookkeeping. But why bother? + // Just don't provide the constructor. + InlineOptionalDeviceGuard(InlineOptionalDeviceGuard&& other) = delete; + + // Note [Move assignment for RAII guards is tricky] + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + // Move assignment is deleted, because you need to know which guard was + // defined "first", as that guard's original_device_ wins--with the current + // representation, we have no way of telling which is the case. (Move + // construction does not have this problem, as one guard is always + // uninitialized.) 
+ // + // We can make this clear by way of a pair of examples: + // + // Example 1: + // + // // initial device is n0 + // { + // CUDAGuard g1(n1); + // { + // CUDAGuard g2(n2); + // // current device should be n2 + // g1 = std::move(g2); + // // current device should still be n2 + // } + // // current device should still be n2 + // } + // // current device should be n0 + // + // Example 2 (flip the order of the two guards): + // + // // initial device is n0 + // { + // CUDAGuard g2(n2); + // { + // CUDAGuard g1(n1); + // // current device should be n1 + // g1 = std::move(g2); + // // current device should be n2 + // } + // // current device should be n0 (since g2 has been vacated) + // } + // + // In both examples, we need g1 to restore to n0 after move assignment. + // However, in example 1, this is determined by the restore value of g1 + // (prior to the move). In example 2, however, it is determined by the the + // restore value of g2(!!). We don't know which one should win, without having + // a way of telling which guard was allocated first. + // + // We could solve this with an extra thread-local variable. But no one is + // actually using move-assignment. So just get rid of it. + InlineOptionalDeviceGuard& operator=(InlineOptionalDeviceGuard&& other) = delete; + + /// Sets the device to the given one. Initializes OptionalDeviceGuard if it + /// is not already initialized. + template ::value>::type> + void set_device(at::Device device) { + if (!guard_.has_value()) { + guard_.emplace(device); + } else { + guard_->set_device(device); + } + } + + /// Resets the currently set device to its original device, and then sets the + /// current device to the passed device (for a possibly different device + /// type). Initializes OptionalDeviceGuard if it is not already initialized. + /// + /// See notes on why this is called reset_device on InlineDeviceGuard. + /// + /// Optional argument is for testing only. + template ::value>::type> + void reset_device(at::Device device, const DeviceGuardImplInterface* impl = nullptr) { + if (!guard_.has_value()) { + guard_.emplace(device, impl); + } else { + guard_->reset_device(device, impl); + } + } + + /// Resets the currently set device to its original device, and then sets the + /// current device to the passed device. Initializes the guard if it is + /// not already initialized. This is effectively equivalent to set_device + /// when a guard supports only a single device type. + template ::value>::type> + void reset_device(at::Device device) { + if (!guard_.has_value()) { + guard_.emplace(device); + } else { + guard_->reset_device(device); + } + } + + /// Sets the device index to the given one. The device type is statically + /// known. + template ::value >::type> + void set_index(DeviceIndex index) { + if (!guard_.has_value()) { + guard_.emplace(index); + } else { + guard_->set_index(index); + } + } + + /// Returns the device that was set immediately prior to initialization of the, + /// guard, or nullopt if the guard is uninitialized. + optional original_device() const { + return guard_.has_value() ? make_optional(guard_->original_device()) : nullopt; + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device, if the guard is initialized, + /// or nullopt if the guard is uninitialized. + optional current_device() const { + return guard_.has_value() ? 
make_optional(guard_->current_device()) : nullopt; + } + + /// Restore the original device, resetting this guard to uninitialized state. + void reset() { + guard_.reset(); + } + +private: + optional> guard_; +}; + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/impl/InlineEvent.h b/thirdparty/libtorch/include/c10/core/impl/InlineEvent.h new file mode 100644 index 0000000000..dfc86173c6 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/InlineEvent.h @@ -0,0 +1,102 @@ +#pragma once + +#include "c10/core/DeviceType.h" +#include "c10/core/Stream.h" +#include "c10/util/Exception.h" +#include "c10/core/impl/DeviceGuardImplInterface.h" + +namespace c10 { +namespace impl { + +template +struct InlineEvent final { + + InlineEvent() = delete; + InlineEvent( + const DeviceType _device_type, + const EventFlag _flag = EventFlag::PYTORCH_DEFAULT) + : backend_{_device_type}, + device_type_{_device_type}, + flag_{_flag} { } + + // Copy constructor and copy assignment operator (deleted) + InlineEvent(const InlineEvent&) = delete; + InlineEvent& operator=(const InlineEvent&) = delete; + + // Move constructor and move assignment operator + InlineEvent(InlineEvent&& other) + : InlineEvent(other.device_type_, other.flag_) { + swap(std::move(other)); + } + InlineEvent& operator=(InlineEvent&& other) { + swap(std::move(other)); + return *this; + } + + void swap(InlineEvent&& other) { + std::swap(event_, other.event_); + std::swap(backend_, other.backend_); + std::swap(device_type_, other.device_type_); + std::swap(device_index_, other.device_index_); + std::swap(flag_, other.flag_); + std::swap(was_marked_for_recording_, other.was_marked_for_recording_); + } + + ~InlineEvent() noexcept { + if (event_) backend_.destroyEvent(event_, device_index_); + } + + DeviceType device_type() const noexcept { return device_type_; } + DeviceIndex device_index() const noexcept { return device_index_; } + EventFlag flag() const noexcept { return flag_; } + bool was_marked_for_recording() const noexcept { return was_marked_for_recording_; } + + + void recordOnce(const Stream& stream) { + if (!was_marked_for_recording_) record(stream); + } + + void record(const Stream& stream) { + TORCH_CHECK( + stream.device_type() == device_type_, + "Event device type ", + DeviceTypeName(device_type_), + " does not match recording stream's device type ", + DeviceTypeName(stream.device_type()), + "."); + + backend_.record(&event_, stream, device_index_, flag_); + was_marked_for_recording_ = true; + device_index_ = stream.device_index(); + } + + void block(const Stream& stream) const { + if (!was_marked_for_recording_) return; + + TORCH_CHECK( + stream.device_type() == device_type_, + "Event device type ", + DeviceTypeName(device_type_), + " does not match blocking stream's device type ", + DeviceTypeName(stream.device_type()), + "."); + + backend_.block(event_, stream); + } + + bool query() const { + if (!was_marked_for_recording_) return true; + return backend_.queryEvent(event_); + } + +private: + void* event_ = nullptr; + T backend_; + DeviceType device_type_; + DeviceIndex device_index_ = -1; + EventFlag flag_ = EventFlag::PYTORCH_DEFAULT; + bool was_marked_for_recording_ = false; +}; + +} // impl +} // c10 diff --git a/thirdparty/libtorch/include/c10/core/impl/InlineStreamGuard.h b/thirdparty/libtorch/include/c10/core/impl/InlineStreamGuard.h new file mode 100644 index 0000000000..4fbb61d60e --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/InlineStreamGuard.h @@ -0,0 +1,187 @@ +#pragma 
once + +#include + +namespace c10 { +namespace impl { + +/** + * A StreamGuard is an RAII class that changes the current device + * to the device corresponding to some stream, and changes the + * default stream on that device to be this stream. + * + * InlineStreamGuard is a helper class for implementing StreamGuards. + * See InlineDeviceGuard for guidance on how to use this class. + */ +template +class InlineStreamGuard : private InlineDeviceGuard { +public: + /// No default constructor, see Note [Omitted default constructor from RAII] + explicit InlineStreamGuard() = delete; + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + explicit InlineStreamGuard(Stream stream) + : InlineDeviceGuard(stream.device()) + , original_stream_of_original_device_(this->impl_.getStream(original_device())) + , original_stream_of_current_device_(this->impl_.exchangeStream(stream)) + , current_stream_(stream) + {} + + /// This constructor exists purely for testing + template ::value>::type> + explicit InlineStreamGuard(Stream stream, const DeviceGuardImplInterface* impl) + : InlineDeviceGuard(stream.device(), impl ? impl : getDeviceGuardImpl(stream.device_type())) + , original_stream_of_original_device_(this->impl_.getStream(original_device())) + , original_stream_of_current_device_(this->impl_.exchangeStream(stream)) + , current_stream_(stream) + {} + + /// Copy is disallowed + InlineStreamGuard(const InlineStreamGuard&) = delete; + InlineStreamGuard& operator=(const InlineStreamGuard&) = delete; + + /// Move is disallowed, as StreamGuard does not have an uninitialized state, + /// which is required for moves on types with nontrivial destructors. + InlineStreamGuard(InlineStreamGuard&& other) = delete; + InlineStreamGuard& operator=(InlineStreamGuard&& other) = delete; + + ~InlineStreamGuard() { + this->impl_.exchangeStream(original_stream_of_current_device_); + } + + /// Resets the currently set stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// + /// NOTE: this implementation may skip some stream/device setting if + /// it can prove that it is unnecessary. + /// + /// WARNING: reset_stream does NOT preserve previously set streams on + /// different devices. If you need to set streams on multiple devices + /// on CUDA, use CUDAMultiStreamGuard instead. + void reset_stream(Stream stream) { + // TODO: make a version that takes an impl argument. Unfortunately, + // that will require SFINAE because impl is only valid for the + // VirtualGuardImpl specialization. + if (stream.device() == this->current_device()) { + this->impl_.exchangeStream(stream); + current_stream_ = stream; + } else { + // Destruct and reconstruct the StreamGuard in-place + this->impl_.exchangeStream(original_stream_of_current_device_); + this->reset_device(stream.device()); + original_stream_of_current_device_ = this->impl_.exchangeStream(stream); + current_stream_ = stream; + } + } + + // It's not clear if set_device should also reset the current stream + // if the device is unchanged; therefore, we don't provide it. + // The situation is somewhat clearer with reset_device, but it's still + // a pretty weird thing to do, so haven't added this either. + + /// Returns the stream of the original device prior to this guard. 
Subtly, + /// the stream returned here is the original stream of the *original* + /// device; i.e., it's the stream that your computation *would* have + /// been put on, if it hadn't been for this meddling stream guard. + /// This is usually what you want. + Stream original_stream() const { + return original_stream_of_original_device_; + } + + /// Returns the most recent stream that was set using this device guard, + /// either from construction, or via set_stream. + Stream current_stream() const { + return current_stream_; + } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device/reset_device/set_index. + Device current_device() const { + return InlineDeviceGuard::current_device(); + } + + /// Returns the device that was set at the most recent reset_stream(), + /// or otherwise the device at construction time. + Device original_device() const { + return InlineDeviceGuard::original_device(); + } + +private: + Stream original_stream_of_original_device_; // what the user probably cares about + Stream original_stream_of_current_device_; // what we need to restore + Stream current_stream_; +}; + +/** + * An OptionalStreamGuard is an RAII class that sets a device to some value on + * initialization, and resets the device to its original value on destruction. + * See InlineOptionalDeviceGuard for more guidance on how to use this class. + */ +template +class InlineOptionalStreamGuard { +public: + /// Creates an uninitialized stream guard. + explicit InlineOptionalStreamGuard() + : guard_() // See Note [Explicit initialization of optional fields] + {} + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream, + /// if the passed stream is not nullopt. + explicit InlineOptionalStreamGuard(optional stream_opt) + : guard_() { + if (stream_opt.has_value()) { + guard_.emplace(stream_opt.value()); + } + } + + /// All constructors of StreamGuard are valid for OptionalStreamGuard + template + explicit InlineOptionalStreamGuard(Args&&... args) + : guard_(in_place, std::forward(args)...) {} + + // See Note [Move construction for RAII guards is tricky] + InlineOptionalStreamGuard(InlineOptionalStreamGuard&& other) = delete; + + // See Note [Move assignment for RAII guards is tricky] + InlineOptionalStreamGuard& operator=(InlineOptionalStreamGuard&& other) = delete; + + /// Resets the currently set stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// Initializes the OptionalStreamGuard if it was not previously initialized. + void reset_stream(Stream stream) { + if (guard_.has_value()) { + guard_->reset_stream(stream); + } else { + guard_.emplace(stream); + } + } + + /// Returns the stream that was set at the time the guard was most recently + /// initialized, or nullopt if the guard is uninitialized. + optional original_stream() const { + return guard_.has_value() ? make_optional(guard_->original_stream()) : nullopt; + } + + /// Returns the most recent stream that was set using this stream guard, + /// either from construction, or via reset_stream, if the guard is initialized, + /// or nullopt if the guard is uninitialized. + optional current_stream() const { + return guard_.has_value() ? 
make_optional(guard_->current_stream()) : nullopt; + } + + /// Restore the original device and stream, resetting this guard to uninitialized state. + void reset() { + guard_.reset(); + } + +private: + optional> guard_; +}; + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/impl/LocalTensorTypeSet.h b/thirdparty/libtorch/include/c10/core/impl/LocalTensorTypeSet.h new file mode 100644 index 0000000000..728cc8afe3 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/LocalTensorTypeSet.h @@ -0,0 +1,100 @@ +#pragma once + +#include + +// TLS management for TensorTypeSet (the "local" TensorTypeSet(s)) +// +// This manages two thread-local TensorTypeSets: +// +// - The included type set, which adds a tensor type for consideration +// in dispatch. (For example, you might add ProfilingTensorId to +// the included type set to turn on profiling on all tensor operations.) +// +// - The excluded type set, which disqualifies a tensor type from dispatch. +// (For example, after redispatching on variable, we disqualify +// VariableTensorId so we don't attempt to handle variable again.) +// (Exclusion wins over inclusion.) +// +// NB: Originally, I implemented the excluded type set as storing the inverted +// set, but TLS is defined to be zero-initialized, so this doesn't actually work +// (if it's inverted, you want the set to be -1 initialized). + +namespace c10 { +namespace impl { + +// POD version of LocalTensorTypeSet. Declared here just so that +// we can put it in the guards. +struct C10_API PODLocalTensorTypeSet { + uint64_t included_; + uint64_t excluded_; + + TensorTypeSet included() const { + return TensorTypeSet(TensorTypeSet::RAW, included_); + } + TensorTypeSet excluded() const { + return TensorTypeSet(TensorTypeSet::RAW, excluded_); + } + + void set_included(TensorTypeSet x) { + included_ = x.raw_repr(); + } + void set_excluded(TensorTypeSet x) { + excluded_ = x.raw_repr(); + } +}; +static_assert(std::is_pod::value, "PODLocalTensorTypeSet must be a POD type."); + +struct C10_API LocalTensorTypeSet { + /* implicit */ LocalTensorTypeSet(PODLocalTensorTypeSet x) + : included_(x.included()), excluded_(x.excluded()) {} + TensorTypeSet included_; + TensorTypeSet excluded_; +}; + +C10_API LocalTensorTypeSet tls_local_tensor_type_set(); + +// RAII API for manipulating the thread-local dispatch state. + +class C10_API IncludeTensorTypeIdGuard { +public: + IncludeTensorTypeIdGuard(TensorTypeId); + ~IncludeTensorTypeIdGuard(); +private: + // A little micro-optimization to save us from tls_get_addr call + // on destruction + PODLocalTensorTypeSet* tls_; + TensorTypeId id_; + bool prev_state_; +}; + +class C10_API ExcludeTensorTypeIdGuard { +public: + ExcludeTensorTypeIdGuard(TensorTypeId); + ~ExcludeTensorTypeIdGuard(); +private: + // A little micro-optimization to save us from tls_get_addr call + // on destruction + PODLocalTensorTypeSet* tls_; + TensorTypeId id_; + bool prev_state_; +}; + +// Non-RAII API for manipulating the thread-local dispatch state. +// Please prefer the RAII API. The non-RAII API may be useful when +// the included/excluded state of a given TensorTypeId must span +// many calls from the Python to the C++, so you cannot conveniently +// use an RAII guard. +// +// Example use case: a Python context manager that includes a certain +// TensorTypeId, to ensure ops running under the context manager dispatch +// through that TensorTypeId's registered overrides. 
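The RAII guards declared just above (IncludeTensorTypeIdGuard / ExcludeTensorTypeIdGuard) are the preferred way to touch this thread-local dispatch state; the non-RAII functions below exist for state that must span many Python-to-C++ calls. A minimal illustrative sketch of the RAII form follows; the concrete TensorTypeId value is an assumption used only for illustration, not something this header mandates.

#include <c10/core/impl/LocalTensorTypeSet.h>

void run_without_variable_dispatch() {
  // Exclude VariableTensorId from dispatch on this thread; the previous
  // exclusion state is restored when the guard is destroyed.
  c10::impl::ExcludeTensorTypeIdGuard no_variable(c10::TensorTypeId::VariableTensorId);
  // ... ops dispatched from this thread now skip the Variable handler ...
}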
+// +// The non-RAII API is less efficient than the RAII guards because both the +// getter and setter will do a tls_getaddr lookup (the RAII struct only needs one!) + +bool tls_is_tensor_type_id_excluded(TensorTypeId x); +void tls_set_tensor_type_id_excluded(TensorTypeId x, bool desired_state); +bool tls_is_tensor_type_id_included(TensorTypeId x); +void tls_set_tensor_type_id_included(TensorTypeId x, bool desired_state); + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/impl/VirtualGuardImpl.h b/thirdparty/libtorch/include/c10/core/impl/VirtualGuardImpl.h new file mode 100644 index 0000000000..c6b6420501 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/impl/VirtualGuardImpl.h @@ -0,0 +1,75 @@ +#pragma once + +#include + +namespace c10 { +namespace impl { + +/** + * An implementation of DeviceGuardImplInterface which delegates + * to virtual dispatch on the DeviceGuardImpl registry. + */ +class VirtualGuardImpl final : public DeviceGuardImplInterface { +public: + VirtualGuardImpl(DeviceType device_type) + : impl_(getDeviceGuardImpl(device_type)) {} + // This constructor exists purely for testing + VirtualGuardImpl(const DeviceGuardImplInterface* impl) + : impl_(impl) {} + + // Copying and moving is OK! + + DeviceType type() const override { + return impl_->type(); + } + Device exchangeDevice(Device d) const override { + return impl_->exchangeDevice(d); + } + Device getDevice() const override { + return impl_->getDevice(); + } + void setDevice(Device d) const override { + impl_->setDevice(d); + } + void uncheckedSetDevice(Device d) const noexcept override { + impl_->uncheckedSetDevice(d); + } + Stream getStream(Device d) const noexcept override { + return impl_->getStream(d); + } + Stream getDefaultStream(Device d) const override { + return impl_->getDefaultStream(d); + } + Stream exchangeStream(Stream s) const noexcept override { + return impl_->exchangeStream(s); + } + DeviceIndex deviceCount() const noexcept override { + return impl_->deviceCount(); + } + + // Event functions + void record(void** event, + const Stream& stream, + const DeviceIndex device_index, + const EventFlag flag) const override { + impl_->record(event, stream, device_index, flag); + } + void block( + void* event, + const Stream& stream) const override { + impl_->block(event, stream); + } + bool queryEvent(void* event) const override { + return impl_->queryEvent(event); + } + void destroyEvent( + void* event, + const DeviceIndex device_index) const noexcept override { + impl_->destroyEvent(event, device_index); + } + +private: + const DeviceGuardImplInterface* impl_ = nullptr; +}; + +}} // namespace c10::impl diff --git a/thirdparty/libtorch/include/c10/core/thread_pool.h b/thirdparty/libtorch/include/c10/core/thread_pool.h new file mode 100644 index 0000000000..28cb5f3503 --- /dev/null +++ b/thirdparty/libtorch/include/c10/core/thread_pool.h @@ -0,0 +1,125 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +namespace c10 { + +// TODO: move this to C10 and make it C10_API +class C10_API TaskThreadPoolBase { + public: + virtual void run(const std::function& func) = 0; + + virtual size_t size() const = 0; + + /** + * The number of available (i.e. idle) threads in this thread pool. + */ + virtual size_t numAvailable() const = 0; + + /** + * Check if the current thread is from the thread pool. 
+ */ + virtual bool inThreadPool() const = 0; + + virtual ~TaskThreadPoolBase() noexcept {} + + static size_t defaultNumThreads() { + auto num_threads = std::thread::hardware_concurrency(); +#if defined(_M_X64) || defined(__x86_64__) + num_threads /= 2; +#endif + return num_threads; + } +}; + +class C10_API ThreadPool : public c10::TaskThreadPoolBase { + protected: + struct task_element_t { + bool run_with_id; + const std::function no_id; + const std::function with_id; + + explicit task_element_t(const std::function& f) + : run_with_id(false), no_id(f), with_id(nullptr) {} + explicit task_element_t(const std::function& f) + : run_with_id(true), no_id(nullptr), with_id(f) {} + }; + + std::queue tasks_; + std::vector threads_; + std::mutex mutex_; + std::condition_variable condition_; + std::condition_variable completed_; + std::atomic_bool running_; + bool complete_; + std::size_t available_; + std::size_t total_; + int numa_node_id_; + + public: + ThreadPool() = delete; + + explicit ThreadPool( + int pool_size, + int numa_node_id = -1, + std::function init_thread = nullptr); + + ~ThreadPool(); + + size_t size() const override; + + size_t numAvailable() const override; + + bool inThreadPool() const override; + + void run(const std::function& func) override; + + template + void runTaskWithID(Task task) { + std::unique_lock lock(mutex_); + + // Set task and signal condition variable so that a worker thread will + // wake up and use the task. + tasks_.push( + task_element_t(static_cast>(task))); + complete_ = false; + condition_.notify_one(); + } + + /// @brief Wait for queue to be empty + void waitWorkComplete(); + + private: + // @brief Entry point for pool threads. + void main_loop(std::size_t index); +}; + +class C10_API TaskThreadPool : public c10::ThreadPool { + public: + explicit TaskThreadPool( + std::size_t pool_size, + int numa_node_id = -1) + : ThreadPool(pool_size, numa_node_id, [numa_node_id](){ + setThreadName("CaffeTaskThread"); + NUMABind(numa_node_id); + }) {} +}; + +C10_DECLARE_SHARED_REGISTRY( + ThreadPoolRegistry, + TaskThreadPoolBase, + int, + int, + bool); + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/cuda/CUDACachingAllocator.h b/thirdparty/libtorch/include/c10/cuda/CUDACachingAllocator.h new file mode 100644 index 0000000000..31f9d3a167 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDACachingAllocator.h @@ -0,0 +1,125 @@ +#ifndef THC_DEVICE_ALLOCATOR_INC +#define THC_DEVICE_ALLOCATOR_INC + +#include +#include +#include +#include + +#include +#include + +namespace c10 { + +// Caching allocator will execute every registered callback if it unable to find +// block inside of already allocated area. +class C10_CUDA_API FreeMemoryCallback { + public: + virtual ~FreeMemoryCallback() {}; + virtual bool Execute() = 0; +}; + +C10_DECLARE_REGISTRY(FreeCudaMemoryCallbacksRegistry, FreeMemoryCallback); +#define REGISTER_FREE_MEMORY_CALLBACK(name, ...) \ + C10_REGISTER_CLASS(FreeCudaMemoryCallbacksRegistry, name, __VA_ARGS__); + +namespace cuda { + +// TODO: Turn this into an honest to goodness class. I briefly attempted to do +// this, but it was a bit irritating to figure out how to also correctly +// apply pimpl pattern so I didn't have to leak any internal implementation +// details in the header (CUDACachingAllocator could be made a pimpl, but +// you also need to appropriately define a class which is a subclass +// of Allocator. Not impossible, but required a bit more surgery than +// I wanted to do at the time.) 
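Callers normally reach this allocator through the free functions declared further down in the CUDACachingAllocator namespace. As a hedged sketch of the malloc/free-style entry points only (raw_alloc / raw_delete), not a description of the allocator's internals:

#include <c10/cuda/CUDACachingAllocator.h>

void scratch_buffer_demo(size_t nbytes) {
  // Served from the allocator's cached pool when a suitable free block exists,
  // so repeated calls usually avoid cudaMalloc/cudaFree round trips.
  void* p = c10::cuda::CUDACachingAllocator::raw_alloc(nbytes);
  // ... enqueue kernels that use p on the current stream ...
  c10::cuda::CUDACachingAllocator::raw_delete(p);  // block returns to the cache, not cudaFree
}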
+// +// Why is this using a namespace rather than old-style THCCachingAllocator_ +// prefix? Mostly because it made the HIPify rules easier to write; _ is +// not counted as a word boundary, so you would otherwise have to list each +// of these functions. + +namespace CUDACachingAllocator { + +struct Stat { + int64_t current = 0; + int64_t peak = 0; + int64_t allocated = 0; + int64_t freed = 0; +}; + +enum struct StatType : uint64_t { + AGGREGATE = 0, + SMALL_POOL = 1, + LARGE_POOL = 2, + NUM_TYPES = 3 // remember to update this whenever a new stat type is added +}; + +typedef std::array(StatType::NUM_TYPES)> StatArray; + +// Struct containing memory allocator summary statistics for a device. +struct DeviceStats { + // COUNT: allocations requested by client code + StatArray allocation; + // COUNT: number of allocated segments from cudaMalloc(). + StatArray segment; + // COUNT: number of active memory blocks (allocated or used by stream) + StatArray active; + // COUNT: number of inactive, split memory blocks (unallocated but can't be released via cudaFree) + StatArray inactive_split; + + // SUM: bytes requested by client code + StatArray allocated_bytes; + // SUM: bytes reserved by this memory allocator (both free and used) + StatArray reserved_bytes; + // SUM: bytes within active memory blocks + StatArray active_bytes; + // SUM: bytes within inactive, split memory blocks + StatArray inactive_split_bytes; + + // COUNT: total number of failed calls to CUDA malloc necessitating cache flushes. + int64_t num_alloc_retries = 0; + + // COUNT: total number of OOMs (i.e. failed calls to CUDA after cache flush) + int64_t num_ooms = 0; +}; + +// Struct containing info of an allocation block (i.e. a fractional part of a cudaMalloc).. +struct BlockInfo { + int64_t size = 0; + bool allocated = false; + bool active = false; +}; + +// Struct containing info of a memory segment (i.e. one contiguous cudaMalloc). +struct SegmentInfo { + int64_t device = 0; + int64_t address = 0; + int64_t total_size = 0; + int64_t allocated_size = 0; + int64_t active_size = 0; + bool is_large = false; + std::vector blocks; +}; + +C10_CUDA_API void* raw_alloc(size_t nbytes); +C10_CUDA_API void raw_delete(void* ptr); + +C10_CUDA_API Allocator* get(); +C10_CUDA_API void emptyCache(); +C10_CUDA_API void cacheInfo(int dev_id, size_t* cachedAndFree, size_t* largestBlock); +C10_CUDA_API void* getBaseAllocation(void *ptr, size_t *size); +C10_CUDA_API void recordStream(void *ptr, CUDAStream stream); +C10_CUDA_API DeviceStats getDeviceStats(int device); +C10_CUDA_API void resetAccumulatedStats(int device); +C10_CUDA_API void resetPeakStats(int device); +C10_CUDA_API std::vector snapshot(); + +C10_CUDA_API std::mutex* getFreeMutex(); + +C10_CUDA_API std::shared_ptr getIpcDevPtr(std::string handle); + +} // namespace CUDACachingAllocator + +}} // namespace c10::cuda + +#endif diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAException.h b/thirdparty/libtorch/include/c10/cuda/CUDAException.h new file mode 100644 index 0000000000..bdce44f1c6 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAException.h @@ -0,0 +1,31 @@ +#pragma once + +#include +#include +#include + +// Note [CHECK macro] +// ~~~~~~~~~~~~~~~~~~ +// This is a macro so that AT_ERROR can get accurate __LINE__ +// and __FILE__ information. We could split this into a short +// macro and a function implementation if we pass along __LINE__ +// and __FILE__, but no one has found this worth doing. 
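Given the note above on why C10_CUDA_CHECK must be a macro, a short usage sketch may help; the wrapper function is hypothetical and only the macro itself comes from this header. Because TORCH_CHECK expands at the call site, the reported file and line point at the failing CUDA call:

#include <cuda_runtime.h>
#include <c10/cuda/CUDAException.h>

void* checked_device_alloc(size_t nbytes) {
  void* ptr = nullptr;
  C10_CUDA_CHECK(cudaMalloc(&ptr, nbytes));   // throws c10::Error if cudaMalloc fails
  C10_CUDA_CHECK(cudaMemset(ptr, 0, nbytes)); // error message names this call site
  return ptr;
}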
+ +// For CUDA Runtime API +#define C10_CUDA_CHECK(EXPR) \ + do { \ + cudaError_t __err = EXPR; \ + if (__err != cudaSuccess) { \ + auto error_unused C10_UNUSED = cudaGetLastError(); \ + TORCH_CHECK(false, "CUDA error: ", cudaGetErrorString(__err)); \ + } \ + } while (0) + + #define C10_CUDA_CHECK_WARN(EXPR) \ + do { \ + cudaError_t __err = EXPR; \ + if (__err != cudaSuccess) { \ + auto error_unused C10_UNUSED = cudaGetLastError(); \ + TORCH_WARN("CUDA warning: ", cudaGetErrorString(__err)); \ + } \ + } while (0) diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAFunctions.h b/thirdparty/libtorch/include/c10/cuda/CUDAFunctions.h new file mode 100644 index 0000000000..2af6925ce0 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAFunctions.h @@ -0,0 +1,48 @@ +#pragma once + +// This header provides C++ wrappers around commonly used CUDA API functions. +// The benefit of using C++ here is that we can raise an exception in the +// event of an error, rather than explicitly pass around error codes. This +// leads to more natural APIs. +// +// The naming convention used here matches the naming convention of torch.cuda + +#include + +#include +#include +#include + +namespace c10 { +namespace cuda { + +inline DeviceIndex device_count() noexcept { + int count; + // NB: In the past, we were inconsistent about whether or not this reported + // an error if there were driver problems are not. Based on experience + // interacting with users, it seems that people basically ~never want this + // function to fail; it should just return zero if things are not working. + // Oblige them. + cudaError_t err = cudaGetDeviceCount(&count); + if (err != cudaSuccess) { + // Clear out the error state, so we don't spuriously trigger someone else. + // (This shouldn't really matter, since we won't be running very much CUDA + // code in this regime.) + cudaError_t last_err = cudaGetLastError(); + (void)last_err; + return 0; + } + return static_cast(count); +} + +inline DeviceIndex current_device() { + int cur_device; + C10_CUDA_CHECK(cudaGetDevice(&cur_device)); + return static_cast(cur_device); +} + +inline void set_device(DeviceIndex device) { + C10_CUDA_CHECK(cudaSetDevice(static_cast(device))); +} + +}} // namespace c10::cuda diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAGuard.h b/thirdparty/libtorch/include/c10/cuda/CUDAGuard.h new file mode 100644 index 0000000000..0635922101 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAGuard.h @@ -0,0 +1,236 @@ +#pragma once + +#include +#include +#include +#include +#include + +#include + +namespace c10 { namespace cuda { + +// This code is kind of boilerplatey. See Note [Whither the DeviceGuard boilerplate] + +/// A variant of DeviceGuard that is specialized for CUDA. It accepts +/// integer indices (interpreting them as CUDA devices) and is a little +/// more efficient than DeviceGuard (it compiles to straight line +/// cudaSetDevice/cudaGetDevice calls); however, it can only be used +/// from code that links against CUDA directly. +struct CUDAGuard { + /// No default constructor; see Note [Omitted default constructor from RAII] + explicit CUDAGuard() = delete; + + /// Set the current CUDA device to the passed device index. + explicit CUDAGuard(DeviceIndex device_index) : guard_(device_index) {} + + /// Sets the current CUDA device to the passed device. Errors if the passed + /// device is not a CUDA device. 
+ explicit CUDAGuard(Device device) : guard_(device) {} + + // Copy is not allowed + CUDAGuard(const CUDAGuard&) = delete; + CUDAGuard& operator=(const CUDAGuard&) = delete; + + // Move is not allowed (there is no uninitialized state) + CUDAGuard(CUDAGuard&& other) = delete; + CUDAGuard& operator=(CUDAGuard&& other) = delete; + + /// Sets the CUDA device to the given device. Errors if the given device + /// is not a CUDA device. + void set_device(Device device) { guard_.set_device(device); } + + /// Sets the CUDA device to the given device. Errors if the given device + /// is not a CUDA device. (This method is provided for uniformity with + /// DeviceGuard). + void reset_device(Device device) { guard_.reset_device(device); } + + /// Sets the CUDA device to the given device index. + void set_index(DeviceIndex device_index) { guard_.set_index(device_index); } + + /// Returns the device that was set upon construction of the guard + Device original_device() const { return guard_.original_device(); } + + /// Returns the last device that was set via `set_device`, if any, otherwise the + /// device passed during construction. + Device current_device() const { return guard_.current_device(); } + + private: + /// The guard for the current device. + c10::impl::InlineDeviceGuard guard_; +}; + +/// A variant of OptionalDeviceGuard that is specialized for CUDA. See +/// CUDAGuard for when you can use this. +struct OptionalCUDAGuard { + /// Create an uninitialized OptionalCUDAGuard. + explicit OptionalCUDAGuard() : guard_() {} + + /// Set the current CUDA device to the passed Device, if it is not nullopt. + explicit OptionalCUDAGuard(optional device_opt) : guard_(device_opt) {} + + /// Set the current CUDA device to the passed device index, if it is not + /// nullopt + explicit OptionalCUDAGuard(optional device_index_opt) : guard_(device_index_opt) {} + + // Copy is not allowed + OptionalCUDAGuard(const OptionalCUDAGuard&) = delete; + OptionalCUDAGuard& operator=(const OptionalCUDAGuard&) = delete; + + // See Note [Move construction for RAII guards is tricky] + OptionalCUDAGuard(OptionalCUDAGuard&& other) = delete; + + // See Note [Move assignment for RAII guards is tricky] + OptionalCUDAGuard& operator=(OptionalCUDAGuard&& other) = delete; + + /// Sets the CUDA device to the given device, initializing the guard if it + /// is not already initialized. Errors if the given device is not a CUDA device. + void set_device(Device device) { guard_.set_device(device); } + + /// Sets the CUDA device to the given device, initializing the guard if it is + /// not already initialized. Errors if the given device is not a CUDA device. + /// (This method is provided for uniformity with OptionalDeviceGuard). + void reset_device(Device device) { guard_.reset_device(device); } + + /// Sets the CUDA device to the given device index, initializing the guard if + /// it is not already initialized. + void set_index(DeviceIndex device_index) { guard_.set_index(device_index); } + + /// Returns the device that was set immediately prior to initialization of the + /// guard, or nullopt if the guard is uninitialized. + optional original_device() const { return guard_.original_device(); } + + /// Returns the most recent device that was set using this device guard, + /// either from construction, or via set_device, if the guard is initialized, + /// or nullopt if the guard is uninitialized. 
+ optional current_device() const { return guard_.current_device(); } + + /// Restore the original CUDA device, resetting this guard to uninitialized state. + void reset() { guard_.reset(); } + +private: + c10::impl::InlineOptionalDeviceGuard guard_; +}; + +/// A variant of StreamGuard that is specialized for CUDA. See CUDAGuard +/// for when you can use this. +struct CUDAStreamGuard { + /// No default constructor, see Note [Omitted default constructor from RAII] + explicit CUDAStreamGuard() = delete; + + /// Set the current CUDA device to the device associated with the passed stream, + /// and set the current CUDA stream on that device to the passed stream. + /// Errors if the Stream is not a CUDA stream. + explicit CUDAStreamGuard(Stream stream) : guard_(stream) {} + + /// Copy is disallowed + CUDAStreamGuard(const CUDAStreamGuard&) = delete; + CUDAStreamGuard& operator=(const CUDAStreamGuard&) = delete; + + /// Move is disallowed, as CUDAStreamGuard does not have an uninitialized state, + /// which is required for moves on types with nontrivial destructors. + CUDAStreamGuard(CUDAStreamGuard&& other) = delete; + CUDAStreamGuard& operator=(CUDAStreamGuard&& other) = delete; + + /// Resets the currently set stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// Errors if the stream passed is not a CUDA stream. + /// + /// NOTE: this implementation may skip some stream/device setting if + /// it can prove that it is unnecessary. + /// + /// WARNING: reset_stream does NOT preserve previously set streams on + /// different devices. If you need to set streams on multiple devices + /// on CUDA, use CUDAMultiStreamGuard instead. + void reset_stream(Stream stream) { guard_.reset_stream(stream); } + + /// Returns the CUDA stream that was set at the time the guard was constructed. + CUDAStream original_stream() const { + return CUDAStream(CUDAStream::UNCHECKED, guard_.original_stream()); + } + + /// Returns the most recent CUDA stream that was set using this device guard, + /// either from construction, or via set_stream. + CUDAStream current_stream() const { + return CUDAStream(CUDAStream::UNCHECKED, guard_.current_stream()); + } + + /// Returns the most recent CUDA device that was set using this device guard, + /// either from construction, or via set_device/reset_device/set_index. + Device current_device() const { return guard_.current_device(); } + + /// Returns the CUDA device that was set at the most recent reset_stream(), + /// or otherwise the device at construction time. + Device original_device() const { return guard_.original_device(); } + +private: + c10::impl::InlineStreamGuard guard_; +}; + +/// A variant of OptionalStreamGuard that is specialized for CUDA. See CUDAGuard +/// for when you can use this. +struct OptionalCUDAStreamGuard { + /// Create an uninitialized guard. + explicit OptionalCUDAStreamGuard() : guard_() {} + + /// Set the current CUDA device to the device associated with the passed stream, + /// and set the current CUDA stream on that device to the passed stream. + /// Errors if the Stream is not a CUDA stream. + explicit OptionalCUDAStreamGuard(Stream stream) : guard_(stream) {} + + /// Set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream, + /// if the passed stream is not nullopt. 
+ explicit OptionalCUDAStreamGuard(optional stream_opt) : guard_(stream_opt) {} + + /// Copy is disallowed + OptionalCUDAStreamGuard(const OptionalCUDAStreamGuard&) = delete; + OptionalCUDAStreamGuard& operator=(const OptionalCUDAStreamGuard&) = delete; + + // See Note [Move construction for RAII guards is tricky] + OptionalCUDAStreamGuard(OptionalCUDAStreamGuard&& other) = delete; + + // See Note [Move assignment for RAII guards is tricky] + OptionalCUDAStreamGuard& operator=(OptionalCUDAStreamGuard&& other) = delete; + + /// Resets the currently set CUDA stream to the original stream and + /// the currently set device to the original device. Then, + /// set the current device to the device associated with the passed stream, + /// and set the current stream on that device to the passed stream. + /// Initializes the guard if it was not previously initialized. + void reset_stream(Stream stream) { guard_.reset_stream(stream); } + + /// Returns the CUDA stream that was set at the time the guard was most recently + /// initialized, or nullopt if the guard is uninitialized. + optional original_stream() const { + auto r = guard_.original_stream(); + if (r.has_value()) { + return make_optional(CUDAStream(CUDAStream::UNCHECKED, r.value())); + } else { + return nullopt; + } + } + + /// Returns the most recent CUDA stream that was set using this stream guard, + /// either from construction, or via reset_stream, if the guard is initialized, + /// or nullopt if the guard is uninitialized. + optional current_stream() const { + auto r = guard_.current_stream(); + if (r.has_value()) { + return make_optional(CUDAStream(CUDAStream::UNCHECKED, r.value())); + } else { + return nullopt; + } + } + + /// Restore the original CUDA device and stream, resetting this guard to uninitialized state. + void reset() { guard_.reset(); } + +private: + c10::impl::InlineOptionalStreamGuard guard_; +}; + +} // namespace cuda +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAMacros.h b/thirdparty/libtorch/include/c10/cuda/CUDAMacros.h new file mode 100644 index 0000000000..cd63cec850 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAMacros.h @@ -0,0 +1,38 @@ +#pragma once + +#ifndef C10_USING_CUSTOM_GENERATED_MACROS +#include +#endif + +// See c10/macros/Export.h for a detailed explanation of what the function +// of these macros are. We need one set of macros for every separate library +// we build. + +#ifdef _WIN32 +#if defined(C10_CUDA_BUILD_SHARED_LIBS) +#define C10_CUDA_EXPORT __declspec(dllexport) +#define C10_CUDA_IMPORT __declspec(dllimport) +#else +#define C10_CUDA_EXPORT +#define C10_CUDA_IMPORT +#endif +#else // _WIN32 +#if defined(__GNUC__) +#define C10_CUDA_EXPORT __attribute__((__visibility__("default"))) +#else // defined(__GNUC__) +#define C10_CUDA_EXPORT +#endif // defined(__GNUC__) +#define C10_CUDA_IMPORT C10_CUDA_EXPORT +#endif // _WIN32 + +// This one is being used by libc10_cuda.so +#ifdef C10_CUDA_BUILD_MAIN_LIB +#define C10_CUDA_API C10_CUDA_EXPORT +#else +#define C10_CUDA_API C10_CUDA_IMPORT +#endif + +/** + * The maximum number of GPUs that we recognizes. 
+ */ +#define C10_COMPILE_TIME_MAX_GPUS 16 diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAMathCompat.h b/thirdparty/libtorch/include/c10/cuda/CUDAMathCompat.h new file mode 100644 index 0000000000..a3b8f6f4ab --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAMathCompat.h @@ -0,0 +1,105 @@ +#pragma once + +/* This file defines math functions compatible across different gpu + * platforms (currently CUDA and HIP). + */ +#if defined(__CUDACC__) || defined(__HIPCC__) + +#include + +#ifdef __HIPCC__ +#define __MATH_FUNCTIONS_DECL__ inline C10_DEVICE +#else /* __HIPCC__ */ +#ifdef __CUDACC_RTC__ +#define __MATH_FUNCTIONS_DECL__ C10_HOST_DEVICE +#else /* __CUDACC_RTC__ */ +#define __MATH_FUNCTIONS_DECL__ static inline C10_HOST_DEVICE +#endif /* __CUDACC_RTC__ */ +#endif /* __HIPCC__ */ + +namespace c10 { +namespace cuda { +namespace compat { + +__MATH_FUNCTIONS_DECL__ float abs(float x) { + return ::fabsf(x); +} +__MATH_FUNCTIONS_DECL__ double abs(double x) { + return ::fabs(x); +} + +__MATH_FUNCTIONS_DECL__ float exp(float x) { + return ::expf(x); +} +__MATH_FUNCTIONS_DECL__ double exp(double x) { + return ::exp(x); +} + +__MATH_FUNCTIONS_DECL__ float floor(float x) { + return ::floorf(x); +} +__MATH_FUNCTIONS_DECL__ double floor(double x) { + return ::floor(x); +} + +__MATH_FUNCTIONS_DECL__ float log(float x) { + return ::logf(x); +} +__MATH_FUNCTIONS_DECL__ double log(double x) { + return ::log(x); +} + +__MATH_FUNCTIONS_DECL__ float max(float x, float y) { + return ::fmaxf(x, y); +} +__MATH_FUNCTIONS_DECL__ double max(double x, double y) { + return ::fmax(x, y); +} + +__MATH_FUNCTIONS_DECL__ float pow(float x, float y) { + return ::powf(x, y); +} +__MATH_FUNCTIONS_DECL__ double pow(double x, double y) { + return ::pow(x, y); +} + +__MATH_FUNCTIONS_DECL__ void sincos(float x, float* sptr, float* cptr) { + return ::sincosf(x, sptr, cptr); +} +__MATH_FUNCTIONS_DECL__ void sincos(double x, double* sptr, double* cptr) { + return ::sincos(x, sptr, cptr); +} + +__MATH_FUNCTIONS_DECL__ float sqrt(float x) { + return ::sqrtf(x); +} +__MATH_FUNCTIONS_DECL__ double sqrt(double x) { + return ::sqrt(x); +} + +__MATH_FUNCTIONS_DECL__ float rsqrt(float x) { + return ::rsqrtf(x); +} +__MATH_FUNCTIONS_DECL__ double rsqrt(double x) { + return ::rsqrt(x); +} + +__MATH_FUNCTIONS_DECL__ float tan(float x) { + return ::tanf(x); +} +__MATH_FUNCTIONS_DECL__ double tan(double x) { + return ::tan(x); +} + +__MATH_FUNCTIONS_DECL__ float normcdf(float x) { + return ::normcdff(x); +} +__MATH_FUNCTIONS_DECL__ double normcdf(double x) { + return ::normcdf(x); +} + +} // namespace compat +} // namespace cuda +} // namespace c10 + +#endif diff --git a/thirdparty/libtorch/include/c10/cuda/CUDAStream.h b/thirdparty/libtorch/include/c10/cuda/CUDAStream.h new file mode 100644 index 0000000000..b23f8aa1c6 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/CUDAStream.h @@ -0,0 +1,228 @@ +#pragma once + +#include +#include + +#include + +#include +#include +#include +#include +#include + +/* +* Stream pool note. +* +* A CUDAStream is an abstraction of an actual cuStream on the GPU. CUDAStreams +* are backed by cuStreams, but they use several pools to minimize the costs +* associated with creating, retaining, and destroying cuStreams. +* +* There are three pools per device, and a device's pools are lazily created. +* +* The first pool contains only the default stream. When the default stream +* is requested it's returned. +* +* The second pool is the "low priority" or "default priority" streams. 
In +* HIP builds there is no distinction between streams in this pool and streams +* in the third pool (below). There are 32 of these streams per device, and +* when a stream is requested one of these streams is returned round-robin. +* That is, the first stream requested is at index 0, the second at index 1... +* to index 31, then index 0 again. +* +* This means that if 33 low priority streams are requested, the first and +* last streams requested are actually the same stream (under the covers) +* and kernels enqueued on them cannot run concurrently. +* +* The third pool is the "high priority" streams. The third pool acts like +* the second pool except the streams are created with a higher priority. +* +* These pools suggest that stream users should prefer many short-lived streams, +* as the cost of acquiring and releasing streams is effectively zero. If +* many longer-lived streams are required in performance critical scenarios +* then the functionality here may need to be extended to allow, for example, +* "reserving" a subset of the pool so that other streams do not accidentally +* overlap the performance critical streams. +* +* Note: although the notion of "current stream for device" is thread local +* (every OS thread has a separate current stream, as one might expect), +* the stream pool is global across all threads; stream 0 is always stream 0 +* no matter which thread you use it on. Multiple threads can synchronize +* on the same stream. Although the CUDA documentation is not very clear +* on the matter, streams are thread safe; e.g., it is safe to enqueue +* a kernel on the same stream from two different threads. +*/ + +namespace c10 { +namespace cuda { + +// Value object representing a CUDA stream. This is just a wrapper +// around c10::Stream, but it comes with a little extra CUDA-specific +// functionality (conversion to cudaStream_t), and a guarantee that +// the wrapped c10::Stream really is a CUDA stream. +class C10_CUDA_API CUDAStream { +public: + + enum Unchecked { UNCHECKED }; + + /// Construct a CUDAStream from a Stream. This construction is checked, + /// and will raise an error if the Stream is not, in fact, a CUDA stream. + explicit CUDAStream(Stream stream) : stream_(stream) { + TORCH_CHECK(stream_.device_type() == DeviceType::CUDA); + } + + /// Construct a CUDAStream from a Stream with no error checking. + /// This constructor uses the "named" constructor idiom, and can + /// be invoked as: CUDAStream(CUDAStream::UNCHECKED, stream) + explicit CUDAStream(Unchecked, Stream stream) : stream_(stream) {} + + bool operator==(const CUDAStream& other) const noexcept { + return unwrap() == other.unwrap(); + } + + bool operator!=(const CUDAStream& other) const noexcept { + return unwrap() != other.unwrap(); + } + + /// Implicit conversion to cudaStream_t. + operator cudaStream_t() const { return stream(); } + + /// Implicit conversion to Stream (a.k.a., forget that the stream is a + /// CUDA stream). + operator Stream() const { return unwrap(); } + + /// Get the CUDA device index that this stream is associated with. + DeviceIndex device_index() const { return stream_.device_index(); } + + /// Get the full Device that this stream is associated with. The Device + /// is guaranteed to be a CUDA device. + Device device() const { return Device(DeviceType::CUDA, device_index()); } + + /// Return the stream ID corresponding to this particular stream. 
+ StreamId id() const { return stream_.id(); } + + bool query() const { + DeviceGuard guard{stream_.device()}; + cudaError_t err = cudaStreamQuery(stream()); + + if (err == cudaSuccess) { + return true; + } else if (err != cudaErrorNotReady) { + C10_CUDA_CHECK(err); + } + + return false; + } + + void synchronize() const { + DeviceGuard guard{stream_.device()}; + C10_CUDA_CHECK(cudaStreamSynchronize(stream())); + } + + int priority() const { + #ifndef __HIP_PLATFORM_HCC__ + DeviceGuard guard{stream_.device()}; + int priority = 0; + C10_CUDA_CHECK(cudaStreamGetPriority(stream(), &priority)); + return priority; + #else + AT_ERROR("cuStreamGetPriority with HIP is not supported"); + #endif + } + + /// Explicit conversion to cudaStream_t. + cudaStream_t stream() const; + + /// Explicit conversion to Stream. + Stream unwrap() const { return stream_; } + + /// Reversibly pack a CUDAStream into a uint64_t representation. This may + /// be helpful when storing a CUDAStream in a C struct, where you cannot + /// conveniently place the CUDAStream object itself (which is morally + /// equivalent, but unfortunately is not POD due to the fact that it + /// has constructors.) + /// + /// The CUDAStream can be unpacked using unpack(). The format of + /// the uint64_t is unspecified and may be changed. + uint64_t pack() const noexcept { + return stream_.pack(); + } + + // Unpack a CUDAStream from the uint64_t representation generated by pack(). + static CUDAStream unpack(uint64_t bits) { + return CUDAStream(Stream::unpack(bits)); + } + + static std::tuple priority_range() { + #ifndef __HIP_PLATFORM_HCC__ + int least_priority, greatest_priority; + C10_CUDA_CHECK( + cudaDeviceGetStreamPriorityRange(&least_priority, &greatest_priority)); + return std::make_tuple(least_priority, greatest_priority); + #else + AT_ERROR("cuDeviceGetStreamPriorityRange with HIP is not supported"); + #endif + } + + // Deleted for now; use CUDAEvent::block instead + // void synchronize_with(const CUDAEvent& event) const; + +private: + Stream stream_; +}; + +/** + * Get a new stream from the CUDA stream pool. You can think of this + * as "creating" a new stream, but no such creation actually happens; + * instead, streams are preallocated from the pool and returned in a + * round-robin fashion. + * + * You can request a stream from the high priority pool by setting + * isHighPriority to true, or a stream for a specific device by setting device + * (defaulting to the current CUDA stream.) + */ +CAFFE2_API CUDAStream +getStreamFromPool(const bool isHighPriority = false, DeviceIndex device = -1); + +/** + * Get the default CUDA stream, for the passed CUDA device, or for the + * current device if no device index is passed. The default stream is + * where most computation occurs when you aren't explicitly using + * streams. + */ +CAFFE2_API CUDAStream getDefaultCUDAStream(DeviceIndex device_index = -1); + +/** + * Get the current CUDA stream, for the passed CUDA device, or for the + * current device if no device index is passed. The current CUDA stream + * will usually be the default CUDA stream for the device, but it may + * be different if someone called 'setCurrentCUDAStream' or used 'StreamGuard' + * or 'CUDAStreamGuard'. + */ +CAFFE2_API CUDAStream getCurrentCUDAStream(DeviceIndex device_index = -1); + +/** + * Set the current stream on the device of the passed in stream to be + * the passed in stream. 
Yes, you read that right: this function + * has *nothing* to do with the current device: it toggles the current + * stream of the device of the passed stream. + * + * Confused? Avoid using this function; prefer using 'CUDAStreamGuard' instead + * (which will switch both your current device and current stream in the way you + * expect, and reset it back to its original state afterwards). + */ +CAFFE2_API void setCurrentCUDAStream(CUDAStream stream); + +C10_API std::ostream& operator<<(std::ostream& stream, const CUDAStream& s); + +} // namespace cuda +} // namespace at + +namespace std { + template <> + struct hash { + size_t operator()(c10::cuda::CUDAStream s) const noexcept { + return std::hash{}(s.unwrap()); + } + }; +} // namespace std diff --git a/thirdparty/libtorch/include/c10/cuda/impl/CUDAGuardImpl.h b/thirdparty/libtorch/include/c10/cuda/impl/CUDAGuardImpl.h new file mode 100644 index 0000000000..47bcaf4bbc --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/impl/CUDAGuardImpl.h @@ -0,0 +1,154 @@ +#pragma once + +#include +#include +#include + +#include +#include +#include + +#include + +namespace c10 { +namespace cuda { +namespace impl { + +struct CUDAGuardImpl final : public c10::impl::DeviceGuardImplInterface { + static constexpr DeviceType static_type = DeviceType::CUDA; + + CUDAGuardImpl() {} + explicit CUDAGuardImpl(DeviceType t) { + TORCH_INTERNAL_ASSERT(t == DeviceType::CUDA); + } + DeviceType type() const override { + return DeviceType::CUDA; + } + Device exchangeDevice(Device d) const override { + TORCH_INTERNAL_ASSERT(d.type() == DeviceType::CUDA); + Device old_device = getDevice(); + if (old_device.index() != d.index()) { + C10_CUDA_CHECK(cudaSetDevice(d.index())); + } + return old_device; + } + Device getDevice() const override { + int device; + C10_CUDA_CHECK(cudaGetDevice(&device)); + return Device(DeviceType::CUDA, device); + } + void setDevice(Device d) const override { + TORCH_INTERNAL_ASSERT(d.type() == DeviceType::CUDA); + C10_CUDA_CHECK(cudaSetDevice(d.index())); + } + void uncheckedSetDevice(Device d) const noexcept override { + C10_CUDA_CHECK_WARN(cudaSetDevice(d.index())); + } + Stream getStream(Device d) const noexcept override { + return getCurrentCUDAStream(d.index()).unwrap(); + } + Stream getDefaultStream(Device d) const override { + return getDefaultCUDAStream(d.index()); + } + // NB: These do NOT set the current device + Stream exchangeStream(Stream s) const noexcept override { + CUDAStream cs(s); + auto old_stream = getCurrentCUDAStream(s.device().index()); + setCurrentCUDAStream(cs); + return old_stream.unwrap(); + } + DeviceIndex deviceCount() const noexcept override { + return device_count(); + } + + // Event-related functions + void createEvent( + cudaEvent_t* cuda_event, + const EventFlag flag) const { + // Maps PyTorch's Event::Flag to CUDA flag + auto cuda_flag = cudaEventDefault; + switch (flag) { + case EventFlag::PYTORCH_DEFAULT: + case EventFlag::CUDA_EVENT_DISABLE_TIMING: + cuda_flag = cudaEventDisableTiming; + break; + case EventFlag::BACKEND_DEFAULT: + case EventFlag::CUDA_EVENT_DEFAULT: + cuda_flag = cudaEventDefault; + break; + default: + TORCH_CHECK(false, "CUDA event received unknown flag"); + } + + C10_CUDA_CHECK(cudaEventCreateWithFlags(cuda_event, cuda_flag)); + } + + void destroyEvent( + void* event, + const DeviceIndex device_index) const noexcept override { + if (!event) return; + auto cuda_event = static_cast(event); + int orig_device; + C10_CUDA_CHECK_WARN(cudaGetDevice(&orig_device)); + 
C10_CUDA_CHECK_WARN(cudaSetDevice(device_index)); + C10_CUDA_CHECK_WARN(cudaEventDestroy(cuda_event)); + C10_CUDA_CHECK_WARN(cudaSetDevice(orig_device)); + } + + void record( + void** event, + const Stream& stream, + const DeviceIndex device_index, + const EventFlag flag) const override { + TORCH_CHECK(device_index == -1 || device_index == stream.device_index(), + "Event device index ", + device_index, + " does not match recording stream's device index ", + stream.device_index(), + "."); + + cudaEvent_t cuda_event = static_cast(*event); + CUDAStream cuda_stream{stream}; + + // Moves to stream's device to record + const auto orig_device = getDevice(); + setDevice(stream.device()); + + // Creates the event (lazily) + if (!cuda_event) createEvent(&cuda_event, flag); + C10_CUDA_CHECK(cudaEventRecord(cuda_event, cuda_stream)); + // Makes the void* point to the (possibly just allocated) CUDA event + *event = cuda_event; + + // Resets device + setDevice(orig_device); + } + + void block( + void* event, + const Stream& stream) const override { + if (!event) return; + cudaEvent_t cuda_event = static_cast(event); + CUDAStream cuda_stream{stream}; + const auto orig_device = getDevice(); + setDevice(stream.device()); + C10_CUDA_CHECK(cudaStreamWaitEvent( + cuda_stream, + cuda_event, + /*flags (must be zero)=*/ 0)); + setDevice(orig_device); + } + + // May be called from any device + bool queryEvent(void* event) const override { + if (!event) return true; + cudaEvent_t cuda_event = static_cast(event); + const cudaError_t err = cudaEventQuery(cuda_event); + if (err != cudaErrorNotReady) { + C10_CUDA_CHECK(err); + } + return (err == cudaSuccess); + } +}; + +}}} // namespace c10::cuda::impl diff --git a/thirdparty/libtorch/include/c10/cuda/impl/CUDATest.h b/thirdparty/libtorch/include/c10/cuda/impl/CUDATest.h new file mode 100644 index 0000000000..ccfe38e020 --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/impl/CUDATest.h @@ -0,0 +1,11 @@ +#pragma once + +#include + +namespace c10 { +namespace cuda { +namespace impl { + +C10_CUDA_API int c10_cuda_test(); + +}}} /// namespace c10::cuda::impl diff --git a/thirdparty/libtorch/include/c10/cuda/impl/cuda_cmake_macros.h b/thirdparty/libtorch/include/c10/cuda/impl/cuda_cmake_macros.h new file mode 100644 index 0000000000..c363c63d5d --- /dev/null +++ b/thirdparty/libtorch/include/c10/cuda/impl/cuda_cmake_macros.h @@ -0,0 +1,6 @@ +#pragma once + +// Automatically generated header file for the C10 CUDA library. Do not +// include this file directly. Instead, include c10/cuda/CUDAMacros.h + +#define C10_CUDA_BUILD_SHARED_LIBS diff --git a/thirdparty/libtorch/include/c10/macros/Export.h b/thirdparty/libtorch/include/c10/macros/Export.h new file mode 100644 index 0000000000..0a213edd7f --- /dev/null +++ b/thirdparty/libtorch/include/c10/macros/Export.h @@ -0,0 +1,110 @@ +#ifndef C10_MACROS_EXPORT_H_ +#define C10_MACROS_EXPORT_H_ + +/* Header file to define the common scaffolding for exported symbols. + * + * Export is by itself a quite tricky situation to deal with, and if you are + * hitting this file, make sure you start with the background here: + * - Linux: https://gcc.gnu.org/wiki/Visibility + * - Windows: + * https://docs.microsoft.com/en-us/cpp/cpp/dllexport-dllimport?view=vs-2017 + * + * Do NOT include this file directly. Instead, use c10/macros/Macros.h + */ + +// You do not need to edit this part of file unless you are changing the core +// pytorch export abstractions. +// +// This part defines the C10 core export and import macros. 
This is controlled +// by whether we are building shared libraries or not, which is determined +// during build time and codified in c10/core/cmake_macros.h. +// When the library is built as a shared lib, EXPORT and IMPORT will contain +// visibility attributes. If it is being built as a static lib, then EXPORT +// and IMPORT basically have no effect. + +// As a rule of thumb, you should almost NEVER mix static and shared builds for +// libraries that depend on c10. AKA, if c10 is built as a static library, we +// recommend everything dependent on c10 to be built statically. If c10 is built +// as a shared library, everything dependent on it should be built as shared. In +// the PyTorch project, all native libraries shall use the macro +// C10_BUILD_SHARED_LIB to check whether pytorch is building shared or static +// libraries. + +// For build systems that do not directly depend on CMake and directly build +// from the source directory (such as Buck), one may not have a cmake_macros.h +// file at all. In this case, the build system is responsible for providing +// correct macro definitions corresponding to the cmake_macros.h.in file. +// +// In such scenarios, one should define the macro +// C10_USING_CUSTOM_GENERATED_MACROS +// to inform this header that it does not need to include the cmake_macros.h +// file. + +#ifndef C10_USING_CUSTOM_GENERATED_MACROS +#include "c10/macros/cmake_macros.h" +#endif // C10_USING_CUSTOM_GENERATED_MACROS + +#ifdef _WIN32 +#define C10_HIDDEN +#if defined(C10_BUILD_SHARED_LIBS) +#define C10_EXPORT __declspec(dllexport) +#define C10_IMPORT __declspec(dllimport) +#else +#define C10_EXPORT +#define C10_IMPORT +#endif +#else // _WIN32 +#if defined(__GNUC__) +#define C10_EXPORT __attribute__((__visibility__("default"))) +#define C10_HIDDEN __attribute__((__visibility__("hidden"))) +#else // defined(__GNUC__) +#define C10_EXPORT +#define C10_HIDDEN +#endif // defined(__GNUC__) +#define C10_IMPORT C10_EXPORT +#endif // _WIN32 + +#ifdef NO_EXPORT +#undef C10_EXPORT +#define C10_EXPORT +#endif + +// Definition of an adaptive XX_API macro, that depends on whether you are +// building the library itself or not, routes to XX_EXPORT and XX_IMPORT. +// Basically, you will need to do this for each shared library that you are +// building, and the instruction is as follows: assuming that you are building +// a library called libawesome.so. You should: +// (1) for your cmake target (usually done by "add_library(awesome, ...)"), +// define a macro called AWESOME_BUILD_MAIN_LIB using +// target_compile_options. +// (2) define the AWESOME_API macro similar to the one below. +// And in the source file of your awesome library, use AWESOME_API to +// annotate public symbols. + +// Here, for the C10 library, we will define the macro C10_API for both import +// and export. 
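// As a concrete, purely illustrative sketch of that recipe for a hypothetical
// libawesome.so (none of the "awesome" names below exist in this codebase):
//
//   # CMake: mark the target that builds libawesome.so itself
//   add_library(awesome SHARED ${AWESOME_SRCS})
//   target_compile_options(awesome PRIVATE "-DAWESOME_BUILD_MAIN_LIB")
//
//   // awesome/Export.h: route AWESOME_API to export or import
//   #ifdef AWESOME_BUILD_MAIN_LIB
//   #define AWESOME_API C10_EXPORT   // building libawesome.so itself
//   #else
//   #define AWESOME_API C10_IMPORT   // consuming libawesome.so
//   #endif
//
//   // awesome/Engine.h: annotate the public symbols
//   AWESOME_API void run_engine();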
+ +// This one is being used by libc10.so +#ifdef C10_BUILD_MAIN_LIB +#define C10_API C10_EXPORT +#else +#define C10_API C10_IMPORT +#endif + +// This one is being used by libtorch.so +// TODO: rename this to TORCH_API +#ifdef CAFFE2_BUILD_MAIN_LIB +#define CAFFE2_API C10_EXPORT +#else +#define CAFFE2_API C10_IMPORT +#endif + +// This one will eventually be used by libtorch_cuda.so, but for +// now it has the same function as CAFFE2_API +#ifdef CAFFE2_BUILD_MAIN_LIB +#define TORCH_CUDA_API C10_EXPORT +#else +#define TORCH_CUDA_API C10_IMPORT +#endif + +#endif // C10_MACROS_MACROS_H_ diff --git a/thirdparty/libtorch/include/c10/macros/Macros.h b/thirdparty/libtorch/include/c10/macros/Macros.h new file mode 100644 index 0000000000..2b75f09f67 --- /dev/null +++ b/thirdparty/libtorch/include/c10/macros/Macros.h @@ -0,0 +1,275 @@ +#ifndef C10_MACROS_MACROS_H_ +#define C10_MACROS_MACROS_H_ + +/* Main entry for c10/macros. + * + * In your code, include c10/macros/Macros.h directly, instead of individual + * files in this folder. + */ + +// For build systems that do not directly depend on CMake and directly build +// from the source directory (such as Buck), one may not have a cmake_macros.h +// file at all. In this case, the build system is responsible for providing +// correct macro definitions corresponding to the cmake_macros.h.in file. +// +// In such scenarios, one should define the macro +// C10_USING_CUSTOM_GENERATED_MACROS +// to inform this header that it does not need to include the cmake_macros.h +// file. + +#ifndef C10_USING_CUSTOM_GENERATED_MACROS +#include "c10/macros/cmake_macros.h" +#endif // C10_USING_CUSTOM_GENERATED_MACROS + +#include "c10/macros/Export.h" + +// Disable the copy and assignment operator for a class. Note that this will +// disable the usage of the class in std containers. +#define C10_DISABLE_COPY_AND_ASSIGN(classname) \ + classname(const classname&) = delete; \ + classname& operator=(const classname&) = delete + +#define C10_CONCATENATE_IMPL(s1, s2) s1##s2 +#define C10_CONCATENATE(s1, s2) C10_CONCATENATE_IMPL(s1, s2) + +#define C10_MACRO_EXPAND(args) args + +/** + * C10_ANONYMOUS_VARIABLE(str) introduces an identifier starting with + * str and ending with a number that varies with the line. + */ +#ifdef __COUNTER__ +#define C10_ANONYMOUS_VARIABLE(str) C10_CONCATENATE(str, __COUNTER__) +#else +#define C10_ANONYMOUS_VARIABLE(str) C10_CONCATENATE(str, __LINE__) +#endif + + +/// C10_NODISCARD - Warn if a type or return value is discarded. + +// Technically, we should check if __cplusplus > 201402L here, because +// [[nodiscard]] is only defined in C++17. However, some compilers +// we care about don't advertise being C++17 (e.g., clang), but +// support the attribute anyway. In fact, this is not just a good idea, +// it's the law: clang::warn_unused_result doesn't work on nvcc + clang +// and the best workaround for this case is to use [[nodiscard]] +// instead; see https://github.com/pytorch/pytorch/issues/13118 +// +// Note to future editors: if you have noticed that a compiler is +// misbehaving (e.g., it advertises support, but the support doesn't +// actually work, or it is emitting warnings). 
Some compilers which +// are strict about the matter include MSVC, which will complain: +// +// error C2429: attribute 'nodiscard' requires compiler flag '/std:c++latest' +// +// Exhibits: +// - MSVC 19.14: https://godbolt.org/z/Dzd7gn (requires /std:c++latest) +// - Clang 8.0.0: https://godbolt.org/z/3PYL4Z (always advertises support) +// - gcc 8.3: https://godbolt.org/z/4tLMQS (always advertises support) +#define C10_NODISCARD +#if defined(__has_cpp_attribute) +# if __has_cpp_attribute(nodiscard) +# undef C10_NODISCARD +# define C10_NODISCARD [[nodiscard]] +# endif +// Workaround for llvm.org/PR23435, since clang 3.6 and below emit a spurious +// error when __has_cpp_attribute is given a scoped attribute in C mode. +#elif __cplusplus && defined(__has_cpp_attribute) +# if __has_cpp_attribute(clang::warn_unused_result) +// TODO: It's possible this is still triggering https://github.com/pytorch/pytorch/issues/13118 +// on Windows; if it is, better fix it. +# undef C10_NODISCARD +# define C10_NODISCARD [[clang::warn_unused_result]] +# endif +#endif + +// suppress an unused variable. +#ifdef _MSC_VER +#define C10_UNUSED +#else +#define C10_UNUSED __attribute__((__unused__)) +#endif //_MSC_VER + +#define C10_RESTRICT __restrict + +// Simply define the namespace, in case a dependent library want to refer to +// the c10 namespace but not any nontrivial files. +namespace c10 {} // namespace c10 +namespace c10 { namespace cuda {} } +namespace c10 { namespace hip {} } + +// Since C10 is the core library for caffe2 (and aten), we will simply reroute +// all abstractions defined in c10 to be available in caffe2 as well. +// This is only for backwards compatibility. Please use the symbols from the +// c10 namespace where possible. +namespace caffe2 { using namespace c10; } +namespace at { using namespace c10; } +namespace at { namespace cuda { using namespace c10::cuda; }} + +// WARNING!!! THIS IS A GIANT HACK!!! +// This line means you cannot simultaneously include c10/hip +// and c10/cuda and then use them from the at::cuda namespace. +// This is true in practice, because HIPIFY works inplace on +// files in ATen/cuda, so it assumes that c10::hip is available +// from at::cuda. This namespace makes that happen. When +// HIPIFY is no longer out-of-place, we can switch the cuda +// here to hip and everyone is happy. +namespace at { namespace cuda { using namespace c10::hip; }} + +// C10_NORETURN +#if defined(_MSC_VER) +#define C10_NORETURN __declspec(noreturn) +#else +#define C10_NORETURN __attribute__((noreturn)) +#endif + +// C10_LIKELY/C10_UNLIKELY +// +// These macros provide parentheses, so you can use these macros as: +// +// if C10_LIKELY(some_expr) { +// ... +// } +// +// NB: static_cast to boolean is mandatory in C++, because __builtin_expect +// takes a long argument, which means you may trigger the wrong conversion +// without it. 
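// For instance (an illustrative case, not code from this header): with
// 'long flags', __builtin_expect(flags & 0x2, 1) asks the compiler to assume
// the masked long equals the literal 1, which it never does when the bit is
// set (the value is 2), so the hint is useless. Writing
// __builtin_expect(static_cast<bool>(flags & 0x2), 1) normalizes the value
// to 0 or 1 first; the macros below insert that cast for you.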
+// +#if defined(__GNUC__) || defined(__ICL) || defined(__clang__) +#define C10_LIKELY(expr) (__builtin_expect(static_cast(expr), 1)) +#define C10_UNLIKELY(expr) (__builtin_expect(static_cast(expr), 0)) +#else +#define C10_LIKELY(expr) (expr) +#define C10_UNLIKELY(expr) (expr) +#endif + +#include +#include + +#if defined(__CUDACC__) || defined(__HIPCC__) +// Designates functions callable from the host (CPU) and the device (GPU) +#define C10_HOST_DEVICE __host__ __device__ +#define C10_DEVICE __device__ +#define C10_HOST __host__ +// constants from (https://docs.nvidia.com/cuda/cuda-c-programming-guide/index.html#features-and-technical-specifications) +// The maximum number of threads per multiprocessor is 1024 for Turing architecture (7.5) +// but 2048 for previous architectures. You'll get warnings if you exceed these constants. +// Hence, the following macros adjust the input values from the user to resolve potential warnings. +#if __CUDA_ARCH__ >= 750 +constexpr uint32_t CUDA_MAX_THREADS_PER_SM = 1024; +#else +constexpr uint32_t CUDA_MAX_THREADS_PER_SM = 2048; +#endif +// CUDA_MAX_THREADS_PER_BLOCK is same for all architectures currently +constexpr uint32_t CUDA_MAX_THREADS_PER_BLOCK = 1024; +// CUDA_THREADS_PER_BLOCK_FALLBACK is the "canonical fallback" choice of block size. +// 256 is a good number for this fallback and should give good occupancy and +// versatility across all architectures. +constexpr uint32_t CUDA_THREADS_PER_BLOCK_FALLBACK = 256; +// NOTE: if you are thinking of constexpr-ify the inputs to launch bounds, it +// turns out that although __launch_bounds__ can take constexpr, it +// can't take a constexpr that has anything to do with templates. +// Currently we use launch_bounds that depend on template arguments in +// Loops.cuh, Reduce.cuh and LossCTC.cuh. Hence, C10_MAX_THREADS_PER_BLOCK and +// C10_MIN_BLOCKS_PER_SM are kept as macros. +// Suppose you were planning to write __launch_bounds__(a, b), based on your performance tuning on a modern GPU. +// Instead, you should write __launch_bounds__(C10_MAX_THREADS_PER_BLOCK(a), C10_MIN_BLOCKS_PER_SM(a, b)), +// which will also properly respect limits on old architectures. +#define C10_MAX_THREADS_PER_BLOCK(val) (((val) <= CUDA_MAX_THREADS_PER_BLOCK) ? (val) : CUDA_THREADS_PER_BLOCK_FALLBACK) +#define C10_MIN_BLOCKS_PER_SM(threads_per_block, blocks_per_sm) ((((threads_per_block)*(blocks_per_sm) <= CUDA_MAX_THREADS_PER_SM) ? (blocks_per_sm) : ((CUDA_MAX_THREADS_PER_SM + (threads_per_block) - 1) / (threads_per_block)))) +// C10_LAUNCH_BOUNDS is analogous to __launch_bounds__ +#define C10_LAUNCH_BOUNDS_0 __launch_bounds__(256, 4) // default launch bounds that should give good occupancy and versatility across all architectures. 
+#define C10_LAUNCH_BOUNDS_1(max_threads_per_block) __launch_bounds__((C10_MAX_THREADS_PER_BLOCK((max_threads_per_block)))) +#define C10_LAUNCH_BOUNDS_2(max_threads_per_block, min_blocks_per_sm) __launch_bounds__((C10_MAX_THREADS_PER_BLOCK((max_threads_per_block))), (C10_MIN_BLOCKS_PER_SM((max_threads_per_block), (min_blocks_per_sm)))) +#else +#define C10_HOST_DEVICE +#define C10_HOST +#define C10_DEVICE +#endif + +#ifdef __HIP_PLATFORM_HCC__ +#define C10_HIP_HOST_DEVICE __host__ __device__ +#else +#define C10_HIP_HOST_DEVICE +#endif + +#ifdef __HIP_PLATFORM_HCC__ +#define C10_WARP_SIZE 64 +#else +#define C10_WARP_SIZE 32 +#endif + +#ifdef __APPLE__ +#include +#endif + +#if defined(__ANDROID__) +#define C10_ANDROID 1 +#define C10_MOBILE 1 +#elif ( \ + defined(__APPLE__) && \ + (TARGET_IPHONE_SIMULATOR || TARGET_OS_SIMULATOR || TARGET_OS_IPHONE)) +#define C10_IOS 1 +#define C10_MOBILE 1 +#elif (defined(__APPLE__) && TARGET_OS_MAC) +#define C10_IOS 1 +#endif // ANDROID / IOS / MACOS + +// Portably determine if a type T is trivially copyable or not. +#if __GNUG__ && __GNUC__ < 5 +#define C10_IS_TRIVIALLY_COPYABLE(T) __has_trivial_copy(T) +#else +#define C10_IS_TRIVIALLY_COPYABLE(T) std::is_trivially_copyable::value +#endif + +// AT_CPP14_CONSTEXPR: Make it constexpr if we're in C++14 or later +#if defined(_MSC_VER) && defined(__CUDACC__) && \ + (__CUDACC_VER_MAJOR__ >= 10 || \ + (__CUDACC_VER_MAJOR__ == 9 && __CUDACC_VER_MINOR__ >= 2)) +// workaround: CUDA >= v9.2 compiler cannot compile correctly on Windows. +#define AT_CPP14_CONSTEXPR +#define AT_IS_CPP14_CONSTEXPR 0 +#else +#if defined(__cpp_constexpr) && __cpp_constexpr >= 201304 +#define AT_CPP14_CONSTEXPR constexpr +#define AT_IS_CPP14_CONSTEXPR 1 +#else +#define AT_CPP14_CONSTEXPR +#define AT_IS_CPP14_CONSTEXPR 0 +#endif +#endif + +// We need --expt-relaxed-constexpr in CUDA because of Eigen. This flag allows +// device code in CUDA to call host constexpr functions. Unfortunately, +// the CUDA compiler (at least for CUDA 9.0, 9.1 and 9.2) isn't compatible +// with many of the constexpr things we'd like to do and the device code +// compiler crashes when it sees one of these host-only functions. +// It works when nvcc builds host code, but not when it builds device code +// and notices it can call these constexpr functions from device code. +// As a workaround, we use C10_HOST_CONSTEXPR instead of constexpr for these +// functions. This enables constexpr when compiled on the host and applies +// __host__ when it is compiled on the device in an attempt to stop it from +// being called from device functions. Not sure if the latter works, but +// even if not, it not being constexpr anymore should be enough to stop +// it from being called from device code. +// TODO This occurred in CUDA 9 (9.0 to 9.2). Test if this is fixed in CUDA 10. 
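// A rough illustration (the helper below is hypothetical, not part of this
// header): a function declared as
//
//   C10_HOST_CONSTEXPR int64_t wrap_dim(int64_t dim, int64_t rank) {
//     return dim < 0 ? dim + rank : dim;
//   }
//
// expands to an ordinary constexpr function when compiled for the host, but
// during device compilation (__CUDA_ARCH__ defined) it expands to a plain
// __host__ function, so it is no longer constexpr and device code cannot
// call it.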
+#if defined(__CUDA_ARCH__) +#define C10_HOST_CONSTEXPR __host__ +#define C10_HOST_CONSTEXPR_VAR +#define C10_CPP14_HOST_CONSTEXPR __host__ +#else +#define C10_HOST_CONSTEXPR constexpr +#define C10_HOST_CONSTEXPR_VAR constexpr +#define C10_CPP14_HOST_CONSTEXPR AT_CPP14_CONSTEXPR +#endif + +#if !defined(__clang__) && !defined(_MSC_VER) && defined(__GNUC__) && \ + __GNUC__ < 6 +#define CONSTEXPR_EXCEPT_GCC5 +#define IS_NOT_GCC5_CONSTEXPR 0 +#else +#define CONSTEXPR_EXCEPT_GCC5 AT_CPP14_CONSTEXPR +#define IS_NOT_GCC5_CONSTEXPR AT_IS_CPP14_CONSTEXPR +#endif + +#endif // C10_MACROS_MACROS_H_ diff --git a/thirdparty/libtorch/include/c10/macros/cmake_macros.h b/thirdparty/libtorch/include/c10/macros/cmake_macros.h new file mode 100644 index 0000000000..d770487e37 --- /dev/null +++ b/thirdparty/libtorch/include/c10/macros/cmake_macros.h @@ -0,0 +1,22 @@ +#ifndef C10_MACROS_CMAKE_MACROS_H_ +#define C10_MACROS_CMAKE_MACROS_H_ + +// Automatically generated header file for the C10 library. +// Do not include this file directly. Instead, include c10/macros/Macros.h. + +#define C10_BUILD_SHARED_LIBS +/* #undef C10_USE_GLOG */ +/* #undef C10_USE_GFLAGS */ +#define C10_DISABLE_NUMA + +// Used by libtorch mobile build to enable features that are not enabled by +// caffe2 mobile build. Should only use it when necessary as we are committed +// to converging libtorch and caffe2 mobile builds and removing it eventually. +/* #undef FEATURE_TORCH_MOBILE */ + +// If defined it will use static dispatch for ATen operators. +// Should expose this macro for projects including ATen headers to inherient +// the same option. +/* #undef USE_STATIC_DISPATCH */ + +#endif // C10_MACROS_CMAKE_MACROS_H_ diff --git a/thirdparty/libtorch/include/c10/util/AlignOf.h b/thirdparty/libtorch/include/c10/util/AlignOf.h new file mode 100644 index 0000000000..7f99a0718b --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/AlignOf.h @@ -0,0 +1,173 @@ +//===--- AlignOf.h - Portable calculation of type alignment -----*- C++ -*-===// +// +// The LLVM Compiler Infrastructure +// +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. +// +//===----------------------------------------------------------------------===// +// +// This file defines the AlignedCharArray and AlignedCharArrayUnion classes. +// +//===----------------------------------------------------------------------===// + +// ATen: modified from llvm::AlignOf +// replaced LLVM_ALIGNAS with alignas + +#pragma once + +#include + +namespace c10 { + +/// \struct AlignedCharArray +/// \brief Helper for building an aligned character array type. +/// +/// This template is used to explicitly build up a collection of aligned +/// character array types. We have to build these up using a macro and explicit +/// specialization to cope with MSVC (at least till 2015) where only an +/// integer literal can be used to specify an alignment constraint. Once built +/// up here, we can then begin to indirect between these using normal C++ +/// template parameters. + +// MSVC requires special handling here. +#ifndef _MSC_VER + +template +struct AlignedCharArray { + alignas(Alignment) char buffer[Size]; +}; + +#else // _MSC_VER + +/// \brief Create a type with an aligned char buffer. 
+template +struct AlignedCharArray; + +// We provide special variations of this template for the most common +// alignments because __declspec(align(...)) doesn't actually work when it is +// a member of a by-value function argument in MSVC, even if the alignment +// request is something reasonably like 8-byte or 16-byte. Note that we can't +// even include the declspec with the union that forces the alignment because +// MSVC warns on the existence of the declspec despite the union member forcing +// proper alignment. + +template +struct AlignedCharArray<1, Size> { + union { + char aligned; + char buffer[Size]; + }; +}; + +template +struct AlignedCharArray<2, Size> { + union { + short aligned; + char buffer[Size]; + }; +}; + +template +struct AlignedCharArray<4, Size> { + union { + int aligned; + char buffer[Size]; + }; +}; + +template +struct AlignedCharArray<8, Size> { + union { + double aligned; + char buffer[Size]; + }; +}; + +// The rest of these are provided with a __declspec(align(...)) and we simply +// can't pass them by-value as function arguments on MSVC. + +#define AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(x) \ + template \ + struct AlignedCharArray { \ + __declspec(align(x)) char buffer[Size]; \ + }; + +AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(16) +AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(32) +AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(64) +AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT(128) + +#undef AT_ALIGNEDCHARARRAY_TEMPLATE_ALIGNMENT + +#endif // _MSC_VER + +namespace detail { +template < + typename T1, + typename T2 = char, + typename T3 = char, + typename T4 = char, + typename T5 = char, + typename T6 = char, + typename T7 = char, + typename T8 = char, + typename T9 = char, + typename T10 = char> +class AlignerImpl { + T1 t1; + T2 t2; + T3 t3; + T4 t4; + T5 t5; + T6 t6; + T7 t7; + T8 t8; + T9 t9; + T10 t10; + + AlignerImpl() = delete; +}; + +template < + typename T1, + typename T2 = char, + typename T3 = char, + typename T4 = char, + typename T5 = char, + typename T6 = char, + typename T7 = char, + typename T8 = char, + typename T9 = char, + typename T10 = char> +union SizerImpl { + char arr1[sizeof(T1)], arr2[sizeof(T2)], arr3[sizeof(T3)], arr4[sizeof(T4)], + arr5[sizeof(T5)], arr6[sizeof(T6)], arr7[sizeof(T7)], arr8[sizeof(T8)], + arr9[sizeof(T9)], arr10[sizeof(T10)]; +}; +} // end namespace detail + +/// \brief This union template exposes a suitably aligned and sized character +/// array member which can hold elements of any of up to ten types. +/// +/// These types may be arrays, structs, or any other types. The goal is to +/// expose a char array buffer member which can be used as suitable storage for +/// a placement new of any of these types. Support for more than ten types can +/// be added at the cost of more boilerplate. 
+template < + typename T1, + typename T2 = char, + typename T3 = char, + typename T4 = char, + typename T5 = char, + typename T6 = char, + typename T7 = char, + typename T8 = char, + typename T9 = char, + typename T10 = char> +struct AlignedCharArrayUnion + : AlignedCharArray< + alignof(detail::AlignerImpl), + sizeof(::c10::detail:: + SizerImpl)> {}; +} // end namespace c10 diff --git a/thirdparty/libtorch/include/c10/util/Array.h b/thirdparty/libtorch/include/c10/util/Array.h new file mode 100644 index 0000000000..ce4c3aa22d --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Array.h @@ -0,0 +1,323 @@ +/** +* This file is based on the std::array implementation of libstdc++ at +* https://gcc.gnu.org/onlinedocs/gcc-7.1.0/libstdc++/api/a01056_source.html +* +* Changes: +* - isolate, i.e. remove dependencies on internal libstdc++ stuff +* - use c++17 behavior even in c++11 or c++14 +* - remove std::swappable special case because that doesn't work with MSVC +* - constexpr more things +* - add some features like prepend/tail +* +* If using std::array at runtime, feel free to either keep using std::array or use this one - it doesn't really matter. +* For compile time computations, this one here is preferred because std::array in C++11 +* misses some constexpr specifiers, forcing these methods to be called at runtime instead of compile time. +*/ + +// Copyright (C) 2007-2017 Free Software Foundation, Inc. +// +// This file is part of the GNU ISO C++ Library. This library is free +// software; you can redistribute it and/or modify it under the +// terms of the GNU General Public License as published by the +// Free Software Foundation; either version 3, or (at your option) +// any later version. + +// This library is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// Under Section 7 of GPL version 3, you are granted additional +// permissions described in the GCC Runtime Library Exception, version +// 3.1, as published by the Free Software Foundation. + +// You should have received a copy of the GNU General Public License and +// a copy of the GCC Runtime Library Exception along with this program; +// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see +// . 
+ +#pragma once + +#include +#include +#include +#include +#include + +namespace c10 { namespace guts { + +namespace detail { +template +struct __array_traits final { + using _Type = _Tp[_Nm]; + + static constexpr _Tp& _S_ref(const _Type& __t, std::size_t __n) noexcept { + return const_cast<_Tp&>(__t[__n]); + } + + static constexpr _Tp* _S_ptr(const _Type& __t) noexcept { + return const_cast<_Tp*>(__t); + } +}; + +template +struct __array_traits<_Tp, 0> final { + struct _Type final {}; + + static constexpr _Tp& _S_ref(const _Type& __t, std::size_t) noexcept { + return *_S_ptr(__t); + } + + static constexpr _Tp* _S_ptr(const _Type&) noexcept { + return nullptr; + } +}; + +[[noreturn]] inline void __throw_out_of_range(std::string msg) { + throw std::out_of_range(std::move(msg)); +} +} + +template +class array final { +public: + using value_type = _Tp; + using pointer = value_type*; + using const_pointer = const value_type*; + using reference = value_type&; + using const_reference = const value_type&; + using iterator = value_type*; + using const_iterator = const value_type*; + using size_type = std::size_t; + using difference_type = std::ptrdiff_t; + using reverse_iterator = std::reverse_iterator; + using const_reverse_iterator = std::reverse_iterator; + +private: + using _AT_Type = detail::__array_traits<_Tp, _Nm>; +public: // needs to be public member for aggregate initialization + typename _AT_Type::_Type _M_elems; + +public: + // No explicit construct/copy/destroy for aggregate type. + + // DR 776. + AT_CPP14_CONSTEXPR void fill(const value_type& __u) + { std::fill_n(begin(), size(), __u); } + + AT_CPP14_CONSTEXPR void swap(array& __other) + { std::swap_ranges(begin(), end(), __other.begin()); } + + // Iterators. + AT_CPP14_CONSTEXPR iterator begin() noexcept + { return iterator(data()); } + + constexpr const_iterator begin() const noexcept + { return const_iterator(data()); } + + AT_CPP14_CONSTEXPR iterator end() noexcept + { return iterator(data() + _Nm); } + + constexpr const_iterator end() const noexcept + { return const_iterator(data() + _Nm); } + + AT_CPP14_CONSTEXPR reverse_iterator rbegin() noexcept + { return reverse_iterator(end()); } + + constexpr const_reverse_iterator rbegin() const noexcept + { return const_reverse_iterator(end()); } + + AT_CPP14_CONSTEXPR reverse_iterator rend() noexcept + { return reverse_iterator(begin()); } + + constexpr const_reverse_iterator rend() const noexcept + { return const_reverse_iterator(begin()); } + + constexpr const_iterator cbegin() const noexcept + { return const_iterator(data()); } + + constexpr const_iterator cend() const noexcept + { return const_iterator(data() + _Nm); } + + constexpr const_reverse_iterator crbegin() const noexcept + { return const_reverse_iterator(end()); } + + constexpr const_reverse_iterator crend() const noexcept + { return const_reverse_iterator(begin()); } + + // Capacity. + constexpr size_type size() const noexcept { return _Nm; } + + constexpr size_type max_size() const noexcept { return _Nm; } + + constexpr bool empty() const noexcept { return size() == 0; } + + // Element access. 
+ AT_CPP14_CONSTEXPR reference operator[](size_type __n) noexcept + { return _AT_Type::_S_ref(_M_elems, __n); } + + constexpr const_reference operator[](size_type __n) const noexcept + { return _AT_Type::_S_ref(_M_elems, __n); } + + AT_CPP14_CONSTEXPR reference at(size_type __n) { + if (__n >= _Nm) { + detail::__throw_out_of_range(std::string() + + "array::at: __n (which is " + to_string(__n) + ") " + + ">= _Nm (which is " + to_string(_Nm) + ")"); + } + return _AT_Type::_S_ref(_M_elems, __n); + } + + constexpr const_reference at(size_type __n) const { + // Result of conditional expression must be an lvalue so use + // boolean ? lvalue : (throw-expr, lvalue) + return __n < _Nm ? _AT_Type::_S_ref(_M_elems, __n) + : (detail::__throw_out_of_range(std::string() + + "array::at: __n (which is " + to_string(__n) + ") " + + ">= _Nm (which is " + to_string(_Nm) + ")"), + _AT_Type::_S_ref(_M_elems, 0)); + } + + AT_CPP14_CONSTEXPR reference front() noexcept + { return *begin(); } + + constexpr const_reference front() const noexcept + { return _AT_Type::_S_ref(_M_elems, 0); } + + AT_CPP14_CONSTEXPR reference back() noexcept + { return _Nm ? *(end() - 1) : *end(); } + + constexpr const_reference back() const noexcept + { + return _Nm ? _AT_Type::_S_ref(_M_elems, _Nm - 1) + : _AT_Type::_S_ref(_M_elems, 0); + } + + AT_CPP14_CONSTEXPR pointer data() noexcept + { return _AT_Type::_S_ptr(_M_elems); } + + constexpr const_pointer data() const noexcept + { return _AT_Type::_S_ptr(_M_elems); } +}; + +#if defined(__cpp_deduction_guides) && __cpp_deduction_guides >= 201606 + template + array(_Tp, _Up...) -> + array::value && ...), _Tp>, 1 + sizeof...(_Up)>; +#endif + +// Array comparisons. +namespace detail { +template +constexpr inline bool array_equals_(const array& lhs, const array& rhs, size_t current_index) { + return (current_index == N) + ? true + : (lhs.at(current_index) == rhs.at(current_index) && array_equals_(lhs, rhs, current_index + 1)); +} +template +constexpr inline bool array_less_(const array& lhs, const array& rhs, size_t current_index) { + return (current_index == N) + ? false + : (lhs.at(current_index) < rhs.at(current_index) || array_less_(lhs, rhs, current_index + 1)); +} +} +template +constexpr inline bool operator==(const array<_Tp, _Nm>& __one, const array<_Tp, _Nm>& __two) +{ return detail::array_equals_(__one, __two, 0); } + +template +constexpr inline bool operator!=(const array<_Tp, _Nm>& __one, const array<_Tp, _Nm>& __two) +{ return !(__one == __two); } + +template +constexpr inline bool operator<(const array<_Tp, _Nm>& __a, const array<_Tp, _Nm>& __b) +{ return detail::array_less_(__a, __b, 0); } + +template +constexpr inline bool operator>(const array<_Tp, _Nm>& __one, const array<_Tp, _Nm>& __two) +{ return __two < __one; } + +template +constexpr inline bool operator<=(const array<_Tp, _Nm>& __one, const array<_Tp, _Nm>& __two) +{ return !(__one > __two); } + +template +constexpr inline bool operator>=(const array<_Tp, _Nm>& __one, const array<_Tp, _Nm>& __two) +{ return !(__one < __two); } + +// Specialized algorithms. 
+template +inline void swap(array<_Tp, _Nm>& __one, array<_Tp, _Nm>& __two) noexcept(noexcept(__one.swap(__two))) +{ __one.swap(__two); } + +template +constexpr _Tp& get(array<_Tp, _Nm>& __arr) noexcept { + static_assert(_Int < _Nm, "array index is within bounds"); + return detail::__array_traits<_Tp, _Nm>::_S_ref(__arr._M_elems, _Int); +} + +template +constexpr _Tp&& get(array<_Tp, _Nm>&& __arr) noexcept +{ + static_assert(_Int < _Nm, "array index is within bounds"); + return guts::move(get<_Int>(__arr)); +} + +template +constexpr const _Tp& get(const array<_Tp, _Nm>& __arr) noexcept +{ + static_assert(_Int < _Nm, "array index is within bounds"); + return detail::__array_traits<_Tp, _Nm>::_S_ref(__arr._M_elems, _Int); +} + +/** + * Some added features not available in std::array. + * Only call these at compile time, they're slow if called at runtime. + * Examples: + * tail({2, 3, 4}) == {3, 4} + * prepend(2, {3, 4}) == {2, 3, 4} + */ +namespace detail { +template +constexpr inline array tail_(const array& arg, guts::index_sequence) { + static_assert(sizeof...(INDEX) == N-1, "invariant"); + return {{get(arg)...}}; +} +} +template +constexpr inline array tail(const array& arg) { + static_assert(N > 0, "Can only call tail() on an array with at least one element"); + return detail::tail_(arg, guts::make_index_sequence()); +} + +namespace detail { +template +constexpr inline array prepend_(T&& head, const array& tail, guts::index_sequence) { + return {{guts::forward(head), get(tail)...}}; +} +} +template +constexpr inline array prepend(T&& head, const array& tail) { + return detail::prepend_(guts::forward(head), tail, guts::make_index_sequence()); +} + +/** + * Convert a C array into a std::array. + * Example: + * int source[3] = {2, 3, 4}; + * std::array target = to_std_array(source); + */ + +namespace detail { +template +constexpr array to_array_(const T (&arr)[N], guts::index_sequence) { + return {{arr[INDEX]...}}; +} +} + +template +constexpr array to_array(const T (&arr)[N]) { + return detail::to_array_(arr, guts::make_index_sequence()); +} + +}} diff --git a/thirdparty/libtorch/include/c10/util/ArrayRef.h b/thirdparty/libtorch/include/c10/util/ArrayRef.h new file mode 100644 index 0000000000..5b1298fa21 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/ArrayRef.h @@ -0,0 +1,280 @@ +//===--- ArrayRef.h - Array Reference Wrapper -------------------*- C++ -*-===// +// +// The LLVM Compiler Infrastructure +// +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. +// +//===----------------------------------------------------------------------===// + +// ATen: modified from llvm::ArrayRef. +// removed llvm-specific functionality +// removed some implicit const -> non-const conversions that rely on +// complicated std::enable_if meta-programming +// removed a bunch of slice variants for simplicity... + +#pragma once + +#include +#include +#include +#include + +#include +#include +#include + +namespace c10 { + +/// ArrayRef - Represent a constant reference to an array (0 or more elements +/// consecutively in memory), i.e. a start pointer and a length. It allows +/// various APIs to take consecutive elements easily and conveniently. +/// +/// This class does not own the underlying data, it is expected to be used in +/// situations where the data resides in some other buffer, whose lifetime +/// extends past that of the ArrayRef. For this reason, it is not in general +/// safe to store an ArrayRef. 
+/// +/// This is intended to be trivially copyable, so it should be passed by +/// value. +template +class ArrayRef final { + public: + using iterator = const T*; + using const_iterator = const T*; + using size_type = size_t; + using value_type = T; + + using reverse_iterator = std::reverse_iterator; + + private: + /// The start of the array, in an external buffer. + const T* Data; + + /// The number of elements. + size_type Length; + + public: + /// @name Constructors + /// @{ + + /// Construct an empty ArrayRef. + /* implicit */ constexpr ArrayRef() : Data(nullptr), Length(0) {} + + /// Construct an ArrayRef from a single element. + // TODO Make this explicit + constexpr ArrayRef(const T& OneElt) : Data(&OneElt), Length(1) {} + + /// Construct an ArrayRef from a pointer and length. + constexpr ArrayRef(const T* data, size_t length) + : Data(data), Length(length) {} + + /// Construct an ArrayRef from a range. + constexpr ArrayRef(const T* begin, const T* end) + : Data(begin), Length(end - begin) {} + + /// Construct an ArrayRef from a SmallVector. This is templated in order to + /// avoid instantiating SmallVectorTemplateCommon whenever we + /// copy-construct an ArrayRef. + template + /* implicit */ ArrayRef(const SmallVectorTemplateCommon& Vec) + : Data(Vec.data()), Length(Vec.size()) {} + + /// Construct an ArrayRef from a std::vector. + // The enable_if stuff here makes sure that this isn't used for std::vector, + // because ArrayRef can't work on a std::vector bitfield. + template + /* implicit */ ArrayRef(const std::vector& Vec) + : Data(Vec.data()), Length(Vec.size()) { + static_assert(!std::is_same::value, "ArrayRef cannot be constructed from a std::vector bitfield."); + } + + /// Construct an ArrayRef from a std::array + template + /* implicit */ constexpr ArrayRef(const std::array& Arr) + : Data(Arr.data()), Length(N) {} + + /// Construct an ArrayRef from a C array. + template + /* implicit */ constexpr ArrayRef(const T (&Arr)[N]) : Data(Arr), Length(N) {} + + /// Construct an ArrayRef from a std::initializer_list. + /* implicit */ constexpr ArrayRef(const std::initializer_list& Vec) + : Data(std::begin(Vec) == std::end(Vec) ? static_cast(nullptr) : std::begin(Vec)), + Length(Vec.size()) {} + + /// @} + /// @name Simple Operations + /// @{ + + constexpr iterator begin() const { + return Data; + } + constexpr iterator end() const { + return Data + Length; + } + + // These are actually the same as iterator, since ArrayRef only + // gives you const iterators. + constexpr const_iterator cbegin() const { + return Data; + } + constexpr const_iterator cend() const { + return Data + Length; + } + + constexpr reverse_iterator rbegin() const { + return reverse_iterator(end()); + } + constexpr reverse_iterator rend() const { + return reverse_iterator(begin()); + } + + /// empty - Check if the array is empty. + constexpr bool empty() const { + return Length == 0; + } + + constexpr const T* data() const { + return Data; + } + + /// size - Get the array size. + constexpr size_t size() const { + return Length; + } + + /// front - Get the first element. + C10_CPP14_HOST_CONSTEXPR const T& front() const { + TORCH_CHECK(!empty(), "ArrayRef: attempted to access front() of empty list"); + return Data[0]; + } + + /// back - Get the last element. + C10_CPP14_HOST_CONSTEXPR const T& back() const { + TORCH_CHECK(!empty(), "ArrayRef: attempted to access back() of empty list"); + return Data[Length - 1]; + } + + /// equals - Check for element-wise equality. 
+ constexpr bool equals(ArrayRef RHS) const { + return Length == RHS.Length && std::equal(begin(), end(), RHS.begin()); + } + + /// slice(n, m) - Chop off the first N elements of the array, and keep M + /// elements in the array. + C10_CPP14_HOST_CONSTEXPR ArrayRef slice(size_t N, size_t M) const { + TORCH_CHECK( + N + M <= size(), + "ArrayRef: invalid slice, N = ", + N, + "; M = ", + M, + "; size = ", + size()); + return ArrayRef(data() + N, M); + } + + /// slice(n) - Chop off the first N elements of the array. + constexpr ArrayRef slice(size_t N) const { + return slice(N, size() - N); + } + + /// @} + /// @name Operator Overloads + /// @{ + constexpr const T& operator[](size_t Index) const { + return Data[Index]; + } + + /// Vector compatibility + C10_CPP14_HOST_CONSTEXPR const T& at(size_t Index) const { + TORCH_CHECK( + Index < Length, + "ArrayRef: invalid index Index = ", + Index, + "; Length = ", + Length); + return Data[Index]; + } + + /// Disallow accidental assignment from a temporary. + /// + /// The declaration here is extra complicated so that "arrayRef = {}" + /// continues to select the move assignment operator. + template + typename std::enable_if::value, ArrayRef>::type& + operator=(U&& Temporary) = delete; + + /// Disallow accidental assignment from a temporary. + /// + /// The declaration here is extra complicated so that "arrayRef = {}" + /// continues to select the move assignment operator. + template + typename std::enable_if::value, ArrayRef>::type& + operator=(std::initializer_list) = delete; + + /// @} + /// @name Expensive Operations + /// @{ + std::vector vec() const { + return std::vector(Data, Data + Length); + } + + /// @} +}; + +template +std::ostream& operator<<(std::ostream & out, ArrayRef list) { + int i = 0; + out << "["; + for(auto e : list) { + if (i++ > 0) + out << ", "; + out << e; + } + out << "]"; + return out; +} + +// WARNING: Template instantiation will NOT be willing to do an implicit +// conversions to get you to an c10::ArrayRef, which is why we need so +// many overloads. + +template +bool operator==(c10::ArrayRef a1, c10::ArrayRef a2) { + return a1.equals(a2); +} + +template +bool operator!=(c10::ArrayRef a1, c10::ArrayRef a2) { + return !a1.equals(a2); +} + +template +bool operator==(const std::vector& a1, c10::ArrayRef a2) { + return c10::ArrayRef(a1).equals(a2); +} + +template +bool operator!=(const std::vector& a1, c10::ArrayRef a2) { + return !c10::ArrayRef(a1).equals(a2); +} + +template +bool operator==(c10::ArrayRef a1, const std::vector& a2) { + return a1.equals(c10::ArrayRef(a2)); +} + +template +bool operator!=(c10::ArrayRef a1, const std::vector& a2) { + return !a1.equals(c10::ArrayRef(a2)); +} + +using IntArrayRef = ArrayRef; + +// This alias is deprecated because it doesn't make ownership +// semantics obvious. Use IntArrayRef instead! 
+C10_DEFINE_DEPRECATED_USING(IntList, ArrayRef) + +} // namespace c10 diff --git a/thirdparty/libtorch/include/c10/util/BFloat16-inl.h b/thirdparty/libtorch/include/c10/util/BFloat16-inl.h new file mode 100644 index 0000000000..ab366d000d --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/BFloat16-inl.h @@ -0,0 +1,267 @@ +#pragma once + +#include +#include + +namespace c10 { + +/// Constructors +inline C10_HOST_DEVICE BFloat16::BFloat16(float value) { + // RNE by default + x = detail::round_to_nearest_even(value); +} + +/// Implicit conversions +inline C10_HOST_DEVICE BFloat16::operator float() const { + return detail::f32_from_bits(x); +} + +/// Arithmetic + +inline C10_HOST_DEVICE BFloat16 operator+(const BFloat16& a, const BFloat16& b) { + return static_cast(a) + static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator-(const BFloat16& a, const BFloat16& b) { + return static_cast(a) - static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator*(const BFloat16& a, const BFloat16& b) { + return static_cast(a) * static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator/(const BFloat16& a, const BFloat16& b) { + return static_cast(a) / static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator-(const BFloat16& a) { + return -static_cast(a); +} + +inline C10_HOST_DEVICE BFloat16& operator+=(BFloat16& a, const BFloat16& b) { + a = a + b; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator-=(BFloat16& a, const BFloat16& b) { + a = a - b; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator*=(BFloat16& a, const BFloat16& b) { + a = a * b; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator/=(BFloat16& a, const BFloat16& b) { + a = a / b; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator|(BFloat16& a, const BFloat16& b) { + a.x = a.x | b.x; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator^(BFloat16& a, const BFloat16& b) { + a.x = a.x ^ b.x; + return a; +} + +inline C10_HOST_DEVICE BFloat16& operator&(BFloat16& a, const BFloat16& b) { + a.x = a.x & b.x; + return a; +} + +/// Arithmetic with floats + +inline C10_HOST_DEVICE float operator+(BFloat16 a, float b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE float operator-(BFloat16 a, float b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE float operator*(BFloat16 a, float b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE float operator/(BFloat16 a, float b) { + return static_cast(a) / b; +} + +inline C10_HOST_DEVICE float operator+(float a, BFloat16 b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE float operator-(float a, BFloat16 b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE float operator*(float a, BFloat16 b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE float operator/(float a, BFloat16 b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE float& operator+=(float& a, const BFloat16& b) { + return a += static_cast(b); +} +inline C10_HOST_DEVICE float& operator-=(float& a, const BFloat16& b) { + return a -= static_cast(b); +} +inline C10_HOST_DEVICE float& operator*=(float& a, const BFloat16& b) { + return a *= static_cast(b); +} +inline C10_HOST_DEVICE float& operator/=(float& a, const BFloat16& b) { + return a /= static_cast(b); +} + +/// Arithmetic with doubles + +inline C10_HOST_DEVICE double operator+(BFloat16 a, double b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE double operator-(BFloat16 a, double b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE double 
operator*(BFloat16 a, double b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE double operator/(BFloat16 a, double b) { + return static_cast(a) / b; +} + +inline C10_HOST_DEVICE double operator+(double a, BFloat16 b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE double operator-(double a, BFloat16 b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE double operator*(double a, BFloat16 b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE double operator/(double a, BFloat16 b) { + return a / static_cast(b); +} + +/// Arithmetic with ints + +inline C10_HOST_DEVICE BFloat16 operator+(BFloat16 a, int b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator-(BFloat16 a, int b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator*(BFloat16 a, int b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator/(BFloat16 a, int b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator+(int a, BFloat16 b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE BFloat16 operator-(int a, BFloat16 b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE BFloat16 operator*(int a, BFloat16 b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE BFloat16 operator/(int a, BFloat16 b) { + return static_cast(a) / b; +} + +//// Arithmetic with int64_t + +inline C10_HOST_DEVICE BFloat16 operator+(BFloat16 a, int64_t b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator-(BFloat16 a, int64_t b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator*(BFloat16 a, int64_t b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE BFloat16 operator/(BFloat16 a, int64_t b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE BFloat16 operator+(int64_t a, BFloat16 b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE BFloat16 operator-(int64_t a, BFloat16 b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE BFloat16 operator*(int64_t a, BFloat16 b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE BFloat16 operator/(int64_t a, BFloat16 b) { + return static_cast(a) / b; +} + +} // namespace c10 + +namespace std { + +template <> +class numeric_limits { +public: + static constexpr bool is_signed = true; + static constexpr bool is_specialized = true; + static constexpr bool is_integer = false; + static constexpr bool is_exact = false; + static constexpr bool has_infinity = true; + static constexpr bool has_quiet_NaN = true; + static constexpr bool has_signaling_NaN = true; + static constexpr auto has_denorm = numeric_limits::has_denorm; + static constexpr auto has_denorm_loss = + numeric_limits::has_denorm_loss; + static constexpr auto round_style = numeric_limits::round_style; + static constexpr bool is_iec559 = false; + static constexpr bool is_bounded = true; + static constexpr bool is_modulo = false; + static constexpr int digits = 8; + static constexpr int digits10 = 2; + static constexpr int max_digits10 = 4; + static constexpr int radix = 2; + static constexpr int min_exponent = -125; + static constexpr int min_exponent10 = -37; + static constexpr int max_exponent = 128; + static constexpr int max_exponent10 = 38; + static constexpr auto traps = numeric_limits::traps; + static constexpr auto tinyness_before = + numeric_limits::tinyness_before; + + static constexpr c10::BFloat16 min() { + return c10::BFloat16(0x0080, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 lowest() { + return 
c10::BFloat16(0xFF7F, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 max() { + return c10::BFloat16(0x7F7F, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 epsilon() { + return c10::BFloat16(0x3C00, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 round_error() { + return c10::BFloat16(0x3F00, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 infinity() { + return c10::BFloat16(0x7F80, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 quiet_NaN() { + return c10::BFloat16(0x7FC0, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 signaling_NaN() { + return c10::BFloat16(0x7F80, c10::BFloat16::from_bits()); + } + static constexpr c10::BFloat16 denorm_min() { + return c10::BFloat16(0x0001, c10::BFloat16::from_bits()); + } +}; + +/// Used by vec256::map +inline c10::BFloat16 exp(c10::BFloat16 a) { return std::exp(float(a)); } +inline c10::BFloat16 log(c10::BFloat16 a) { return std::log(float(a)); } + +} // namespace std diff --git a/thirdparty/libtorch/include/c10/util/BFloat16.h b/thirdparty/libtorch/include/c10/util/BFloat16.h new file mode 100644 index 0000000000..f66fb5971e --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/BFloat16.h @@ -0,0 +1,86 @@ +#pragma once + +// Defines the bloat16 type (brain floating-point). This representation uses +// 1 bit for the sign, 8 bits for the exponent and 7 bits for the mantissa. + +#include +#include +#include + +namespace c10 { + +namespace detail { + inline C10_HOST_DEVICE float f32_from_bits(uint16_t src) { + float res = 0; + uint32_t tmp = src; + tmp <<= 16; + +#ifdef __HIP_PLATFORM_HCC__ + float* tempRes; + + // We should be using memcpy in order to respect the strict aliasing rule + // but it fails in the HIP environment. + tempRes = reinterpret_cast(&tmp); + res = *tempRes; +#else + std::memcpy(&res, &tmp, sizeof(tmp)); +#endif + + return res; + } + + inline C10_HOST_DEVICE uint16_t bits_from_f32(float src) { + uint32_t res = 0; + +#ifdef __HIP_PLATFORM_HCC__ + // We should be using memcpy in order to respect the strict aliasing rule + // but it fails in the HIP environment. 
+ uint32_t* tempRes = reinterpret_cast(&src); + res = *tempRes; +#else + std::memcpy(&res, &src, sizeof(res)); +#endif + + return res >> 16; + } + + inline C10_HOST_DEVICE uint16_t round_to_nearest_even(float src) { + if (std::isnan(src)) { + return 0x7FC0; + } else { + union { + uint32_t U32; + float F32; + }; + + F32 = src; + uint32_t rounding_bias = ((U32 >> 16) & 1) + 0x7FFF; + return static_cast((U32 + rounding_bias) >> 16); + } + } +} // namespace detail + +struct alignas(2) BFloat16 { + uint16_t x; + + // HIP wants __host__ __device__ tag, CUDA does not +#ifdef __HIP_PLATFORM_HCC__ + C10_HOST_DEVICE BFloat16() = default; +#else + BFloat16() = default; +#endif + + struct from_bits_t {}; + static constexpr from_bits_t from_bits() { + return from_bits_t(); + } + + constexpr C10_HOST_DEVICE BFloat16(unsigned short bits, from_bits_t) : x(bits){}; + inline C10_HOST_DEVICE BFloat16(float value); + inline C10_HOST_DEVICE operator float() const; +}; + +} // namespace c10 + + +#include diff --git a/thirdparty/libtorch/include/c10/util/Backtrace.h b/thirdparty/libtorch/include/c10/util/Backtrace.h new file mode 100644 index 0000000000..21afac0eb6 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Backtrace.h @@ -0,0 +1,17 @@ +#ifndef C10_UTIL_BACKTRACE_H_ +#define C10_UTIL_BACKTRACE_H_ + +#include +#include +#include + +#include "c10/macros/Macros.h" + +namespace c10 { +C10_API std::string get_backtrace( + size_t frames_to_skip = 0, + size_t maximum_number_of_frames = 64, + bool skip_python_frames = true); +} // namespace c10 + +#endif // C10_UTIL_BACKTRACE_H_ diff --git a/thirdparty/libtorch/include/c10/util/C++17.h b/thirdparty/libtorch/include/c10/util/C++17.h new file mode 100644 index 0000000000..9b8458cce2 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/C++17.h @@ -0,0 +1,308 @@ +#pragma once +#ifndef C10_UTIL_CPP17_H_ +#define C10_UTIL_CPP17_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + + +#if !defined(__clang__) && !defined(_MSC_VER) && defined(__GNUC__) && \ + __GNUC__ < 5 +#error "You're trying to build PyTorch with a too old version of GCC. We need GCC 5 or later." 
+#endif + +/* + * This header adds some polyfills with C++14 and C++17 functionality + */ + +namespace c10 { namespace guts { + + + +#ifdef __cpp_lib_transformation_trait_aliases +template using conditional_t = std::conditional_t; +template using enable_if_t = std::enable_if_t; +template using add_lvalue_reference_t = std::add_lvalue_reference_t; +template using remove_reference_t = std::remove_reference_t; +template using remove_cv_t = std::remove_cv_t; +template using result_of_t = std::result_of_t; +template using decay_t = std::decay_t; +template using remove_const_t = std::remove_const_t; +template using remove_pointer_t = std::remove_pointer_t; +template using common_type_t = std::common_type_t; +#else +template using conditional_t = typename std::conditional::type; +template using enable_if_t = typename std::enable_if::type; +template using add_lvalue_reference_t = typename std::add_lvalue_reference::type; +template using remove_reference_t = typename std::remove_reference::type; +template using remove_cv_t = typename std::remove_cv::type; +template using result_of_t = typename std::result_of::type; +template using decay_t = typename std::decay::type; +template using remove_const_t = typename std::remove_const::type; +template using remove_pointer_t = typename std::remove_pointer::type; +template using common_type_t = typename std::common_type::type; +#endif + + + + +// C++11 doesn't have constexpr std::move / std::forward. +// Implementation taken from libc++. +template +constexpr inline guts::remove_reference_t&& move(T&& t) noexcept { + return static_cast&&>(t); +} +template +constexpr inline T&& forward(guts::remove_reference_t& t) noexcept { + return static_cast(t); +} +template +constexpr inline T&& forward(guts::remove_reference_t&& t) noexcept { + static_assert(!std::is_lvalue_reference::value, + "can not forward an rvalue as an lvalue."); + return static_cast(t); +} + + + + +#if __cplusplus >= 201402L || defined(__cpp_lib_make_unique) && __cpp_lib_make_unique >= 201304L || \ + (defined(__ANDROID__) && __ANDROID__ && __cplusplus >= 201300L) || defined(_MSC_VER) && _MSC_VER >= 1900 + +/* using override */ using std::make_unique; + +#else + +// Implementation taken from folly +template +typename std::enable_if::value, std::unique_ptr>::type +make_unique(Args&&... args) { + return std::unique_ptr(new T(c10::guts::forward(args)...)); +} +// Allows 'make_unique(10)'. (N3690 s20.9.1.4 p3-4) +template +typename std::enable_if::value, std::unique_ptr>::type +make_unique(const size_t n) { + return std::unique_ptr(new typename std::remove_extent::type[n]()); +} +// Disallows 'make_unique()'. (N3690 s20.9.1.4 p5) +template +typename std::enable_if::value != 0, std::unique_ptr>::type +make_unique(Args&&...) = delete; + +#endif + +template +typename std::enable_if::value && !std::is_array::value && std::is_base_of::value, std::unique_ptr>::type +make_unique_base(Args&&... 
args) { + return std::unique_ptr(new Child(c10::guts::forward(args)...)); +} + + + +#ifdef __cpp_lib_integer_sequence + +template using integer_sequence = std::integer_sequence; +template using index_sequence = std::index_sequence; +template using make_integer_sequence = std::make_integer_sequence; +template using make_index_sequence = std::make_index_sequence; +template using index_sequence_for = std::index_sequence_for; + +#else + +template struct integer_sequence { + using value_type = T; + static constexpr std::size_t size() noexcept {return sizeof...(Ints);} +}; +template using index_sequence = integer_sequence; +namespace detail { + template + struct make_integer_sequence_ { + using type = typename make_integer_sequence_::type; + }; + template + struct make_integer_sequence_ { + using type = integer_sequence; + }; +} +template using make_integer_sequence = typename detail::make_integer_sequence_::type; +template using make_index_sequence = make_integer_sequence; +static_assert(std::is_same, make_index_sequence<0>>::value, ""); +static_assert(std::is_same, make_index_sequence<3>>::value, ""); +template using index_sequence_for = make_index_sequence; + +#endif + + + + +#ifdef __cpp_lib_logical_traits + +template +using conjunction = std::conjunction; +template +using disjunction = std::disjunction; +template +using bool_constant = std::bool_constant; +template +using negation = std::negation; + +#else + +// Implementation taken from http://en.cppreference.com/w/cpp/types/conjunction +template struct conjunction : std::true_type { }; +template struct conjunction : B1 { }; +template +struct conjunction + : conditional_t, B1> {}; + +// Implementation taken from http://en.cppreference.com/w/cpp/types/disjunction +template struct disjunction : std::false_type { }; +template struct disjunction : B1 { }; +template +struct disjunction + : conditional_t> { }; + +// Implementation taken from http://en.cppreference.com/w/cpp/types/integral_constant +template +using bool_constant = std::integral_constant; + +// Implementation taken from http://en.cppreference.com/w/cpp/types/negation +template +struct negation : bool_constant { }; + +#endif + + + +#ifdef __cpp_lib_void_t + +template using void_t = std::void_t; + +#else + +// Implementation taken from http://en.cppreference.com/w/cpp/types/void_t +// (it takes CWG1558 into account and also works for older compilers) +template struct make_void { typedef void type;}; +template using void_t = typename make_void::type; + +#endif + + + +#ifdef __cpp_lib_apply + +template +inline constexpr decltype(auto) apply(F&& f, Tuple&& t) { + return std::apply(std::forward(f), std::forward(t)); +} + +#else + +// Implementation from http://en.cppreference.com/w/cpp/utility/apply (but modified) +// TODO This is an incomplete implementation of std::apply, not working for member functions. +namespace detail { +template +#if defined(_MSC_VER) +// MSVC has a problem with the decltype() return type, but it also doesn't need it +// Also, nvcc on Windows needs C10_HOST_DEVICE here. 
+C10_HOST_DEVICE constexpr auto apply_impl(F&& f, Tuple&& t, guts::index_sequence) +#else +// GCC/Clang need the decltype() return type and rocm doesn't like the C10_HOST_DEVICE +constexpr auto apply_impl(F&& f, Tuple&& t, guts::index_sequence) +-> decltype(c10::guts::forward(f)(std::get(c10::guts::forward(t))...)) +#endif +{ + return c10::guts::forward(f)(std::get(c10::guts::forward(t))...); +} +} // namespace detail + +template +#if defined(_MSC_VER) +C10_HOST_DEVICE // rocm doesn't like the C10_HOST_DEVICE +#endif +constexpr auto apply(F&& f, Tuple&& t) -> decltype(detail::apply_impl( + c10::guts::forward(f), c10::guts::forward(t), + guts::make_index_sequence>::value>{})) +{ + return detail::apply_impl( + c10::guts::forward(f), c10::guts::forward(t), + guts::make_index_sequence>::value>{}); +} + +#endif + + + + +template +typename std::enable_if< + std::is_member_pointer::type>::value, + typename std::result_of::type>::type +invoke(Functor&& f, Args&&... args) { + return std::mem_fn(f)(std::forward(args)...); +} + +template +typename std::enable_if< + !std::is_member_pointer::type>::value, + typename std::result_of::type>::type +invoke(Functor&& f, Args&&... args) { + return std::forward(f)(std::forward(args)...); +} + + + +// GCC 4.8 doesn't define std::to_string, even though that's in C++11. Let's define it. +namespace detail { +class DummyClassForToString final {}; +}}} +namespace std { +// We use SFINAE to detect if std::to_string exists for a type, but that only works +// if the function name is defined. So let's define a std::to_string for a dummy type. +// If you're getting an error here saying that this overload doesn't match your +// std::to_string() call, then you're calling std::to_string() but should be calling +// c10::guts::to_string(). +inline std::string to_string(c10::guts::detail::DummyClassForToString) { return ""; } + +} +namespace c10 { namespace guts { namespace detail { + +template +struct to_string_ final { + static std::string call(T value) { + std::ostringstream str; + str << value; + return str.str(); + } +}; +// If a std::to_string exists, use that instead +template +struct to_string_()))>> final { + static std::string call(T value) { + return std::to_string(value); + } +}; +} +template inline std::string to_string(T value) { + return detail::to_string_::call(value); +} + +template +constexpr const T& min(const T& a, const T& b) { + return (b < a) ? b : a; +} + +template +constexpr const T& max(const T& a, const T& b) { + return (a < b) ? 
b : a; +} +}} + +#endif // C10_UTIL_CPP17_H_ diff --git a/thirdparty/libtorch/include/c10/util/Complex.h b/thirdparty/libtorch/include/c10/util/Complex.h new file mode 100644 index 0000000000..c35e7c7ab9 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Complex.h @@ -0,0 +1,17 @@ +#pragma once + +#include + + +namespace std { + +template <> +class numeric_limits> : public numeric_limits {}; + +template <> +class numeric_limits> : public numeric_limits {}; + +template <> +class numeric_limits : public numeric_limits {}; + +} // namespace std diff --git a/thirdparty/libtorch/include/c10/util/ConstexprCrc.h b/thirdparty/libtorch/include/c10/util/ConstexprCrc.h new file mode 100644 index 0000000000..d61091da80 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/ConstexprCrc.h @@ -0,0 +1,137 @@ +#pragma once + +#include +#include +#include + +namespace c10 { +namespace util { + +namespace detail { +constexpr uint64_t crc64_table[] = { + 0x0000000000000000, 0x7ad870c830358979, 0xf5b0e190606b12f2, + 0x8f689158505e9b8b, 0xc038e5739841b68f, 0xbae095bba8743ff6, + 0x358804e3f82aa47d, 0x4f50742bc81f2d04, 0xab28ecb46814fe75, + 0xd1f09c7c5821770c, 0x5e980d24087fec87, 0x24407dec384a65fe, + 0x6b1009c7f05548fa, 0x11c8790fc060c183, 0x9ea0e857903e5a08, + 0xe478989fa00bd371, 0x7d08ff3b88be6f81, 0x07d08ff3b88be6f8, + 0x88b81eabe8d57d73, 0xf2606e63d8e0f40a, 0xbd301a4810ffd90e, + 0xc7e86a8020ca5077, 0x4880fbd87094cbfc, 0x32588b1040a14285, + 0xd620138fe0aa91f4, 0xacf86347d09f188d, 0x2390f21f80c18306, + 0x594882d7b0f40a7f, 0x1618f6fc78eb277b, 0x6cc0863448deae02, + 0xe3a8176c18803589, 0x997067a428b5bcf0, 0xfa11fe77117cdf02, + 0x80c98ebf2149567b, 0x0fa11fe77117cdf0, 0x75796f2f41224489, + 0x3a291b04893d698d, 0x40f16bccb908e0f4, 0xcf99fa94e9567b7f, + 0xb5418a5cd963f206, 0x513912c379682177, 0x2be1620b495da80e, + 0xa489f35319033385, 0xde51839b2936bafc, 0x9101f7b0e12997f8, + 0xebd98778d11c1e81, 0x64b116208142850a, 0x1e6966e8b1770c73, + 0x8719014c99c2b083, 0xfdc17184a9f739fa, 0x72a9e0dcf9a9a271, + 0x08719014c99c2b08, 0x4721e43f0183060c, 0x3df994f731b68f75, + 0xb29105af61e814fe, 0xc849756751dd9d87, 0x2c31edf8f1d64ef6, + 0x56e99d30c1e3c78f, 0xd9810c6891bd5c04, 0xa3597ca0a188d57d, + 0xec09088b6997f879, 0x96d1784359a27100, 0x19b9e91b09fcea8b, + 0x636199d339c963f2, 0xdf7adabd7a6e2d6f, 0xa5a2aa754a5ba416, + 0x2aca3b2d1a053f9d, 0x50124be52a30b6e4, 0x1f423fcee22f9be0, + 0x659a4f06d21a1299, 0xeaf2de5e82448912, 0x902aae96b271006b, + 0x74523609127ad31a, 0x0e8a46c1224f5a63, 0x81e2d7997211c1e8, + 0xfb3aa75142244891, 0xb46ad37a8a3b6595, 0xceb2a3b2ba0eecec, + 0x41da32eaea507767, 0x3b024222da65fe1e, 0xa2722586f2d042ee, + 0xd8aa554ec2e5cb97, 0x57c2c41692bb501c, 0x2d1ab4dea28ed965, + 0x624ac0f56a91f461, 0x1892b03d5aa47d18, 0x97fa21650afae693, + 0xed2251ad3acf6fea, 0x095ac9329ac4bc9b, 0x7382b9faaaf135e2, + 0xfcea28a2faafae69, 0x8632586aca9a2710, 0xc9622c4102850a14, + 0xb3ba5c8932b0836d, 0x3cd2cdd162ee18e6, 0x460abd1952db919f, + 0x256b24ca6b12f26d, 0x5fb354025b277b14, 0xd0dbc55a0b79e09f, + 0xaa03b5923b4c69e6, 0xe553c1b9f35344e2, 0x9f8bb171c366cd9b, + 0x10e3202993385610, 0x6a3b50e1a30ddf69, 0x8e43c87e03060c18, + 0xf49bb8b633338561, 0x7bf329ee636d1eea, 0x012b592653589793, + 0x4e7b2d0d9b47ba97, 0x34a35dc5ab7233ee, 0xbbcbcc9dfb2ca865, + 0xc113bc55cb19211c, 0x5863dbf1e3ac9dec, 0x22bbab39d3991495, + 0xadd33a6183c78f1e, 0xd70b4aa9b3f20667, 0x985b3e827bed2b63, + 0xe2834e4a4bd8a21a, 0x6debdf121b863991, 0x1733afda2bb3b0e8, + 0xf34b37458bb86399, 0x8993478dbb8deae0, 0x06fbd6d5ebd3716b, + 0x7c23a61ddbe6f812, 0x3373d23613f9d516, 
0x49aba2fe23cc5c6f, + 0xc6c333a67392c7e4, 0xbc1b436e43a74e9d, 0x95ac9329ac4bc9b5, + 0xef74e3e19c7e40cc, 0x601c72b9cc20db47, 0x1ac40271fc15523e, + 0x5594765a340a7f3a, 0x2f4c0692043ff643, 0xa02497ca54616dc8, + 0xdafce7026454e4b1, 0x3e847f9dc45f37c0, 0x445c0f55f46abeb9, + 0xcb349e0da4342532, 0xb1eceec59401ac4b, 0xfebc9aee5c1e814f, + 0x8464ea266c2b0836, 0x0b0c7b7e3c7593bd, 0x71d40bb60c401ac4, + 0xe8a46c1224f5a634, 0x927c1cda14c02f4d, 0x1d148d82449eb4c6, + 0x67ccfd4a74ab3dbf, 0x289c8961bcb410bb, 0x5244f9a98c8199c2, + 0xdd2c68f1dcdf0249, 0xa7f41839ecea8b30, 0x438c80a64ce15841, + 0x3954f06e7cd4d138, 0xb63c61362c8a4ab3, 0xcce411fe1cbfc3ca, + 0x83b465d5d4a0eece, 0xf96c151de49567b7, 0x76048445b4cbfc3c, + 0x0cdcf48d84fe7545, 0x6fbd6d5ebd3716b7, 0x15651d968d029fce, + 0x9a0d8ccedd5c0445, 0xe0d5fc06ed698d3c, 0xaf85882d2576a038, + 0xd55df8e515432941, 0x5a3569bd451db2ca, 0x20ed197575283bb3, + 0xc49581ead523e8c2, 0xbe4df122e51661bb, 0x3125607ab548fa30, + 0x4bfd10b2857d7349, 0x04ad64994d625e4d, 0x7e7514517d57d734, + 0xf11d85092d094cbf, 0x8bc5f5c11d3cc5c6, 0x12b5926535897936, + 0x686de2ad05bcf04f, 0xe70573f555e26bc4, 0x9ddd033d65d7e2bd, + 0xd28d7716adc8cfb9, 0xa85507de9dfd46c0, 0x273d9686cda3dd4b, + 0x5de5e64efd965432, 0xb99d7ed15d9d8743, 0xc3450e196da80e3a, + 0x4c2d9f413df695b1, 0x36f5ef890dc31cc8, 0x79a59ba2c5dc31cc, + 0x037deb6af5e9b8b5, 0x8c157a32a5b7233e, 0xf6cd0afa9582aa47, + 0x4ad64994d625e4da, 0x300e395ce6106da3, 0xbf66a804b64ef628, + 0xc5bed8cc867b7f51, 0x8aeeace74e645255, 0xf036dc2f7e51db2c, + 0x7f5e4d772e0f40a7, 0x05863dbf1e3ac9de, 0xe1fea520be311aaf, + 0x9b26d5e88e0493d6, 0x144e44b0de5a085d, 0x6e963478ee6f8124, + 0x21c640532670ac20, 0x5b1e309b16452559, 0xd476a1c3461bbed2, + 0xaeaed10b762e37ab, 0x37deb6af5e9b8b5b, 0x4d06c6676eae0222, + 0xc26e573f3ef099a9, 0xb8b627f70ec510d0, 0xf7e653dcc6da3dd4, + 0x8d3e2314f6efb4ad, 0x0256b24ca6b12f26, 0x788ec2849684a65f, + 0x9cf65a1b368f752e, 0xe62e2ad306bafc57, 0x6946bb8b56e467dc, + 0x139ecb4366d1eea5, 0x5ccebf68aecec3a1, 0x2616cfa09efb4ad8, + 0xa97e5ef8cea5d153, 0xd3a62e30fe90582a, 0xb0c7b7e3c7593bd8, + 0xca1fc72bf76cb2a1, 0x45775673a732292a, 0x3faf26bb9707a053, + 0x70ff52905f188d57, 0x0a2722586f2d042e, 0x854fb3003f739fa5, + 0xff97c3c80f4616dc, 0x1bef5b57af4dc5ad, 0x61372b9f9f784cd4, + 0xee5fbac7cf26d75f, 0x9487ca0fff135e26, 0xdbd7be24370c7322, + 0xa10fceec0739fa5b, 0x2e675fb4576761d0, 0x54bf2f7c6752e8a9, + 0xcdcf48d84fe75459, 0xb71738107fd2dd20, 0x387fa9482f8c46ab, + 0x42a7d9801fb9cfd2, 0x0df7adabd7a6e2d6, 0x772fdd63e7936baf, + 0xf8474c3bb7cdf024, 0x829f3cf387f8795d, 0x66e7a46c27f3aa2c, + 0x1c3fd4a417c62355, 0x935745fc4798b8de, 0xe98f353477ad31a7, + 0xa6df411fbfb21ca3, 0xdc0731d78f8795da, 0x536fa08fdfd90e51, + 0x29b7d047efec8728, +}; + +inline C10_HOST_CONSTEXPR uint64_t +crc64impl(uint64_t accumulator, const char* data, size_t size) { +#if __cpp_constexpr >= 201304 + // if we are in C++14, just use a for loop. This compiles faster. + for (size_t i = 0; i < size; ++i) { + accumulator = + crc64_table[(accumulator ^ data[i]) & 0xFF] ^ (accumulator >> 8); + } + return accumulator; +#else + // if we are in C++11, we need to do it recursively because of constexpr + // restrictions. + return (size == 0) + ? 
accumulator + : crc64impl( + crc64_table[(accumulator ^ *data) & 0xFF] ^ (accumulator >> 8), + data + 1, + size - 1); +#endif +} +} // namespace detail + +struct crc64_t final : IdWrapper { + constexpr crc64_t(uint64_t checksum) : IdWrapper(checksum) {} + constexpr uint64_t checksum() const { + return this->underlyingId(); + } +}; + +// CRC64 with Jones coefficients and an init value of 0. +inline C10_HOST_CONSTEXPR crc64_t crc64(const char* data, size_t size) { + return crc64_t{detail::crc64impl(0, data, size)}; +} +} // namespace util +} // namespace c10 + +// Allow usage of crc64_t in std::unordered_set +C10_DEFINE_HASH_FOR_IDWRAPPER(c10::util::crc64_t); diff --git a/thirdparty/libtorch/include/c10/util/Deprecated.h b/thirdparty/libtorch/include/c10/util/Deprecated.h new file mode 100644 index 0000000000..59770ac60d --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Deprecated.h @@ -0,0 +1,101 @@ +#pragma once + +/** + * This file provides portable macros for marking declarations + * as deprecated. You should generally use C10_DEPRECATED, + * except when marking 'using' declarations as deprecated, + * in which case you should use C10_DEFINE_DEPRECATED_USING + * (due to portability concerns). + */ + +// Sample usage: +// +// C10_DEPRECATED void bad_func(); +// struct C10_DEPRECATED BadStruct { +// ... +// }; + +// NB: In PyTorch, this block is not actually used at the moment +// because we are C++11. However, aspirationally, we would like +// to use this version, because as of C++14 it is the correct and +// portable way to declare something deprecated. +// NB: __cplusplus doesn't work for MSVC, so for now MSVC always uses +// the "__declspec(deprecated)" implementation and not the C++14 "[[deprecated]]" +// attribute. We tried enabling "[[deprecated]]" for C++14 on MSVC, but +// ran into issues with some older MSVC versions. +#if (defined(__cplusplus) && __cplusplus >= 201402L) +# define C10_DEPRECATED [[deprecated]] +# define C10_DEPRECATED_MESSAGE(message) [[deprecated(message)]] +#elif defined(__GNUC__) +# define C10_DEPRECATED __attribute__((deprecated)) +// TODO Is there some way to implement this? +# define C10_DEPRECATED_MESSAGE(message) __attribute__((deprecated)) + +#elif defined(_MSC_VER) +# define C10_DEPRECATED __declspec(deprecated) +# define C10_DEPRECATED_MESSAGE(message) __declspec(deprecated(message)) +#else +# warning "You need to implement C10_DEPRECATED for this compiler" +# define C10_DEPRECATED +#endif + + +// Sample usage: +// +// C10_DEFINE_DEPRECATED_USING(BadType, int) +// +// which is the portable version of +// +// using BadType [[deprecated]] = int; + +// technically [[deprecated]] syntax is from c++14 standard, but it works in +// many compilers. +#if defined(__has_cpp_attribute) +#if __has_cpp_attribute(deprecated) +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName [[deprecated]] = TypeThingy; +#endif +#endif + +#if defined(_MSC_VER) +#if defined(__CUDACC__) +// neither [[deprecated]] nor __declspec(deprecated) work on nvcc on Windows; +// you get the error: +// +// error: attribute does not apply to any entity +// +// So we just turn the macro off in this case. 
+#if defined(C10_DEFINE_DEPRECATED_USING) +# undef C10_DEFINE_DEPRECATED_USING +#endif +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName = TypeThingy; +#else +// [[deprecated]] does work in windows without nvcc, though msc doesn't support +// `__has_cpp_attribute` when c++14 is supported, otherwise __declspec(deprecated) +// is used as the alternative. +#ifndef C10_DEFINE_DEPRECATED_USING +#if defined(_MSVC_LANG) && _MSVC_LANG >= 201402L +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName [[deprecated]] = TypeThingy; +#else +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName = __declspec(deprecated) TypeThingy; +#endif +#endif +#endif +#endif + +#if !defined(C10_DEFINE_DEPRECATED_USING) && defined(__GNUC__) +// nvcc has a bug where it doesn't understand __attribute__((deprecated)) +// declarations even when the host compiler supports it. We'll only use this gcc +// attribute when not cuda, and when using a GCC compiler that doesn't support +// the c++14 syntax we checked for above (availble in __GNUC__ >= 5) +#if !defined(__CUDACC__) +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName __attribute__((deprecated)) = TypeThingy; +#else +// using cuda + gcc < 5, neither deprecated syntax is available so turning off. +# define C10_DEFINE_DEPRECATED_USING(TypeName, TypeThingy) using TypeName = TypeThingy; +#endif +#endif + +#if ! defined(C10_DEFINE_DEPRECATED_USING) +# warning "You need to implement C10_DEFINE_DEPRECATED_USING for this compiler" +# define C10_DEFINE_DEPRECATED_USING +#endif diff --git a/thirdparty/libtorch/include/c10/util/Exception.h b/thirdparty/libtorch/include/c10/util/Exception.h new file mode 100644 index 0000000000..e1effd1704 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Exception.h @@ -0,0 +1,409 @@ +#ifndef C10_UTIL_EXCEPTION_H_ +#define C10_UTIL_EXCEPTION_H_ + +#include "c10/macros/Macros.h" +#include "c10/util/StringUtil.h" +#include "c10/util/Deprecated.h" + +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) && _MSC_VER <= 1900 +#define __func__ __FUNCTION__ +#endif + +namespace c10 { + +/// The primary ATen error class. +/// Provides a complete error message with source location information via +/// `what()`, and a more concise message via `what_without_backtrace()`. +/// Don't throw this directly; use TORCH_CHECK/TORCH_INTERNAL_ASSERT instead. +/// +/// NB: c10::Error is handled specially by the default torch to suppress the +/// backtrace, see torch/csrc/Exceptions.h +class C10_API Error : public std::exception { + std::vector msg_stack_; + std::string backtrace_; + + // These two are derived fields from msg_stack_ and backtrace_, but we need + // fields for the strings so that we can return a const char* (as the + // signature of std::exception requires). + std::string msg_; + std::string msg_without_backtrace_; + + // This is a little debugging trick: you can stash a relevant pointer + // in caller, and then when you catch the exception, you can compare + // against pointers you have on hand to get more information about + // where the exception came from. In Caffe2, this is used to figure + // out which operator raised an exception. 
+ const void* caller_; + + public: + Error( + const std::string& msg, + const std::string& backtrace, + const void* caller = nullptr); + Error(SourceLocation source_location, const std::string& msg); + Error( + const char* file, + const uint32_t line, + const char* condition, + const std::string& msg, + const std::string& backtrace, + const void* caller = nullptr); + + void AppendMessage(const std::string& msg); + + // Compute the full message from msg_ and msg_without_backtrace_ + // TODO: Maybe this should be private + std::string msg() const; + std::string msg_without_backtrace() const; + + const std::vector& msg_stack() const { + return msg_stack_; + } + + /// Returns the complete error message, including the source location. + const char* what() const noexcept override { + return msg_.c_str(); + } + + const void* caller() const noexcept { + return caller_; + } + + /// Returns only the error message string, without source location. + const char* what_without_backtrace() const noexcept { + return msg_without_backtrace_.c_str(); + } +}; + +class C10_API WarningHandler { + public: + virtual ~WarningHandler() noexcept(false) {} + /// The default warning handler. Prints the message to stderr. + virtual void process( + const SourceLocation& source_location, + const std::string& msg); +}; + +namespace Warning { + +/// Issue a warning with a given message. Dispatched to the current +/// warning handler. +C10_API void warn(SourceLocation source_location, const std::string& msg); +/// Sets the global warning handler. This is not thread-safe, so it should +/// generally be called once during initialization or while holding the GIL +/// for programs that use python. +/// User is responsible for keeping the WarningHandler alive until +/// it is not needed. +C10_API void set_warning_handler(WarningHandler* handler) noexcept(true); +/// Gets the global warning handler. +C10_API WarningHandler* get_warning_handler() noexcept(true); + +} // namespace Warning + +// Used in ATen for out-of-bound indices that can reasonably only be detected +// lazily inside a kernel (See: advanced indexing). These turn into +// IndexError when they cross to Python. +class C10_API IndexError : public Error { + using Error::Error; +}; + + +// A utility function to return an exception std::string by prepending its +// exception type before its what() content +C10_API std::string GetExceptionString(const std::exception& e); + +namespace detail { + +// Return x if it is non-empty; otherwise return y. +inline std::string if_empty_then(std::string x, std::string y) { + if (x.empty()) { + return y; + } else { + return x; + } +} + +} + + +} // namespace c10 + +// Private helper macro for implementing TORCH_INTERNAL_ASSERT and TORCH_CHECK +// +// Note: In the debug build With MSVC, __LINE__ might be of long type (a.k.a int32_t), +// which is different from the definition of `SourceLocation` that requires +// unsigned int (a.k.a uint32_t) and may cause a compile error with the message: +// error C2397: conversion from 'long' to 'uint32_t' requires a narrowing conversion +// Here the static cast is used to pass the build. +// if this is used inside a lambda the __func__ macro expands to operator(), +// which isn't very useful, but hard to fix in a macro so suppressing the warning. +#define C10_THROW_ERROR(err_type, msg) \ + throw ::c10::err_type({__func__, __FILE__, static_cast(__LINE__)}, msg) + +// Private helper macro for workaround MSVC misexpansion of nested macro +// invocations involving __VA_ARGS__. 
See +// https://stackoverflow.com/questions/5134523/msvc-doesnt-expand-va-args-correctly +#define C10_EXPAND_MSVC_WORKAROUND(x) x + +// On nvcc, C10_UNLIKELY thwarts missing return statement analysis. In cases +// where the unlikely expression may be a constant, use this macro to ensure +// return statement analysis keeps working (at the cost of not getting the +// likely/unlikely annotation on nvcc). https://github.com/pytorch/pytorch/issues/21418 +// +// Currently, this is only used in the error reporting macros below. If you +// want to use it more generally, move me to Macros.h +// +// TODO: Brian Vaughan observed that we might be able to get this to work on nvcc +// by writing some sort of C++ overload that distinguishes constexpr inputs +// from non-constexpr. Since there isn't any evidence that losing C10_UNLIKELY +// in nvcc is causing us perf problems, this is not yet implemented, but this +// might be an interesting piece of C++ code for an intrepid bootcamper to +// write. +#if defined(__CUDACC__) +#define C10_UNLIKELY_OR_CONST(e) e +#else +#define C10_UNLIKELY_OR_CONST(e) C10_UNLIKELY(e) +#endif + + +// ---------------------------------------------------------------------------- +// Error reporting macros +// ---------------------------------------------------------------------------- + +// A utility macro to provide assert()-like functionality; that is, enforcement +// of internal invariants in code. It supports an arbitrary number of extra +// arguments (evaluated only on failure), which will be printed in the assert +// failure message using operator<< (this is useful to print some variables +// which may be useful for debugging.) +// +// Usage: +// TORCH_INTERNAL_ASSERT(should_be_true); +// TORCH_INTERNAL_ASSERT(x == 0, "x = ", x); +// +// Assuming no bugs in PyTorch, the conditions tested by this macro should +// always be true; e.g., it should be possible to disable all of these +// conditions without changing observable user behavior. If you would like to +// do error reporting for user input, please use TORCH_CHECK instead. +// +// NOTE: It is SAFE to use this macro in production code; on failure, this +// simply raises an exception, it does NOT unceremoniously quit the process +// (unlike assert()). +// +#ifdef STRIP_ERROR_MESSAGES +#define TORCH_INTERNAL_ASSERT(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(Error, \ + #cond " INTERNAL ASSERT FAILED at" \ + __FILE__ \ + ); \ + } +#else +#define TORCH_INTERNAL_ASSERT(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(Error, ::c10::str( \ + #cond " INTERNAL ASSERT FAILED at ", \ + __FILE__, \ + ":", \ + __LINE__, \ + ", please report a bug to PyTorch. ", \ + ::c10::str(__VA_ARGS__) \ + )); \ + } +#endif + +// A utility macro to make it easier to test for error conditions from user +// input. Like TORCH_INTERNAL_ASSERT, it supports an arbitrary number of extra +// arguments (evaluated only on failure), which will be printed in the error +// message using operator<< (e.g., you can pass any object which has +// operator<< defined. Most objects in PyTorch have these definitions!) +// +// Usage: +// TORCH_CHECK(should_be_true); // A default error message will be provided +// // in this case; but we recommend writing an +// // explicit error message, as it is more +// // user friendly. +// TORCH_CHECK(x == 0, "Expected x to be 0, but got ", x); +// +// On failure, this macro will raise an exception. 
If this exception propagates +// to Python, it will convert into a Python RuntimeError. +// +// NOTE: It is SAFE to use this macro in production code; on failure, this +// simply raises an exception, it does NOT unceremoniously quit the process +// (unlike CHECK() from glog.) +// +#ifdef STRIP_ERROR_MESSAGES +#define TORCH_CHECK(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(Error, \ + #cond " CHECK FAILED at " \ + __FILE__ \ + ); \ + } +#else +#define TORCH_CHECK(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(Error, \ + ::c10::detail::if_empty_then( \ + ::c10::str(__VA_ARGS__), \ + "Expected " #cond " to be true, but got false. " \ + "(Could this error message be improved? If so, " \ + "please report an enhancement request to PyTorch.)" \ + ) \ + ); \ + } +#endif +// TODO: We're going to get a lot of similar looking string literals +// this way; check if this actually affects binary size. + +// Like TORCH_CHECK, but raises IndexErrors instead of Errors. +#ifdef STRIP_ERROR_MESSAGES +#define TORCH_CHECK_INDEX(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(Error, \ + #cond " INDEX CHECK FAILED at " \ + __FILE__ \ + ); \ + } +#else +#define TORCH_CHECK_INDEX(cond, ...) \ + if (C10_UNLIKELY_OR_CONST(!(cond))) { \ + C10_THROW_ERROR(IndexError, \ + ::c10::detail::if_empty_then( \ + ::c10::str(__VA_ARGS__), \ + "Expected " #cond " to be true, but got false. " \ + "(Could this error message be improved? If so, " \ + "please report an enhancement request to PyTorch.)" \ + ) \ + ); \ + } +#endif + + +// Report a warning to the user. Accepts an arbitrary number of extra +// arguments which are concatenated into the warning message using operator<< +// +#define TORCH_WARN(...) \ + ::c10::Warning::warn({__func__, __FILE__, static_cast(__LINE__)}, ::c10::str(__VA_ARGS__)) + +// Report a warning to the user only once. Accepts an arbitrary number of extra +// arguments which are concatenated into the warning message using operator<< +// +#define TORCH_WARN_ONCE(...) \ + C10_UNUSED static const auto C10_ANONYMOUS_VARIABLE(torch_warn_once_) = [&] { \ + ::c10::Warning::warn({__func__, __FILE__, static_cast(__LINE__)}, ::c10::str(__VA_ARGS__)); \ + return true; \ + }() + + +// ---------------------------------------------------------------------------- +// Deprecated macros +// ---------------------------------------------------------------------------- + +namespace c10 { namespace detail { + +/* +// Deprecation disabled until we fix sites in our codebase +C10_DEPRECATED_MESSAGE("AT_ERROR(msg) is deprecated, use TORCH_CHECK(false, msg) instead.") +*/ +inline void deprecated_AT_ERROR() {} + +/* +// Deprecation disabled until we fix sites in our codebase +C10_DEPRECATED_MESSAGE("AT_INDEX_ERROR(msg) is deprecated, use TORCH_CHECK_INDEX(false, msg) instead.") +*/ +inline void deprecated_AT_INDEX_ERROR() {} + +/* +// Deprecation disabled until we fix sites in our codebase +C10_DEPRECATED_MESSAGE("AT_WARN is deprecated, use TORCH_WARN instead.") +*/ +inline void deprecated_AT_WARN() {} + +C10_DEPRECATED_MESSAGE("AT_CHECK is deprecated, use TORCH_CHECK instead.") +inline void deprecated_AT_CHECK() {} + +/* +// Deprecation disabled until we fix sites in our codebase +C10_DEPRECATED_MESSAGE("AT_ASSERT is deprecated, if you mean to indicate an internal invariant failure, use " \ + "TORCH_INTERNAL_ASSERT instead; if you mean to do user error checking, use " \ + "TORCH_CHECK. 
See https://github.com/pytorch/pytorch/issues/20287 for more details.") +*/ +inline void deprecated_AT_ASSERT() {} + +/* +// Deprecation disabled until we fix sites in our codebase +C10_DEPRECATED_MESSAGE("AT_ASSERTM is deprecated, if you mean to indicate an internal invariant failure, use " \ + "TORCH_INTERNAL_ASSERT instead; if you mean to do user error checking, use " \ + "TORCH_CHECK. See https://github.com/pytorch/pytorch/issues/20287 for more details.") +*/ +inline void deprecated_AT_ASSERTM() {} + +}} // namespace c10::detail + +// Deprecated alias; this alias was deprecated because it wasn't clear to +// people that you should use a macro with AT_ prefix inside the torch/csrc +// directory. Use TORCH_CHECK instead. +#define AT_CHECK(...) \ + do { \ + ::c10::detail::deprecated_AT_CHECK(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_CHECK(__VA_ARGS__)); \ + } while (false) + +// Deprecated alias; this alias was deprecated because people kept mistakenly +// using it for user error checking. Use TORCH_INTERNAL_ASSERT or TORCH_CHECK +// instead. See https://github.com/pytorch/pytorch/issues/20287 for more details. +#define AT_ASSERT(...) \ + do { \ + ::c10::detail::deprecated_AT_ASSERT(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_INTERNAL_ASSERT(__VA_ARGS__)); \ + } while (false) + +// Deprecated alias, like AT_ASSERT. The new TORCH_INTERNAL_ASSERT macro supports +// both 0-ary and variadic calls, so having a separate message-accepting macro +// is not necessary. +// +// NB: we MUST include cond explicitly here, as MSVC will miscompile the macro +// expansion, shunting all of __VA_ARGS__ to cond. An alternate workaround +// can be seen at +// https://stackoverflow.com/questions/5134523/msvc-doesnt-expand-va-args-correctly +#define AT_ASSERTM(cond, ...) \ + do { \ + ::c10::detail::deprecated_AT_ASSERTM(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_INTERNAL_ASSERT(cond, __VA_ARGS__)); \ + } while (false) + +// Deprecated alias; this alias was deprecated because it represents extra API +// surface that makes it hard for people to understand what macro to use. +// Use TORCH_CHECK(false, ...) or TORCH_INTERNAL_ASSERT(false, ...) to +// unconditionally fail at a line of code. +#define AT_ERROR(...) \ + do { \ + ::c10::detail::deprecated_AT_ERROR(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_CHECK(false, ::c10::str(__VA_ARGS__))); \ + } while (false) + +// Deprecated alias; this alias was deprecated for consistency with TORCH_CHECK. +#define AT_INDEX_ERROR(...) \ + do { \ + ::c10::detail::deprecated_AT_INDEX_ERROR(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_CHECK_INDEX(false, ::c10::str(__VA_ARGS__))); \ + } while (false) + +// Deprecated alias; this alias was deprecated because it wasn't clear to +// people that you should use a macro with AT_ prefix inside the torch/csrc +// directory. Use TORCH_WARN instead. +#define AT_WARN(...) \ + do { \ + ::c10::detail::deprecated_AT_WARN(); \ + C10_EXPAND_MSVC_WORKAROUND(TORCH_WARN(__VA_ARGS__)); \ + } while (false) + + +#endif // C10_UTIL_EXCEPTION_H_ diff --git a/thirdparty/libtorch/include/c10/util/Flags.h b/thirdparty/libtorch/include/c10/util/Flags.h new file mode 100644 index 0000000000..b26d4a724d --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Flags.h @@ -0,0 +1,228 @@ +#ifndef C10_UTIL_FLAGS_H_ +#define C10_UTIL_FLAGS_H_ + +/* Commandline flags support for C10. + * + * This is a portable commandline flags tool for c10, so we can optionally + * choose to use gflags or a lightweighted custom implementation if gflags is + * not possible on a certain platform. 
If you have gflags installed, setting the + * macro C10_USE_GFLAGS will seamlessly route everything to gflags. + * + * To define a flag foo of type bool that defaults to true, do the following in the + * *global* namespace: + * C10_DEFINE_bool(foo, true, "An example."); + * + * To use it in another .cc file, you can use C10_DECLARE_* as follows: + * C10_DECLARE_bool(foo); + * + * In both cases, you can then access the flag via FLAGS_foo. + * + * It is recommended that you build with gflags. To learn more about the flags + * usage, refer to the gflags page here: + * + * https://gflags.github.io/gflags/ + * + * Note about Python users / devs: gflags is initialized from a C++ function + * ParseCommandLineFlags, and is usually done in native binaries in the main + * function. As Python does not have a modifiable main function, it is usually + * difficult to change the flags after Python starts. Hence, it is recommended + * that one sets the default value of the flags to one that's acceptable in + * general - that will allow Python to run without wrong flags. + */ + +#include + +#include "c10/macros/Macros.h" +#include "c10/util/Registry.h" + +namespace c10 { +/** + * Sets the usage message when a commandline tool is called with "--help". + */ +C10_API void SetUsageMessage(const std::string& str); + +/** + * Returns the usage message for the commandline tool set by SetUsageMessage. + */ +C10_API const char* UsageMessage(); + +/** + * Parses the commandline flags. + * + * This command parses all the commandline arguments passed in via pargc + * and argv. Once it is finished, pargc and argv will contain the remaining + * commandline args that c10 does not deal with. Note that following + * convention, argv[0] contains the binary name and is not parsed. + */ +C10_API bool ParseCommandLineFlags(int* pargc, char*** pargv); + +/** + * Checks if the commandline flags have already been parsed. + */ +C10_API bool CommandLineFlagsHasBeenParsed(); + +} // namespace c10 + +//////////////////////////////////////////////////////////////////////////////// +// Below are gflags and non-gflags specific implementations. +// In general, they define the following macros for one to declare (use +// C10_DECLARE) or define (use C10_DEFINE) flags: +// C10_{DECLARE,DEFINE}_{int,int64,double,bool,string} +//////////////////////////////////////////////////////////////////////////////// + +#ifdef C10_USE_GFLAGS + +//////////////////////////////////////////////////////////////////////////////// +// Begin gflags section: most functions are basically rerouted to gflags. +//////////////////////////////////////////////////////////////////////////////// +#include + +// C10 uses hidden visibility by default. However, in gflags, it only uses +// export on Windows platform (with dllexport) but not on linux/mac (with +// default visibility). As a result, to ensure that we are always exporting +// global variables, we will redefine the GFLAGS_DLL_DEFINE_FLAG macro if we +// are building C10 as a shared library. +// This has to be done after the inclusion of gflags, because some early +// versions of gflags.h (e.g. 2.0 on ubuntu 14.04) directly define the +// macros, so we need to do definition after gflags is done.
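As an editor's aside, the flag workflow documented at the top of this header (C10_DEFINE_*, C10_DECLARE_*, FLAGS_*, and c10::ParseCommandLineFlags) can be sketched in a few lines. The sketch is illustrative only: the flag name c10_example_verbose and the tool name are made up, and it assumes the vendored c10 headers are on the include path and the c10/libtorch library is linked.

#include <iostream>

#include <c10/util/Flags.h>

// Defined in the *global* namespace, as recommended above; the value is then
// reachable through the generated variable FLAGS_c10_example_verbose.
C10_DEFINE_bool(c10_example_verbose, false, "Print extra diagnostics.");

int main(int argc, char* argv[]) {
  c10::SetUsageMessage("example_tool [--c10_example_verbose=true]");
  // Parses and strips the flags c10 understands; argc/argv keep the rest.
  c10::ParseCommandLineFlags(&argc, &argv);
  if (FLAGS_c10_example_verbose) {
    std::cout << "verbose mode enabled\n";
  }
  return 0;
}

Another translation unit could reuse the same flag by declaring C10_DECLARE_bool(c10_example_verbose); and reading FLAGS_c10_example_verbose, mirroring the foo example in the comment above.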
+#ifdef GFLAGS_DLL_DEFINE_FLAG +#undef GFLAGS_DLL_DEFINE_FLAG +#endif // GFLAGS_DLL_DEFINE_FLAG +#ifdef GFLAGS_DLL_DECLARE_FLAG +#undef GFLAGS_DLL_DECLARE_FLAG +#endif // GFLAGS_DLL_DECLARE_FLAG +#define GFLAGS_DLL_DEFINE_FLAG C10_EXPORT +#define GFLAGS_DLL_DECLARE_FLAG C10_IMPORT + +// gflags before 2.0 uses namespace google and after 2.1 uses namespace gflags. +// Using GFLAGS_GFLAGS_H_ to capture this change. +#ifndef GFLAGS_GFLAGS_H_ +namespace gflags = google; +#endif // GFLAGS_GFLAGS_H_ + +// Motivation about the gflags wrapper: +// (1) We would need to make sure that the gflags version and the non-gflags +// version of C10 are going to expose the same flags abstraction. One should +// explicitly use FLAGS_flag_name to access the flags. +// (2) For flag names, it is recommended to start with c10_ to distinguish it +// from regular gflags flags. For example, do +// C10_DEFINE_BOOL(c10_my_flag, true, "An example"); +// to allow one to use FLAGS_c10_my_flag. +// (3) Gflags has a design issue that does not properly expose the global flags, +// if one builds the library with -fvisibility=hidden. The current gflags (as of +// Aug 2018) only deals with the Windows case using dllexport, and not the Linux +// counterparts. As a result, we will explciitly use C10_EXPORT to export the +// flags defined in C10. This is done via a global reference, so the flag +// itself is not duplicated - under the hood it is the same global gflags flag. +#define C10_GFLAGS_DEF_WRAPPER(type, real_type, name, default_value, help_str) \ + DEFINE_##type(name, default_value, help_str); \ + +#define C10_DEFINE_int(name, default_value, help_str) \ + C10_GFLAGS_DEF_WRAPPER(int32, gflags::int32, name, default_value, help_str) +#define C10_DEFINE_int32(name, default_value, help_str) \ + C10_DEFINE_int(name, default_value, help_str) +#define C10_DEFINE_int64(name, default_value, help_str) \ + C10_GFLAGS_DEF_WRAPPER(int64, gflags::int64, name, default_value, help_str) +#define C10_DEFINE_double(name, default_value, help_str) \ + C10_GFLAGS_DEF_WRAPPER(double, double, name, default_value, help_str) +#define C10_DEFINE_bool(name, default_value, help_str) \ + C10_GFLAGS_DEF_WRAPPER(bool, bool, name, default_value, help_str) +#define C10_DEFINE_string(name, default_value, help_str) \ + C10_GFLAGS_DEF_WRAPPER(string, ::fLS::clstring, name, default_value, help_str) + +// DECLARE_typed_var should be used in header files and in the global namespace. +#define C10_GFLAGS_DECLARE_WRAPPER(type, real_type, name) \ + DECLARE_##type(name); \ + +#define C10_DECLARE_int(name) \ + C10_GFLAGS_DECLARE_WRAPPER(int32, gflags::int32, name) +#define C10_DECLARE_int32(name) C10_DECLARE_int(name) +#define C10_DECLARE_int64(name) \ + C10_GFLAGS_DECLARE_WRAPPER(int64, gflags::int64, name) +#define C10_DECLARE_double(name) \ + C10_GFLAGS_DECLARE_WRAPPER(double, double, name) +#define C10_DECLARE_bool(name) C10_GFLAGS_DECLARE_WRAPPER(bool, bool, name) +#define C10_DECLARE_string(name) \ + C10_GFLAGS_DECLARE_WRAPPER(string, ::fLS::clstring, name) + +//////////////////////////////////////////////////////////////////////////////// +// End gflags section. +//////////////////////////////////////////////////////////////////////////////// + +#else // C10_USE_GFLAGS + +//////////////////////////////////////////////////////////////////////////////// +// Begin non-gflags section: providing equivalent functionality. 
+//////////////////////////////////////////////////////////////////////////////// + +namespace c10 { + +class C10_API C10FlagParser { + public: + C10FlagParser() {} + bool success() { + return success_; + } + + protected: + template + bool Parse(const std::string& content, T* value); + bool success_; +}; + +C10_DECLARE_REGISTRY(C10FlagsRegistry, C10FlagParser, const std::string&); + +} // namespace c10 + +// The macros are defined outside the c10 namespace. In your code, you should +// write the C10_DEFINE_* and C10_DECLARE_* macros outside any namespace +// as well. + +#define C10_DEFINE_typed_var(type, name, default_value, help_str) \ + C10_EXPORT type FLAGS_##name = default_value; \ + namespace c10 { \ + namespace { \ + class C10FlagParser_##name : public C10FlagParser { \ + public: \ + explicit C10FlagParser_##name(const std::string& content) { \ + success_ = C10FlagParser::Parse(content, &FLAGS_##name); \ + } \ + }; \ + } \ + RegistererC10FlagsRegistry g_C10FlagsRegistry_##name( \ + #name, \ + C10FlagsRegistry(), \ + RegistererC10FlagsRegistry::DefaultCreator, \ + "(" #type ", default " #default_value ") " help_str); \ + } + +#define C10_DEFINE_int(name, default_value, help_str) \ + C10_DEFINE_typed_var(int, name, default_value, help_str) +#define C10_DEFINE_int32(name, default_value, help_str) \ + C10_DEFINE_int(name, default_value, help_str) +#define C10_DEFINE_int64(name, default_value, help_str) \ + C10_DEFINE_typed_var(int64_t, name, default_value, help_str) +#define C10_DEFINE_double(name, default_value, help_str) \ + C10_DEFINE_typed_var(double, name, default_value, help_str) +#define C10_DEFINE_bool(name, default_value, help_str) \ + C10_DEFINE_typed_var(bool, name, default_value, help_str) +#define C10_DEFINE_string(name, default_value, help_str) \ + C10_DEFINE_typed_var(std::string, name, default_value, help_str) + +// DECLARE_typed_var should be used in header files and in the global namespace. +#define C10_DECLARE_typed_var(type, name) C10_IMPORT extern type FLAGS_##name + +#define C10_DECLARE_int(name) C10_DECLARE_typed_var(int, name) +#define C10_DECLARE_int32(name) C10_DECLARE_int(name) +#define C10_DECLARE_int64(name) C10_DECLARE_typed_var(int64_t, name) +#define C10_DECLARE_double(name) C10_DECLARE_typed_var(double, name) +#define C10_DECLARE_bool(name) C10_DECLARE_typed_var(bool, name) +#define C10_DECLARE_string(name) C10_DECLARE_typed_var(std::string, name) + +//////////////////////////////////////////////////////////////////////////////// +// End non-gflags section. +//////////////////////////////////////////////////////////////////////////////// + +#endif // C10_USE_GFLAGS + +#endif // C10_UTIL_FLAGS_H_ diff --git a/thirdparty/libtorch/include/c10/util/FunctionRef.h b/thirdparty/libtorch/include/c10/util/FunctionRef.h new file mode 100644 index 0000000000..a3730476b7 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/FunctionRef.h @@ -0,0 +1,65 @@ +//===- llvm/ADT/STLExtras.h - Useful STL related functions ------*- C++ -*-===// +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. +// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// +//===----------------------------------------------------------------------===// +// +// This file contains some templates that are useful if you are working with the +// STL at all. +// +// No library is required when using these functions. 
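Before the function_ref class itself appears below, a brief usage sketch may help; it is hypothetical (apply_twice is an invented helper), and it assumes the vendored header is reachable on the include path.

#include <iostream>

#include <c10/util/FunctionRef.h>

// Accepts any callable of shape int(int) by non-owning reference, avoiding the
// allocation and copy overhead that std::function can incur.
static int apply_twice(c10::function_ref<int(int)> f, int x) {
  return f(f(x));
}

int main() {
  int offset = 10;
  // A capturing lambda binds to function_ref for the duration of the call only.
  std::cout << apply_twice([&](int v) { return v + offset; }, 1) << "\n";  // 21
  return 0;
}

Because the reference does not own the callable, it should not be stored past the call, exactly as the class comment below warns.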
+// +//===----------------------------------------------------------------------===// + +// c10: modified from llvm::function_ref +// c10: added more SFINAE to enable use in overloaded functions + +#pragma once + +namespace c10 { + +/// An efficient, type-erasing, non-owning reference to a callable. This is +/// intended for use as the type of a function parameter that is not used +/// after the function in question returns. +/// +/// This class does not own the callable, so it is not in general safe to store +/// a function_ref. +template class function_ref; + +template +class function_ref { +Ret (*callback)(intptr_t callable, Params ...params) = nullptr; +intptr_t callable; + +template +static Ret callback_fn(intptr_t callable, Params ...params) { + return (*reinterpret_cast(callable))( + std::forward(params)...); +} + +public: +function_ref() = default; +function_ref(std::nullptr_t) {} + +template +function_ref(Callable &&callable, + typename std::enable_if< + !std::is_same::type, + function_ref>::value>::type * = nullptr, + typename std::enable_if< + std::is_convertible< + typename std::result_of::type, + Ret>::value>::type * = nullptr) + : callback(callback_fn::type>), + callable(reinterpret_cast(&callable)) {} + +Ret operator()(Params ...params) const { + return callback(callable, std::forward(params)...); +} + +operator bool() const { return callback; } +}; + +} diff --git a/thirdparty/libtorch/include/c10/util/Half-inl.h b/thirdparty/libtorch/include/c10/util/Half-inl.h new file mode 100644 index 0000000000..f3ef954a58 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Half-inl.h @@ -0,0 +1,289 @@ +#pragma once + +#include +#include +#include + +#ifdef __CUDACC__ +#include +#endif + +#ifdef __HIPCC__ +#include +#endif + +namespace c10 { + +/// Constructors + +inline C10_HOST_DEVICE Half::Half(float value) { +#if defined(__CUDA_ARCH__) || defined(__HIP_DEVICE_COMPILE__) + x = __half_as_short(__float2half(value)); +#else + x = detail::fp16_ieee_from_fp32_value(value); +#endif +} + +/// Implicit conversions + +inline C10_HOST_DEVICE Half::operator float() const { +#if defined(__CUDA_ARCH__) || defined(__HIP_DEVICE_COMPILE__) + return __half2float(*reinterpret_cast(&x)); +#else + return detail::fp16_ieee_to_fp32_value(x); +#endif +} + +#if defined(__CUDACC__) || defined(__HIPCC__) +inline C10_HOST_DEVICE Half::Half(const __half& value) { + x = *reinterpret_cast(&value); +} +inline C10_HOST_DEVICE Half::operator __half() const { + return *reinterpret_cast(&x); +} +#endif + +// CUDA intrinsics + +#if defined(__CUDA_ARCH__) && (__CUDA_ARCH__ >= 350) +inline __device__ Half __ldg(const Half* ptr) { + return __ldg(reinterpret_cast(ptr)); +} +#endif + +/// Arithmetic + +inline C10_HOST_DEVICE Half operator+(const Half& a, const Half& b) { + return static_cast(a) + static_cast(b); +} + +inline C10_HOST_DEVICE Half operator-(const Half& a, const Half& b) { + return static_cast(a) - static_cast(b); +} + +inline C10_HOST_DEVICE Half operator*(const Half& a, const Half& b) { + return static_cast(a) * static_cast(b); +} + +inline C10_HOST_DEVICE Half operator/(const Half& a, const Half& b) { + return static_cast(a) / static_cast(b); +} + +inline C10_HOST_DEVICE Half operator-(const Half& a) { +#if (defined(__CUDA_ARCH__) && __CUDA_ARCH__ >= 530) || defined(__HIP_DEVICE_COMPILE__) + return __hneg(a); +#else + return -static_cast(a); +#endif +} + +inline C10_HOST_DEVICE Half& operator+=(Half& a, const Half& b) { + a = a + b; + return a; +} + +inline C10_HOST_DEVICE Half& operator-=(Half& a, 
const Half& b) { + a = a - b; + return a; +} + +inline C10_HOST_DEVICE Half& operator*=(Half& a, const Half& b) { + a = a * b; + return a; +} + +inline C10_HOST_DEVICE Half& operator/=(Half& a, const Half& b) { + a = a / b; + return a; +} + +/// Arithmetic with floats + +inline C10_HOST_DEVICE float operator+(Half a, float b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE float operator-(Half a, float b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE float operator*(Half a, float b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE float operator/(Half a, float b) { + return static_cast(a) / b; +} + +inline C10_HOST_DEVICE float operator+(float a, Half b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE float operator-(float a, Half b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE float operator*(float a, Half b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE float operator/(float a, Half b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE float& operator+=(float& a, const Half& b) { + return a += static_cast(b); +} +inline C10_HOST_DEVICE float& operator-=(float& a, const Half& b) { + return a -= static_cast(b); +} +inline C10_HOST_DEVICE float& operator*=(float& a, const Half& b) { + return a *= static_cast(b); +} +inline C10_HOST_DEVICE float& operator/=(float& a, const Half& b) { + return a /= static_cast(b); +} + +/// Arithmetic with doubles + +inline C10_HOST_DEVICE double operator+(Half a, double b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE double operator-(Half a, double b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE double operator*(Half a, double b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE double operator/(Half a, double b) { + return static_cast(a) / b; +} + +inline C10_HOST_DEVICE double operator+(double a, Half b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE double operator-(double a, Half b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE double operator*(double a, Half b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE double operator/(double a, Half b) { + return a / static_cast(b); +} + +/// Arithmetic with ints + +inline C10_HOST_DEVICE Half operator+(Half a, int b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE Half operator-(Half a, int b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE Half operator*(Half a, int b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE Half operator/(Half a, int b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE Half operator+(int a, Half b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE Half operator-(int a, Half b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE Half operator*(int a, Half b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE Half operator/(int a, Half b) { + return static_cast(a) / b; +} + +//// Arithmetic with int64_t + +inline C10_HOST_DEVICE Half operator+(Half a, int64_t b) { + return a + static_cast(b); +} +inline C10_HOST_DEVICE Half operator-(Half a, int64_t b) { + return a - static_cast(b); +} +inline C10_HOST_DEVICE Half operator*(Half a, int64_t b) { + return a * static_cast(b); +} +inline C10_HOST_DEVICE Half operator/(Half a, int64_t b) { + return a / static_cast(b); +} + +inline C10_HOST_DEVICE Half operator+(int64_t a, Half b) { + return static_cast(a) + b; +} +inline C10_HOST_DEVICE Half operator-(int64_t a, Half b) { + return static_cast(a) - b; +} +inline C10_HOST_DEVICE Half 
operator*(int64_t a, Half b) { + return static_cast(a) * b; +} +inline C10_HOST_DEVICE Half operator/(int64_t a, Half b) { + return static_cast(a) / b; +} + +/// NOTE: we do not define comparisons directly and instead rely on the implicit +/// conversion from c10::Half to float. + +} // namespace c10 + +namespace std { + +template <> +class numeric_limits { + public: + static constexpr bool is_specialized = true; + static constexpr bool is_signed = true; + static constexpr bool is_integer = false; + static constexpr bool is_exact = false; + static constexpr bool has_infinity = true; + static constexpr bool has_quiet_NaN = true; + static constexpr bool has_signaling_NaN = true; + static constexpr auto has_denorm = numeric_limits::has_denorm; + static constexpr auto has_denorm_loss = + numeric_limits::has_denorm_loss; + static constexpr auto round_style = numeric_limits::round_style; + static constexpr bool is_iec559 = true; + static constexpr bool is_bounded = true; + static constexpr bool is_modulo = false; + static constexpr int digits = 11; + static constexpr int digits10 = 3; + static constexpr int max_digits10 = 5; + static constexpr int radix = 2; + static constexpr int min_exponent = -13; + static constexpr int min_exponent10 = -4; + static constexpr int max_exponent = 16; + static constexpr int max_exponent10 = 4; + static constexpr auto traps = numeric_limits::traps; + static constexpr auto tinyness_before = + numeric_limits::tinyness_before; + static constexpr c10::Half min() { + return c10::Half(0x0400, c10::Half::from_bits()); + } + static constexpr c10::Half lowest() { + return c10::Half(0xFBFF, c10::Half::from_bits()); + } + static constexpr c10::Half max() { + return c10::Half(0x7BFF, c10::Half::from_bits()); + } + static constexpr c10::Half epsilon() { + return c10::Half(0x1400, c10::Half::from_bits()); + } + static constexpr c10::Half round_error() { + return c10::Half(0x3800, c10::Half::from_bits()); + } + static constexpr c10::Half infinity() { + return c10::Half(0x7C00, c10::Half::from_bits()); + } + static constexpr c10::Half quiet_NaN() { + return c10::Half(0x7E00, c10::Half::from_bits()); + } + static constexpr c10::Half signaling_NaN() { + return c10::Half(0x7D00, c10::Half::from_bits()); + } + static constexpr c10::Half denorm_min() { + return c10::Half(0x0001, c10::Half::from_bits()); + } +}; + +} // namespace std diff --git a/thirdparty/libtorch/include/c10/util/Half.h b/thirdparty/libtorch/include/c10/util/Half.h new file mode 100644 index 0000000000..338f271627 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Half.h @@ -0,0 +1,510 @@ +#pragma once + +/// Defines the Half type (half-precision floating-point) including conversions +/// to standard C types and basic arithmetic operations. Note that arithmetic +/// operations are implemented by converting to floating point and +/// performing the operation in float32, instead of using CUDA half intrinisics. +/// Most uses of this type within ATen are memory bound, including the +/// element-wise kernels, and the half intrinisics aren't efficient on all GPUs. +/// If you are writing a compute bound kernel, you can use the CUDA half +/// intrinsics directly on the Half type from device code. 
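As a small, hypothetical host-side sketch of what that note means in practice (assuming the vendored headers are on the include path):

#include <iostream>
#include <limits>

#include <c10/util/Half.h>

int main() {
  // Construction converts float to the underlying IEEE fp16 bit pattern.
  c10::Half a = 1.5f;
  c10::Half b = 0.25f;

  // operator+ widens both operands to float, adds in float32, and narrows the
  // result back to Half, so no half intrinsics are used on this host path.
  c10::Half sum = a + b;

  std::cout << static_cast<float>(sum) << "\n";                       // 1.75
  std::cout << float(std::numeric_limits<c10::Half>::max()) << "\n";  // 65504
  return 0;
}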
+ +#include +#include + +#if defined(__cplusplus) && (__cplusplus >= 201103L) +#include +#include +#elif !defined(__OPENCL_VERSION__) +#include +#include +#endif + +#ifdef _MSC_VER +#include +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef __CUDACC__ +#include +#endif + +#ifdef __HIPCC__ +#include +#endif + +namespace c10 { + +namespace detail { + + inline float fp32_from_bits(uint32_t w) { + #if defined(__OPENCL_VERSION__) + return as_float(w); + #elif defined(__CUDA_ARCH__) + return __uint_as_float((unsigned int)w); + #elif defined(__INTEL_COMPILER) + return _castu32_f32(w); + #else + union { + uint32_t as_bits; + float as_value; + } fp32 = {w}; + return fp32.as_value; + #endif + } + + inline uint32_t fp32_to_bits(float f) { + #if defined(__OPENCL_VERSION__) + return as_uint(f); + #elif defined(__CUDA_ARCH__) + return (uint32_t)__float_as_uint(f); + #elif defined(__INTEL_COMPILER) + return _castf32_u32(f); + #else + union { + float as_value; + uint32_t as_bits; + } fp32 = {f}; + return fp32.as_bits; + #endif + } + + /* + * Convert a 16-bit floating-point number in IEEE half-precision format, in bit representation, to + * a 32-bit floating-point number in IEEE single-precision format, in bit representation. + * + * @note The implementation doesn't use any floating-point operations. + */ + inline uint32_t fp16_ieee_to_fp32_bits(uint16_t h) { + /* + * Extend the half-precision floating-point number to 32 bits and shift to the upper part of the 32-bit word: + * +---+-----+------------+-------------------+ + * | S |EEEEE|MM MMMM MMMM|0000 0000 0000 0000| + * +---+-----+------------+-------------------+ + * Bits 31 26-30 16-25 0-15 + * + * S - sign bit, E - bits of the biased exponent, M - bits of the mantissa, 0 - zero bits. + */ + const uint32_t w = (uint32_t) h << 16; + /* + * Extract the sign of the input number into the high bit of the 32-bit word: + * + * +---+----------------------------------+ + * | S |0000000 00000000 00000000 00000000| + * +---+----------------------------------+ + * Bits 31 0-31 + */ + const uint32_t sign = w & UINT32_C(0x80000000); + /* + * Extract mantissa and biased exponent of the input number into the bits 0-30 of the 32-bit word: + * + * +---+-----+------------+-------------------+ + * | 0 |EEEEE|MM MMMM MMMM|0000 0000 0000 0000| + * +---+-----+------------+-------------------+ + * Bits 30 27-31 17-26 0-16 + */ + const uint32_t nonsign = w & UINT32_C(0x7FFFFFFF); + /* + * Renorm shift is the number of bits to shift mantissa left to make the half-precision number normalized. + * If the initial number is normalized, some of its high 6 bits (sign == 0 and 5-bit exponent) equals one. + * In this case renorm_shift == 0. If the number is denormalize, renorm_shift > 0. Note that if we shift + * denormalized nonsign by renorm_shift, the unit bit of mantissa will shift into exponent, turning the + * biased exponent into 1, and making mantissa normalized (i.e. without leading 1). + */ +#ifdef _MSC_VER + unsigned long nonsign_bsr; + _BitScanReverse(&nonsign_bsr, (unsigned long)nonsign); + uint32_t renorm_shift = (uint32_t)nonsign_bsr ^ 31; +#else + uint32_t renorm_shift = __builtin_clz(nonsign); +#endif + renorm_shift = renorm_shift > 5 ? renorm_shift - 5 : 0; + /* + * Iff half-precision number has exponent of 15, the addition overflows + * it into bit 31, and the subsequent shift turns the high 9 bits + * into 1. Thus inf_nan_mask == 0x7F800000 if the half-precision number + * had exponent of 15 (i.e. 
was NaN or infinity) 0x00000000 otherwise + */ + const int32_t inf_nan_mask = + ((int32_t)(nonsign + 0x04000000) >> 8) & INT32_C(0x7F800000); + /* + * Iff nonsign is 0, it overflows into 0xFFFFFFFF, turning bit 31 + * into 1. Otherwise, bit 31 remains 0. The signed shift right by 31 + * broadcasts bit 31 into all bits of the zero_mask. Thus zero_mask == + * 0xFFFFFFFF if the half-precision number was zero (+0.0h or -0.0h) + * 0x00000000 otherwise + */ + const int32_t zero_mask = (int32_t)(nonsign - 1) >> 31; + /* + * 1. Shift nonsign left by renorm_shift to normalize it (if the input + * was denormal) + * 2. Shift nonsign right by 3 so the exponent (5 bits originally) + * becomes an 8-bit field and 10-bit mantissa shifts into the 10 high + * bits of the 23-bit mantissa of IEEE single-precision number. + * 3. Add 0x70 to the exponent (starting at bit 23) to compensate the + * different in exponent bias (0x7F for single-precision number less 0xF + * for half-precision number). + * 4. Subtract renorm_shift from the exponent (starting at bit 23) to + * account for renormalization. As renorm_shift is less than 0x70, this + * can be combined with step 3. + * 5. Binary OR with inf_nan_mask to turn the exponent into 0xFF if the + * input was NaN or infinity. + * 6. Binary ANDNOT with zero_mask to turn the mantissa and exponent + * into zero if the input was zero. + * 7. Combine with the sign of the input number. + */ + return sign | + ((((nonsign << renorm_shift >> 3) + ((0x70 - renorm_shift) << 23)) | + inf_nan_mask) & + ~zero_mask); + } + + /* + * Convert a 16-bit floating-point number in IEEE half-precision format, in bit representation, to + * a 32-bit floating-point number in IEEE single-precision format. + * + * @note The implementation relies on IEEE-like (no assumption about rounding mode and no operations on denormals) + * floating-point operations and bitcasts between integer and floating-point variables. + */ + inline float fp16_ieee_to_fp32_value(uint16_t h) { + /* + * Extend the half-precision floating-point number to 32 bits and shift to the upper part of the 32-bit word: + * +---+-----+------------+-------------------+ + * | S |EEEEE|MM MMMM MMMM|0000 0000 0000 0000| + * +---+-----+------------+-------------------+ + * Bits 31 26-30 16-25 0-15 + * + * S - sign bit, E - bits of the biased exponent, M - bits of the mantissa, 0 - zero bits. 
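+ * Worked example (an illustrative addition, not part of the upstream
+ * comment): the half-precision value -5.0 has h == 0xC500, i.e. S == 1,
+ * EEEEE == 10001, MM MMMM MMMM == 01 0000 0000. After the shift below,
+ * w == 0xC5000000: the sign lands in bit 31 and the exponent and mantissa
+ * occupy bits 16-30, exactly as pictured above.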
+ */ + const uint32_t w = (uint32_t) h << 16; + /* + * Extract the sign of the input number into the high bit of the 32-bit word: + * + * +---+----------------------------------+ + * | S |0000000 00000000 00000000 00000000| + * +---+----------------------------------+ + * Bits 31 0-31 + */ + const uint32_t sign = w & UINT32_C(0x80000000); + /* + * Extract mantissa and biased exponent of the input number into the high bits of the 32-bit word: + * + * +-----+------------+---------------------+ + * |EEEEE|MM MMMM MMMM|0 0000 0000 0000 0000| + * +-----+------------+---------------------+ + * Bits 27-31 17-26 0-16 + */ + const uint32_t two_w = w + w; + + /* + * Shift mantissa and exponent into bits 23-28 and bits 13-22 so they become mantissa and exponent + * of a single-precision floating-point number: + * + * S|Exponent | Mantissa + * +-+---+-----+------------+----------------+ + * |0|000|EEEEE|MM MMMM MMMM|0 0000 0000 0000| + * +-+---+-----+------------+----------------+ + * Bits | 23-31 | 0-22 + * + * Next, there are some adjustments to the exponent: + * - The exponent needs to be corrected by the difference in exponent bias between single-precision and half-precision + * formats (0x7F - 0xF = 0x70) + * - Inf and NaN values in the inputs should become Inf and NaN values after conversion to the single-precision number. + * Therefore, if the biased exponent of the half-precision input was 0x1F (max possible value), the biased exponent + * of the single-precision output must be 0xFF (max possible value). We do this correction in two steps: + * - First, we adjust the exponent by (0xFF - 0x1F) = 0xE0 (see exp_offset below) rather than by 0x70 suggested + * by the difference in the exponent bias (see above). + * - Then we multiply the single-precision result of exponent adjustment by 2**(-112) to reverse the effect of + * exponent adjustment by 0xE0 less the necessary exponent adjustment by 0x70 due to difference in exponent bias. + * The floating-point multiplication hardware would ensure than Inf and NaN would retain their value on at least + * partially IEEE754-compliant implementations. + * + * Note that the above operations do not handle denormal inputs (where biased exponent == 0). However, they also do not + * operate on denormal inputs, and do not produce denormal results. + */ + const uint32_t exp_offset = UINT32_C(0xE0) << 23; + // const float exp_scale = 0x1.0p-112f; + uint32_t scale_bits = (uint32_t) 15 << 23; + float exp_scale_val; + std::memcpy(&exp_scale_val, &scale_bits, sizeof(exp_scale_val)); + const float exp_scale = exp_scale_val; + const float normalized_value = fp32_from_bits((two_w >> 4) + exp_offset) * exp_scale; + + /* + * Convert denormalized half-precision inputs into single-precision results (always normalized). + * Zero inputs are also handled here. + * + * In a denormalized number the biased exponent is zero, and mantissa has on-zero bits. + * First, we shift mantissa into bits 0-9 of the 32-bit word. + * + * zeros | mantissa + * +---------------------------+------------+ + * |0000 0000 0000 0000 0000 00|MM MMMM MMMM| + * +---------------------------+------------+ + * Bits 10-31 0-9 + * + * Now, remember that denormalized half-precision numbers are represented as: + * FP16 = mantissa * 2**(-24). + * The trick is to construct a normalized single-precision number with the same mantissa and thehalf-precision input + * and with an exponent which would scale the corresponding mantissa bits to 2**(-24). 
+ * A normalized single-precision floating-point number is represented as: + * FP32 = (1 + mantissa * 2**(-23)) * 2**(exponent - 127) + * Therefore, when the biased exponent is 126, a unit change in the mantissa of the input denormalized half-precision + * number causes a change of the constructud single-precision number by 2**(-24), i.e. the same ammount. + * + * The last step is to adjust the bias of the constructed single-precision number. When the input half-precision number + * is zero, the constructed single-precision number has the value of + * FP32 = 1 * 2**(126 - 127) = 2**(-1) = 0.5 + * Therefore, we need to subtract 0.5 from the constructed single-precision number to get the numerical equivalent of + * the input half-precision number. + */ + const uint32_t magic_mask = UINT32_C(126) << 23; + const float magic_bias = 0.5f; + const float denormalized_value = fp32_from_bits((two_w >> 17) | magic_mask) - magic_bias; + + /* + * - Choose either results of conversion of input as a normalized number, or as a denormalized number, depending on the + * input exponent. The variable two_w contains input exponent in bits 27-31, therefore if its smaller than 2**27, the + * input is either a denormal number, or zero. + * - Combine the result of conversion of exponent and mantissa with the sign of the input number. + */ + const uint32_t denormalized_cutoff = UINT32_C(1) << 27; + const uint32_t result = sign | + (two_w < denormalized_cutoff ? fp32_to_bits(denormalized_value) : fp32_to_bits(normalized_value)); + return fp32_from_bits(result); + } + + /* + * Convert a 32-bit floating-point number in IEEE single-precision format to a 16-bit floating-point number in + * IEEE half-precision format, in bit representation. + * + * @note The implementation relies on IEEE-like (no assumption about rounding mode and no operations on denormals) + * floating-point operations and bitcasts between integer and floating-point variables. + */ + inline uint16_t fp16_ieee_from_fp32_value(float f) { + // const float scale_to_inf = 0x1.0p+112f; + // const float scale_to_zero = 0x1.0p-110f; + uint32_t scale_to_inf_bits = (uint32_t) 239 << 23; + uint32_t scale_to_zero_bits = (uint32_t) 17 << 23; + float scale_to_inf_val, scale_to_zero_val; + std::memcpy(&scale_to_inf_val, &scale_to_inf_bits, sizeof(scale_to_inf_val)); + std::memcpy(&scale_to_zero_val, &scale_to_zero_bits, sizeof(scale_to_zero_val)); + const float scale_to_inf = scale_to_inf_val; + const float scale_to_zero = scale_to_zero_val; + + float base = (fabsf(f) * scale_to_inf) * scale_to_zero; + + const uint32_t w = fp32_to_bits(f); + const uint32_t shl1_w = w + w; + const uint32_t sign = w & UINT32_C(0x80000000); + uint32_t bias = shl1_w & UINT32_C(0xFF000000); + if (bias < UINT32_C(0x71000000)) { + bias = UINT32_C(0x71000000); + } + + base = fp32_from_bits((bias >> 1) + UINT32_C(0x07800000)) + base; + const uint32_t bits = fp32_to_bits(base); + const uint32_t exp_bits = (bits >> 13) & UINT32_C(0x00007C00); + const uint32_t mantissa_bits = bits & UINT32_C(0x00000FFF); + const uint32_t nonsign = exp_bits + mantissa_bits; + return (sign >> 16) | (shl1_w > UINT32_C(0xFF000000) ? 
UINT16_C(0x7E00) : nonsign); + } + +} // namespace detail + +struct alignas(2) Half { + unsigned short x; + + struct from_bits_t {}; + static constexpr from_bits_t from_bits() { + return from_bits_t(); + } + + // HIP wants __host__ __device__ tag, CUDA does not +#ifdef __HIP_PLATFORM_HCC__ + C10_HOST_DEVICE Half() = default; +#else + Half() = default; +#endif + + constexpr C10_HOST_DEVICE Half(unsigned short bits, from_bits_t) : x(bits){}; + inline C10_HOST_DEVICE Half(float value); + inline C10_HOST_DEVICE operator float() const; + +#if defined(__CUDACC__) || defined(__HIPCC__) + inline C10_HOST_DEVICE Half(const __half& value); + inline C10_HOST_DEVICE operator __half() const; +#endif +}; + +// This is just a placeholder for whatever complex representation we +// end up deciding to use for half-precision complex numbers. +struct alignas(4) ComplexHalf { + Half real_; + Half imag_; + ComplexHalf() = default; + Half real() const { + return real_; + } + Half imag() const { + return imag_; + } + inline ComplexHalf(std::complex value) + : real_(value.real()), imag_(value.imag()) {} + inline operator std::complex() const { + return {real_, imag_}; + } +}; + +template +struct is_complex_t : public std::false_type {}; + +template +struct is_complex_t> : public std::true_type {}; + +template <> +struct is_complex_t : public std::true_type {}; + +// Extract double from std::complex; is identity otherwise +// TODO: Write in more idiomatic C++17 +template +struct scalar_value_type { + using type = T; +}; +template +struct scalar_value_type> { + using type = T; +}; +template <> +struct scalar_value_type { + using type = Half; +}; + +// The old implementation of Converter as a function made nvcc's head explode +// when we added std::complex on top of the specializations for CUDA-only types +// like __half, so I rewrote it as a templated class (so, no more overloads, +// just (partial) specialization). + +template +struct Converter { + To operator()(From f) { + return static_cast(f); + } +}; + +template +To convert(From from) { + return Converter()(from); +} + +template +struct Converter< + To, + std::complex, + typename std::enable_if< + c10::guts::negation>::value>::type> { + To operator()(std::complex f) { + return static_cast(f.real()); + } +}; + +// In some versions of MSVC, there will be a compiler error when building. +// C4146: unary minus operator applied to unsigned type, result still unsigned +// C4804: unsafe use of type 'bool' in operation +// It can be addressed by disabling the following warning. +#ifdef _MSC_VER +#pragma warning( push ) +#pragma warning( disable : 4146 ) +#pragma warning( disable : 4804 ) +#endif + + +// bool can be converted to any type. +// Without specializing on bool, in pytorch_linux_trusty_py2_7_9_build: +// `error: comparison of constant '255' with boolean expression is always false` +// for `f > limit::max()` below +template +typename std::enable_if::value, bool>::type overflows( + From f) { + return false; +} + +// skip isnan and isinf check for integral types +template +typename std::enable_if::value && !std::is_same::value, bool>::type overflows( + From f) { + using limit = std::numeric_limits::type>; + if (!limit::is_signed && std::numeric_limits::is_signed) { + // allow for negative numbers to wrap using two's complement arithmetic. + // For example, with uint8, this allows for `a - b` to be treated as + // `a + 255 * b`. 
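+  // Concrete illustration (added comment, not in the upstream source): with
+  // To == uint8_t, overflows<uint8_t>(-1) is false because -1 wraps around to
+  // 255, while overflows<uint8_t>(300) and overflows<uint8_t>(-300) are both
+  // true because the magnitude exceeds limit::max() == 255.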
+ return f > limit::max() || + (f < 0 && -static_cast(f) > limit::max()); + } else { + return f < limit::lowest() || f > limit::max(); + } +} + +template +typename std::enable_if::value, bool>::type +overflows(From f) { + using limit = std::numeric_limits::type>; + if (limit::has_infinity && std::isinf(static_cast(f))) { + return false; + } + if (!limit::has_quiet_NaN && (f != f)) { + return true; + } + return f < limit::lowest() || f > limit::max(); +} + +#ifdef _MSC_VER +#pragma warning( pop ) +#endif + +template +typename std::enable_if::value, bool>::type overflows( + From f) { + // casts from complex to real are considered to overflow if the + // imaginary component is non-zero + if (!is_complex_t::value && f.imag() != 0) { + return true; + } + // Check for overflow componentwise + // (Technically, the imag overflow check is guaranteed to be false + // when !is_complex_t, but any optimizer worth its salt will be + // able to figure it out.) + return overflows< + typename scalar_value_type::type, + typename From::value_type>(f.real()) || + overflows< + typename scalar_value_type::type, + typename From::value_type>(f.imag()); +} + +template +To checked_convert(From f, const char* name) { + // Converting to bool can't overflow so we exclude this case from checking. + if (!std::is_same::value && overflows(f)) { + std::ostringstream oss; + oss << "value cannot be converted to type " << name + << " without overflow: " << f; + throw std::domain_error(oss.str()); + } + return convert(f); +} + +C10_API std::ostream& operator<<(std::ostream& out, const Half& value); + +} // namespace c10 + +#include diff --git a/thirdparty/libtorch/include/c10/util/IdWrapper.h b/thirdparty/libtorch/include/c10/util/IdWrapper.h new file mode 100644 index 0000000000..dc28141e53 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/IdWrapper.h @@ -0,0 +1,77 @@ +#pragma once + +#include +#include +#include + +namespace c10 { + +/** + * This template simplifies generation of simple classes that wrap an id + * in a typesafe way. Namely, you can use it to create a very lightweight + * type that only offers equality comparators and hashing. Example: + * + * struct MyIdType final : IdWrapper { + * constexpr explicit MyIdType(uint32_t id): IdWrapper(id) {} + * }; + * + * Then in the global top level namespace: + * + * C10_DEFINE_HASH_FOR_IDWRAPPER(MyIdType); + * + * That's it - equality operators and hash functions are automatically defined + * for you, given the underlying type supports it. + */ +template +class IdWrapper { + public: + using underlying_type = UnderlyingType; + using concrete_type = ConcreteType; + + protected: + constexpr explicit IdWrapper(underlying_type id) noexcept( + noexcept(underlying_type(std::declval()))) + : id_(id) {} + + constexpr underlying_type underlyingId() const + noexcept(noexcept(underlying_type(std::declval()))) { + return id_; + } + + private: + friend size_t hash_value(const concrete_type& v) { + return std::hash()(v.id_); + } + + // TODO Making operator== noexcept if underlying type is noexcept equality + // comparable doesn't work with GCC 4.8. + // Fix this once we don't need GCC 4.8 anymore. + friend constexpr bool operator==( + const concrete_type& lhs, + const concrete_type& rhs) noexcept { + return lhs.id_ == rhs.id_; + } + + // TODO Making operator!= noexcept if operator== is noexcept doesn't work with + // GCC 4.8. + // Fix this once we don't need GCC 4.8 anymore. 
+ friend constexpr bool operator!=( + const concrete_type& lhs, + const concrete_type& rhs) noexcept { + return !(lhs == rhs); + } + + underlying_type id_; +}; + +} // namespace c10 + +#define C10_DEFINE_HASH_FOR_IDWRAPPER(ClassName)\ + namespace std { \ + template <> \ + struct hash { \ + size_t operator()(ClassName x) const { \ + return hash_value(x); \ + } \ + }; \ + } diff --git a/thirdparty/libtorch/include/c10/util/LeftRight.h b/thirdparty/libtorch/include/c10/util/LeftRight.h new file mode 100644 index 0000000000..ad6cc81413 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/LeftRight.h @@ -0,0 +1,177 @@ +#include +#include +#include +#include +#include +#include + +namespace c10 { + +namespace detail { + +struct IncrementRAII final { +public: + explicit IncrementRAII(std::atomic *counter): _counter(counter) { + ++(*_counter); + } + + ~IncrementRAII() { + --(*_counter); + } +private: + std::atomic *_counter; + + C10_DISABLE_COPY_AND_ASSIGN(IncrementRAII); +}; + +} + +// LeftRight wait-free readers synchronization primitive +// https://hal.archives-ouvertes.fr/hal-01207881/document +template +class LeftRight final { +public: + template + explicit LeftRight(const Args& ...args) + : _counters{{{0}, {0}}} + , _foregroundCounterIndex(0) + , _foregroundDataIndex(0) + , _inDestruction(false) + , _data{{T{args...}, T{args...}}} + , _writeMutex() + {} + + // Copying and moving would not be threadsafe. + // Needs more thought and careful design to make that work. + LeftRight(const LeftRight&) = delete; + LeftRight(LeftRight&&) noexcept = delete; + LeftRight& operator=(const LeftRight&) = delete; + LeftRight& operator=(LeftRight&&) noexcept= delete; + + ~LeftRight() { + // from now on, no new readers/writers will be accepted (see asserts in read()/write()) + _inDestruction = true; + + // wait until any potentially running writers are finished + { + std::unique_lock lock(_writeMutex); + } + + // wait until any potentially running readers are finished + while (_counters[0].load() != 0 || _counters[1].load() != 0) { + std::this_thread::yield(); + } + } + + template + auto read(F&& readFunc) const -> typename std::result_of::type { + detail::IncrementRAII _increment_counter(&_counters[_foregroundCounterIndex.load()]); + + if(C10_UNLIKELY(_inDestruction.load())) { + throw std::logic_error("Issued LeftRight::read() after the destructor started running"); + } + + return readFunc(_data[_foregroundDataIndex.load()]); + } + + // Throwing an exception in writeFunc is ok but causes the state to be either the old or the new state, + // depending on if the first or the second call to writeFunc threw. + template + auto write(F&& writeFunc) -> typename std::result_of::type { + std::unique_lock lock(_writeMutex); + + if(C10_UNLIKELY(_inDestruction.load())) { + throw std::logic_error("Issued LeftRight::write() after the destructor started running"); + } + + return _write(writeFunc); + } + +private: + template + auto _write(const F& writeFunc) -> typename std::result_of::type { + /* + * Assume, A is in background and B in foreground. In simplified terms, we want to do the following: + * 1. Write to A (old background) + * 2. Switch A/B + * 3. Write to B (new background) + * + * More detailed algorithm (explanations on why this is important are below in code): + * 1. Write to A + * 2. Switch A/B data pointers + * 3. Wait until A counter is zero + * 4. Switch A/B counters + * 5. Wait until B counter is zero + * 6. Write to B + */ + + auto localDataIndex = _foregroundDataIndex.load(); + + // 1. 
Write to A + _callWriteFuncOnBackgroundInstance(writeFunc, localDataIndex); + + // 2. Switch A/B data pointers + localDataIndex = localDataIndex ^ 1; + _foregroundDataIndex = localDataIndex; + + /* + * 3. Wait until A counter is zero + * + * In the previous write run, A was foreground and B was background. + * There was a time after switching _foregroundDataIndex (B to foreground) and before switching _foregroundCounterIndex, + * in which new readers could have read B but incremented A's counter. + * + * In this current run, we just switched _foregroundDataIndex (A back to foreground), but before writing to + * the new background B, we have to make sure A's counter was zero briefly, so all these old readers are gone. + */ + auto localCounterIndex = _foregroundCounterIndex.load(); + _waitForBackgroundCounterToBeZero(localCounterIndex); + + /* + * 4. Switch A/B counters + * + * Now that we know all readers on B are really gone, we can switch the counters and have new readers + * increment A's counter again, which is the correct counter since they're reading A. + */ + localCounterIndex = localCounterIndex ^ 1; + _foregroundCounterIndex = localCounterIndex; + + /* + * 5. Wait until B counter is zero + * + * This waits for all the readers on B that came in while both data and counter for B was in foreground, + * i.e. normal readers that happened outside of that brief gap between switching data and counter. + */ + _waitForBackgroundCounterToBeZero(localCounterIndex); + + // 6. Write to B + return _callWriteFuncOnBackgroundInstance(writeFunc, localDataIndex); + } + + template + auto _callWriteFuncOnBackgroundInstance(const F& writeFunc, uint8_t localDataIndex) -> typename std::result_of::type { + try { + return writeFunc(_data[localDataIndex ^ 1]); + } catch (...) { + // recover invariant by copying from the foreground instance + _data[localDataIndex ^ 1] = _data[localDataIndex]; + // rethrow + throw; + } + } + + void _waitForBackgroundCounterToBeZero(uint8_t counterIndex) { + while (_counters[counterIndex ^ 1].load() != 0) { + std::this_thread::yield(); + } + } + + mutable std::array, 2> _counters; + std::atomic _foregroundCounterIndex; + std::atomic _foregroundDataIndex; + std::atomic _inDestruction; + std::array _data; + std::mutex _writeMutex; +}; + +} diff --git a/thirdparty/libtorch/include/c10/util/Logging.h b/thirdparty/libtorch/include/c10/util/Logging.h new file mode 100644 index 0000000000..0ede427b4e --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Logging.h @@ -0,0 +1,295 @@ +#ifndef C10_UTIL_LOGGING_H_ +#define C10_UTIL_LOGGING_H_ + +#include +#include +#include +#include +#include + +#include "c10/macros/Macros.h" +#include "c10/util/Exception.h" +#include "c10/util/Flags.h" +#include "c10/util/StringUtil.h" + +// CAFFE2_LOG_THRESHOLD is a compile time flag that would allow us to turn off +// logging at compile time so no logging message below that level is produced +// at all. The value should be between INT_MIN and CAFFE_FATAL. +#ifndef CAFFE2_LOG_THRESHOLD +// If we have not defined the compile time log threshold, we keep all the +// log cases. +#define CAFFE2_LOG_THRESHOLD INT_MIN +#endif // CAFFE2_LOG_THRESHOLD + +// Below are different implementations for glog and non-glog cases. 
+#ifdef C10_USE_GLOG +#include "c10/util/logging_is_google_glog.h" +#else // !C10_USE_GLOG +#include "c10/util/logging_is_not_google_glog.h" +#endif // C10_USE_GLOG + +C10_DECLARE_int(caffe2_log_level); +C10_DECLARE_bool(caffe2_use_fatal_for_enforce); + +// Some versions of GLOG support less-spammy version of LOG_EVERY_MS. If it's +// not available - just short-circuit to the always working one one. +// We define the C10_ name to avoid confusing other files +#ifdef LOG_EVERY_MS +#define C10_LOG_EVERY_MS(severity, ms) LOG_EVERY_MS(severity, ms) +#else +#define C10_LOG_EVERY_MS(severity, ms) LOG(severity) +#endif + +// Same for LOG_FIRST_N +#ifdef LOG_FIRST_N +#define C10_LOG_FIRST_N(severity, n) LOG_FIRST_N(severity, n) +#else +#define C10_LOG_FIRST_N(severity, n) LOG(severity) +#endif + +// Same for LOG_EVERY_N +#ifdef LOG_EVERY_N +#define C10_LOG_EVERY_N(severity, n) LOG_EVERY_N(severity, n) +#else +#define C10_LOG_EVERY_N(severity, n) LOG(severity) +#endif + +namespace c10 { + +using std::string; + +// Functions that we use for initialization. +C10_API bool InitCaffeLogging(int* argc, char** argv); +C10_API void UpdateLoggingLevelsFromFlags(); + +C10_API C10_NORETURN void ThrowEnforceNotMet( + const char* file, + const int line, + const char* condition, + const std::string& msg, + const void* caller = nullptr); + +constexpr bool IsUsingGoogleLogging() { +#ifdef C10_USE_GLOG + return true; +#else + return false; +#endif +} + +/** + * A utility to allow one to show log info to stderr after the program starts. + * + * This is similar to calling GLOG's --logtostderr, or setting caffe2_log_level + * to smaller than INFO. You are recommended to only use this in a few sparse + * cases, such as when you want to write a tutorial or something. Normally, use + * the commandline flags to set the log level. + */ +C10_API void ShowLogInfoToStderr(); + +C10_API void SetStackTraceFetcher(std::function fetcher); + +using EnforceNotMet = ::c10::Error; + +#define CAFFE_ENFORCE(condition, ...) \ + do { \ + if (C10_UNLIKELY(!(condition))) { \ + ::c10::ThrowEnforceNotMet( \ + __FILE__, __LINE__, #condition, ::c10::str(__VA_ARGS__)); \ + } \ + } while (false) + +#define CAFFE_ENFORCE_WITH_CALLER(condition, ...) \ + do { \ + if (C10_UNLIKELY(!(condition))) { \ + ::c10::ThrowEnforceNotMet( \ + __FILE__, __LINE__, #condition, ::c10::str(__VA_ARGS__), this); \ + } \ + } while (false) + +#define CAFFE_THROW(...) \ + ::c10::ThrowEnforceNotMet(__FILE__, __LINE__, "", ::c10::str(__VA_ARGS__)) + +/** + * Rich logging messages + * + * CAFFE_ENFORCE_THAT can be used with one of the "checker functions" that + * capture input argument values and add it to the exception message. E.g. + * `CAFFE_ENFORCE_THAT(Equals(foo(x), bar(y)), "Optional additional message")` + * would evaluate both foo and bar only once and if the results are not equal - + * include them in the exception message. + * + * Some of the basic checker functions like Equals or Greater are already + * defined below. Other header might define customized checkers by adding + * functions to caffe2::enforce_detail namespace. For example: + * + * namespace caffe2 { namespace enforce_detail { + * inline EnforceFailMessage IsVector(const vector& shape) { + * if (shape.size() == 1) { return EnforceOK(); } + * return c10::str("Shape ", shape, " is not a vector"); + * } + * }} + * + * With further usages like `CAFFE_ENFORCE_THAT(IsVector(Input(0).dims()))` + * + * Convenient wrappers for binary operations like CAFFE_ENFORCE_EQ are provided + * too. 
Please use them instead of CHECK_EQ and friends for failures in + * user-provided input. + */ + +namespace enforce_detail { + +struct C10_API EnforceOK {}; + +class C10_API EnforceFailMessage { + public: +#ifdef _MSC_VER + // MSVC + NVCC ignores constexpr and will issue a warning if included. + /* implicit */ EnforceFailMessage(EnforceOK) : msg_(nullptr) {} +#else + constexpr /* implicit */ EnforceFailMessage(EnforceOK) : msg_(nullptr) {} +#endif + EnforceFailMessage(EnforceFailMessage&&) = default; + EnforceFailMessage(const EnforceFailMessage&) = delete; + EnforceFailMessage& operator=(EnforceFailMessage&&) = delete; + EnforceFailMessage& operator=(const EnforceFailMessage&) = delete; + + // Catch all wrong usages like CAFFE_ENFORCE_THAT(x < y) + template + /* implicit */ EnforceFailMessage(Args...) { + static_assert( + // This stands for an "impossible" condition. Plain `false` doesn't + // trick compiler enough. + sizeof...(Args) == std::numeric_limits::max(), + "CAFFE_ENFORCE_THAT has to be used with one of special check functions " + "like `Equals`. Use CAFFE_ENFORCE for simple boolean checks."); + } + + /* implicit */ EnforceFailMessage(std::string&& msg); + + inline bool bad() const { + return msg_ != nullptr; + } + std::string get_message_and_free(std::string&& extra) const { + std::string r; + if (extra.empty()) { + r = std::move(*msg_); + } else { + r = ::c10::str(std::move(*msg_), ". ", std::move(extra)); + } + delete msg_; + return r; + } + + private: + std::string* msg_{}; +}; + +#define BINARY_COMP_HELPER(name, op) \ + template \ + inline EnforceFailMessage name(const T1& x, const T2& y) { \ + if (x op y) { \ + return EnforceOK(); \ + } \ + return c10::str(x, " vs ", y); \ + } +BINARY_COMP_HELPER(Equals, ==) +BINARY_COMP_HELPER(NotEquals, !=) +BINARY_COMP_HELPER(Greater, >) +BINARY_COMP_HELPER(GreaterEquals, >=) +BINARY_COMP_HELPER(Less, <) +BINARY_COMP_HELPER(LessEquals, <=) +#undef BINARY_COMP_HELPER + +#define CAFFE_ENFORCE_THAT_IMPL(condition, expr, ...) \ + do { \ + using namespace ::c10::enforce_detail; \ + const EnforceFailMessage& CAFFE_ENFORCE_THAT_IMPL_r_ = (condition); \ + if (C10_UNLIKELY(CAFFE_ENFORCE_THAT_IMPL_r_.bad())) { \ + ::c10::ThrowEnforceNotMet( \ + __FILE__, \ + __LINE__, \ + expr, \ + CAFFE_ENFORCE_THAT_IMPL_r_.get_message_and_free( \ + ::c10::str(__VA_ARGS__))); \ + } \ + } while (false) + +#define CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER(condition, expr, ...) \ + do { \ + using namespace ::c10::enforce_detail; \ + const EnforceFailMessage& CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER_r_ = \ + (condition); \ + if (C10_UNLIKELY(CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER_r_.bad())) { \ + ::c10::ThrowEnforceNotMet( \ + __FILE__, \ + __LINE__, \ + expr, \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER_r_.get_message_and_free( \ + ::c10::str(__VA_ARGS__)), \ + this); \ + } \ + } while (false) +} // namespace enforce_detail + +#define CAFFE_ENFORCE_THAT(condition, ...) \ + CAFFE_ENFORCE_THAT_IMPL((condition), #condition, __VA_ARGS__) + +#define CAFFE_ENFORCE_EQ(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL(Equals((x), (y)), #x " == " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_NE(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL(NotEquals((x), (y)), #x " != " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_LE(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL(LessEquals((x), (y)), #x " <= " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_LT(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL(Less((x), (y)), #x " < " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_GE(x, y, ...) 
\ + CAFFE_ENFORCE_THAT_IMPL(GreaterEquals((x), (y)), #x " >= " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_GT(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL(Greater((x), (y)), #x " > " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_EQ_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER( \ + Equals((x), (y)), #x " == " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_NE_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER( \ + NotEquals((x), (y)), #x " != " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_LE_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER( \ + LessEquals((x), (y)), #x " <= " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_LT_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER(Less((x), (y)), #x " < " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_GE_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER( \ + GreaterEquals((x), (y)), #x " >= " #y, __VA_ARGS__) +#define CAFFE_ENFORCE_GT_WITH_CALLER(x, y, ...) \ + CAFFE_ENFORCE_THAT_IMPL_WITH_CALLER( \ + Greater((x), (y)), #x " > " #y, __VA_ARGS__) + +/** + * Very lightweight logging for the first time API usage. It's beneficial for + * tracking of individual functionality usage in larger applications. + * + * In order to ensure light-weightness of logging, we utilize static variable + * trick - LogAPIUsage will be invoked only once and further invocations will + * just do an atomic check. + * + * Example: + * // Logs caller info with an arbitrary text event, if there is a usage. + * C10_LOG_API_USAGE_ONCE("my_api"); + */ +#define C10_LOG_API_USAGE_ONCE(...) \ + C10_UNUSED static bool C10_ANONYMOUS_VARIABLE(logFlag) = \ + ::c10::detail::LogAPIUsageFakeReturn(__VA_ARGS__); + +// API usage logging capabilities +C10_API void SetAPIUsageLogger(std::function logger); +C10_API void LogAPIUsage(const std::string& context); + +namespace detail { +// Return value is needed to do the static variable initialization trick +C10_API bool LogAPIUsageFakeReturn(const std::string& context); +} + +} // namespace c10 + +#endif // C10_UTIL_LOGGING_H_ diff --git a/thirdparty/libtorch/include/c10/util/Metaprogramming.h b/thirdparty/libtorch/include/c10/util/Metaprogramming.h new file mode 100644 index 0000000000..b1056a9b10 --- /dev/null +++ b/thirdparty/libtorch/include/c10/util/Metaprogramming.h @@ -0,0 +1,146 @@ +#pragma once + +#include +#include +#include +#include +#include + +namespace c10 { namespace guts { + +/** + * Access information about result type or arguments from a function type. + * Example: + * using A = function_traits::return_type // A == int + * using A = function_traits::parameter_types::tuple_type // A == tuple + */ +template struct function_traits { + static_assert(!std::is_same::value, "In function_traits, Func must be a plain function type."); +}; +template +struct function_traits { + using func_type = Result (Args...); + using return_type = Result; + using parameter_types = typelist::typelist; + static constexpr auto number_of_parameters = sizeof...(Args); +}; + +/** + * infer_function_traits: creates a `function_traits` type for a simple + * function (pointer) or functor (lambda/struct). Currently does not support + * class methods. 
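+ * Example (an illustrative addition; assumes the usual c10::guts typelist
+ * definitions that ship with this header):
+ *   auto op = [](int a, float b) -> double { return a + b; };
+ *   using traits = infer_function_traits_t<decltype(op)>;
+ *   static_assert(std::is_same<traits::return_type, double>::value, "");
+ *   static_assert(traits::number_of_parameters == 2, "");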
+ */ + +template +struct infer_function_traits { + using type = function_traits>; +}; + +template +struct infer_function_traits { + using type = function_traits; +}; + +template +struct infer_function_traits { + using type = function_traits; +}; + +template +using infer_function_traits_t = typename infer_function_traits::type; + +/** + * Use extract_arg_by_filtered_index to return the i-th argument whose + * type fulfills a given type trait. The argument itself is perfectly forwarded. + * + * Example: + * std::string arg1 = "Hello"; + * std::string arg2 = "World"; + * std::string&& result = extract_arg_by_filtered_index(0, arg1, 2.0, std::move(arg2)); + * + * Warning: Taking the result by rvalue reference can cause segfaults because ownership will not be passed on + * from the original reference. The original reference dies after the expression and the resulting + */ +namespace detail { +template